Merge development into master

Commit ad80ac4453, authored by github-actions[bot] on 2024-09-15 19:26:21 +00:00, committed by GitHub.
346 changed files with 13096 additions and 6243 deletions


@@ -7,9 +7,16 @@ sleep 30
 if kill -s 0 $PID
 then
-    echo "Bazarr is still running. We'll kill it..."
-    kill $PID
-    exit 0
+    echo "Bazarr is still running. We'll test if UI is working..."
 else
     exit 1
 fi

+exitcode=0
+curl -fsSL --retry-all-errors --retry 60 --retry-max-time 120 --max-time 10 "http://127.0.0.1:6767" --output /dev/null || exitcode=$?
+
+[[ ${exitcode} == 0 ]] && echo "UI is responsive, good news!" || echo "Oops, UI isn't reachable, bad news..."
+
+echo "Let's stop Bazarr before we exit..."
+pkill -INT -P $PID
+
+exit ${exitcode}


@@ -10,5 +10,5 @@ latest_verion=$(git describe --tags --abbrev=0)
 if [[ $RELEASE_MASTER -eq 1 ]]; then
     auto-changelog --stdout -t changelog-master.hbs --starting-version "$master_version" --commit-limit 3
 else
-    auto-changelog --stdout --starting-version "$latest_verion" --unreleased-only --commit-limit 0
+    auto-changelog --stdout --starting-version "$latest_verion" --unreleased-only --commit-limit false
 fi


@@ -36,7 +36,7 @@ jobs:
       - name: Setup NodeJS
         uses: actions/setup-node@v4
         with:
-          node-version: "lts/*"
+          node-version-file: "${{ env.UI_DIRECTORY }}/.nvmrc"

       - name: Install dependencies
        run: npm install


@@ -38,7 +38,7 @@ jobs:
      - name: Setup NodeJS
        uses: actions/setup-node@v4
        with:
-          node-version: "lts/*"
+          node-version-file: "${{ env.UI_DIRECTORY }}/.nvmrc"

      - name: Install Global Tools
        run: npm install -g release-it auto-changelog


@@ -40,7 +40,7 @@ jobs:
      - name: Setup NodeJS
        uses: actions/setup-node@v4
        with:
-          node-version: "lts/*"
+          node-version-file: "${{ env.UI_DIRECTORY }}/.nvmrc"

      - name: Install Global Tools
        run: npm install -g release-it auto-changelog


@@ -24,7 +24,7 @@ jobs:
      - name: Setup NodeJS
        uses: actions/setup-node@v4
        with:
-          node-version: "lts/*"
+          node-version-file: "${{ env.UI_DIRECTORY }}/.nvmrc"

      - name: Install UI Dependencies
        run: npm install


@@ -114,6 +114,8 @@ class Subtitles(Resource):
         subtitles_path = args.get('path')
         media_type = args.get('type')
         id = args.get('id')
+        forced = True if args.get('forced') == 'True' else False
+        hi = True if args.get('hi') == 'True' else False

         if not os.path.exists(subtitles_path):
             return 'Subtitles file not found. Path mapping issue?', 500
@@ -144,6 +146,8 @@ class Subtitles(Resource):
                 'video_path': video_path,
                 'srt_path': subtitles_path,
                 'srt_lang': language,
+                'hi': hi,
+                'forced': forced,
                 'reference': args.get('reference') if args.get('reference') not in empty_values else video_path,
                 'max_offset_seconds': args.get('max_offset_seconds') if args.get('max_offset_seconds') not in
                 empty_values else str(settings.subsync.max_offset_seconds),
@@ -167,8 +171,6 @@ class Subtitles(Resource):
         elif action == 'translate':
             from_language = subtitles_lang_from_filename(subtitles_path)
             dest_language = language
-            forced = True if args.get('forced') == 'true' else False
-            hi = True if args.get('hi') == 'true' else False
             try:
                 translate_subtitles_file(video_path=video_path, source_srt_file=subtitles_path,
                                          from_lang=from_language, to_lang=dest_language, forced=forced, hi=hi,


@@ -8,6 +8,8 @@ from app.database import TableShows, TableMovies, database, select
 from ..utils import authenticate

+import textdistance
+
 api_ns_system_searches = Namespace('System Searches', description='Search for series or movies by name')
@@ -61,4 +63,6 @@ class Searches(Resource):
                 results.append(result)

+        # sort results by how closely they match the query
+        results = sorted(results, key=lambda x: textdistance.hamming.distance(query, x['title']))
+
         return results
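Note: textdistance's Hamming metric counts per-position character differences plus any length gap, so an exact or near-exact title sorts first. A minimal standalone sketch, with made-up titles:

    import textdistance

    titles = ['Bad Monkey', 'Breaking Bad', 'Bad Sisters']  # illustrative titles
    query = 'Breaking Bad'

    # Lower distance means a closer per-character match against the query.
    ranked = sorted(titles, key=lambda t: textdistance.hamming.distance(query, t))
    print(ranked[0])  # 'Breaking Bad' (distance 0)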


@@ -73,6 +73,7 @@ class SystemSettings(Resource):
                             mustNotContain=str(item['mustNotContain']),
                             originalFormat=int(item['originalFormat']) if item['originalFormat'] not in None_Keys else
                             None,
+                            tag=item['tag'] if 'tag' in item else None,
                         )
                         .where(TableLanguagesProfiles.profileId == item['profileId']))
                     existing.remove(item['profileId'])
@@ -89,6 +90,7 @@ class SystemSettings(Resource):
                         mustNotContain=str(item['mustNotContain']),
                         originalFormat=int(item['originalFormat']) if item['originalFormat'] not in None_Keys else
                         None,
+                        tag=item['tag'] if 'tag' in item else None,
                     ))
             for profileId in existing:
                 # Remove deleted profiles


@@ -31,12 +31,20 @@ def base_url_slash_cleaner(uri):
 def validate_ip_address(ip_string):
+    if ip_string == '*':
+        return True
     try:
         ip_address(ip_string)
         return True
     except ValueError:
         return False


+def validate_tags(tags):
+    if not tags:
+        return True
+
+    return all(re.match(r'^[a-z0-9_-]+$', item) for item in tags)
+
+
 ONE_HUNDRED_YEARS_IN_MINUTES = 52560000
 ONE_HUNDRED_YEARS_IN_HOURS = 876000
@@ -67,7 +75,7 @@ validators = [
     # general section
     Validator('general.flask_secret_key', must_exist=True, default=hexlify(os.urandom(16)).decode(),
               is_type_of=str),
-    Validator('general.ip', must_exist=True, default='0.0.0.0', is_type_of=str, condition=validate_ip_address),
+    Validator('general.ip', must_exist=True, default='*', is_type_of=str, condition=validate_ip_address),
     Validator('general.port', must_exist=True, default=6767, is_type_of=int, gte=1, lte=65535),
     Validator('general.base_url', must_exist=True, default='', is_type_of=str),
     Validator('general.path_mappings', must_exist=True, default=[], is_type_of=list),
@@ -88,6 +96,9 @@ validators = [
     Validator('general.use_sonarr', must_exist=True, default=False, is_type_of=bool),
     Validator('general.use_radarr', must_exist=True, default=False, is_type_of=bool),
     Validator('general.path_mappings_movie', must_exist=True, default=[], is_type_of=list),
+    Validator('general.serie_tag_enabled', must_exist=True, default=False, is_type_of=bool),
+    Validator('general.movie_tag_enabled', must_exist=True, default=False, is_type_of=bool),
+    Validator('general.remove_profile_tags', must_exist=True, default=[], is_type_of=list, condition=validate_tags),
     Validator('general.serie_default_enabled', must_exist=True, default=False, is_type_of=bool),
     Validator('general.serie_default_profile', must_exist=True, default='', is_type_of=(int, str)),
     Validator('general.movie_default_enabled', must_exist=True, default=False, is_type_of=bool),
@@ -176,7 +187,7 @@ validators = [
     Validator('sonarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
     Validator('sonarr.series_sync', must_exist=True, default=60, is_type_of=int,
               is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
-    Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
+    Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list, condition=validate_tags),
     Validator('sonarr.excluded_series_types', must_exist=True, default=[], is_type_of=list),
     Validator('sonarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
     Validator('sonarr.exclude_season_zero', must_exist=True, default=False, is_type_of=bool),
@@ -199,7 +210,7 @@ validators = [
     Validator('radarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
     Validator('radarr.movies_sync', must_exist=True, default=60, is_type_of=int,
               is_in=[15, 60, 180, 360, 720, 1440, 10080, ONE_HUNDRED_YEARS_IN_MINUTES]),
-    Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
+    Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list, condition=validate_tags),
     Validator('radarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
     Validator('radarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
     Validator('radarr.sync_only_monitored_movies', must_exist=True, default=False, is_type_of=bool),
@@ -271,6 +282,10 @@ validators = [
     Validator('legendasdivx.password', must_exist=True, default='', is_type_of=str, cast=str),
     Validator('legendasdivx.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),

+    # legendasnet section
+    Validator('legendasnet.username', must_exist=True, default='', is_type_of=str, cast=str),
+    Validator('legendasnet.password', must_exist=True, default='', is_type_of=str, cast=str),
+
     # ktuvit section
     Validator('ktuvit.email', must_exist=True, default='', is_type_of=str),
     Validator('ktuvit.hashed_password', must_exist=True, default='', is_type_of=str, cast=str),
@@ -298,6 +313,12 @@ validators = [
     # analytics section
     Validator('analytics.enabled', must_exist=True, default=True, is_type_of=bool),

+    # jimaku section
+    Validator('jimaku.api_key', must_exist=True, default='', is_type_of=str),
+    Validator('jimaku.enable_name_search_fallback', must_exist=True, default=True, is_type_of=bool),
+    Validator('jimaku.enable_archives_download', must_exist=True, default=False, is_type_of=bool),
+    Validator('jimaku.enable_ai_subs', must_exist=True, default=False, is_type_of=bool),
+
     # titlovi section
     Validator('titlovi.username', must_exist=True, default='', is_type_of=str, cast=str),
@@ -321,6 +342,9 @@ validators = [
     Validator('karagarga.f_username', must_exist=True, default='', is_type_of=str, cast=str),
     Validator('karagarga.f_password', must_exist=True, default='', is_type_of=str, cast=str),

+    # subdl section
+    Validator('subdl.api_key', must_exist=True, default='', is_type_of=str, cast=str),
+
     # subsync section
     Validator('subsync.use_subsync', must_exist=True, default=False, is_type_of=bool),
     Validator('subsync.use_subsync_threshold', must_exist=True, default=False, is_type_of=bool),
@@ -451,6 +475,7 @@ array_keys = ['excluded_tags',
               'enabled_integrations',
               'path_mappings',
               'path_mappings_movie',
+              'remove_profile_tags',
               'language_equals',
               'blacklisted_languages',
               'blacklisted_providers']
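Note: the new validate_tags condition only accepts lowercase alphanumeric tags (underscore and hyphen allowed), and an empty list passes. A standalone sketch of its behaviour:

    import re

    def validate_tags(tags):
        # An empty tag list passes validation.
        if not tags:
            return True
        # Every tag must be lowercase letters, digits, '_' or '-'.
        return all(re.match(r'^[a-z0-9_-]+$', item) for item in tags)

    assert validate_tags([])                    # no tags configured: valid
    assert validate_tags(['anime', 'kids-4k'])  # lowercase/digits/hyphen: valid
    assert not validate_tags(['Anime'])         # uppercase is rejected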


@@ -172,6 +172,7 @@ class TableHistory(Base):
     video_path = mapped_column(Text)
     matched = mapped_column(Text)
     not_matched = mapped_column(Text)
+    upgradedFromId = mapped_column(Integer, ForeignKey('table_history.id'))


 class TableHistoryMovie(Base):
@@ -190,6 +191,7 @@ class TableHistoryMovie(Base):
     video_path = mapped_column(Text)
     matched = mapped_column(Text)
     not_matched = mapped_column(Text)
+    upgradedFromId = mapped_column(Integer, ForeignKey('table_history_movie.id'))


 class TableLanguagesProfiles(Base):
@@ -202,6 +204,7 @@ class TableLanguagesProfiles(Base):
     name = mapped_column(Text, nullable=False)
     mustContain = mapped_column(Text)
     mustNotContain = mapped_column(Text)
+    tag = mapped_column(Text)


 class TableMovies(Base):
@@ -376,6 +379,7 @@ def update_profile_id_list():
         'mustContain': ast.literal_eval(x.mustContain) if x.mustContain else [],
         'mustNotContain': ast.literal_eval(x.mustNotContain) if x.mustNotContain else [],
         'originalFormat': x.originalFormat,
+        'tag': x.tag,
     } for x in database.execute(
         select(TableLanguagesProfiles.profileId,
                TableLanguagesProfiles.name,
@@ -383,7 +387,8 @@ def update_profile_id_list():
                TableLanguagesProfiles.items,
                TableLanguagesProfiles.mustContain,
                TableLanguagesProfiles.mustNotContain,
-               TableLanguagesProfiles.originalFormat))
+               TableLanguagesProfiles.originalFormat,
+               TableLanguagesProfiles.tag))
         .all()
     ]
@@ -418,7 +423,7 @@ def get_profile_cutoff(profile_id):
     if profile_id and profile_id != 'null':
         cutoff_language = []
         for profile in profile_id_list:
-            profileId, name, cutoff, items, mustContain, mustNotContain, originalFormat = profile.values()
+            profileId, name, cutoff, items, mustContain, mustNotContain, originalFormat, tag = profile.values()
             if cutoff:
                 if profileId == int(profile_id):
                     for item in items:
@@ -497,3 +502,29 @@ def convert_list_to_clause(arr: list):
         return f"({','.join(str(x) for x in arr)})"
     else:
         return ""
+
+
+def upgrade_languages_profile_hi_values():
+    for languages_profile in (database.execute(
+            select(
+                TableLanguagesProfiles.profileId,
+                TableLanguagesProfiles.name,
+                TableLanguagesProfiles.cutoff,
+                TableLanguagesProfiles.items,
+                TableLanguagesProfiles.mustContain,
+                TableLanguagesProfiles.mustNotContain,
+                TableLanguagesProfiles.originalFormat,
+                TableLanguagesProfiles.tag)
+            ))\
+            .all():
+        items = json.loads(languages_profile.items)
+        for language in items:
+            if language['hi'] == "only":
+                language['hi'] = "True"
+            elif language['hi'] in ["also", "never"]:
+                language['hi'] = "False"
+        database.execute(
+            update(TableLanguagesProfiles)
+            .values({"items": json.dumps(items)})
+            .where(TableLanguagesProfiles.profileId == languages_profile.profileId)
+        )
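Note: upgrade_languages_profile_hi_values() collapses the old three-state 'hi' flag into the new boolean strings. A standalone sketch of the transformation on a hypothetical items payload:

    import json

    # Hypothetical TableLanguagesProfiles.items payload using the old three-state flag.
    items = json.loads('[{"language": "en", "hi": "only"}, {"language": "fr", "hi": "also"}]')

    for language in items:
        if language['hi'] == "only":
            language['hi'] = "True"   # 'only' meant HI was required
        elif language['hi'] in ["also", "never"]:
            language['hi'] = "False"  # 'also'/'never' collapse to not-HI

    print(json.dumps(items))
    # [{"language": "en", "hi": "True"}, {"language": "fr", "hi": "False"}]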


@@ -264,6 +264,10 @@ def get_providers_auth():
             'password': settings.legendasdivx.password,
             'skip_wrong_fps': settings.legendasdivx.skip_wrong_fps,
         },
+        'legendasnet': {
+            'username': settings.legendasnet.username,
+            'password': settings.legendasnet.password,
+        },
         'xsubs': {
             'username': settings.xsubs.username,
             'password': settings.xsubs.password,
@@ -285,6 +289,12 @@ def get_providers_auth():
             'username': settings.titlovi.username,
             'password': settings.titlovi.password,
         },
+        'jimaku': {
+            'api_key': settings.jimaku.api_key,
+            'enable_name_search_fallback': settings.jimaku.enable_name_search_fallback,
+            'enable_archives_download': settings.jimaku.enable_archives_download,
+            'enable_ai_subs': settings.jimaku.enable_ai_subs,
+        },
         'ktuvit': {
             'email': settings.ktuvit.email,
             'hashed_password': settings.ktuvit.hashed_password,
@@ -322,6 +332,9 @@ def get_providers_auth():
         },
         "animetosho": {
             'search_threshold': settings.animetosho.search_threshold,
+        },
+        "subdl": {
+            'api_key': settings.subdl.api_key,
         }
     }


@@ -58,10 +58,13 @@ class NoExceptionFormatter(logging.Formatter):

 class UnwantedWaitressMessageFilter(logging.Filter):
     def filter(self, record):
-        if settings.general.debug:
-            # no filtering in debug mode
+        if settings.general.debug or "BAZARR" in record.msg:
+            # no filtering in debug mode or if originating from us
             return True

+        if record.levelno < logging.ERROR:
+            return False
+
         unwantedMessages = [
             "Exception while serving /api/socket.io/",
             ['Session is disconnected', 'Session not found'],
@@ -161,7 +164,7 @@ def configure_logging(debug=False):
     logging.getLogger("websocket").setLevel(logging.CRITICAL)
     logging.getLogger("ga4mp.ga4mp").setLevel(logging.ERROR)

-    logging.getLogger("waitress").setLevel(logging.ERROR)
+    logging.getLogger("waitress").setLevel(logging.INFO)
     logging.getLogger("waitress").addFilter(UnwantedWaitressMessageFilter())
     logging.getLogger("knowit").setLevel(logging.CRITICAL)
     logging.getLogger("enzyme").setLevel(logging.CRITICAL)


@@ -50,7 +50,7 @@ class Server:
                 self.connected = True
             except OSError as error:
                 if error.errno == errno.EADDRNOTAVAIL:
-                    logging.exception("BAZARR cannot bind to specified IP, trying with default (0.0.0.0)")
+                    logging.exception("BAZARR cannot bind to specified IP, trying with 0.0.0.0")
                     self.address = '0.0.0.0'
                     self.connected = False
                     super(Server, self).__init__()
@@ -76,8 +76,7 @@ class Server:
             self.shutdown(EXIT_INTERRUPT)

     def start(self):
-        logging.info(f'BAZARR is started and waiting for request on http://{self.server.effective_host}:'
-                     f'{self.server.effective_port}')
+        self.server.print_listen("BAZARR is started and waiting for requests on: http://{}:{}")
         signal.signal(signal.SIGINT, self.interrupt_handler)
         try:
             self.server.run()


@@ -20,9 +20,10 @@ from .config import settings, base_url
 from .database import System
 from .get_args import args

+frontend_build_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'frontend', 'build')
+
 ui_bp = Blueprint('ui', __name__,
-                  template_folder=os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
-                                               'frontend', 'build'),
+                  template_folder=frontend_build_path,
                   static_folder=os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'frontend',
                                              'build', 'assets'),
                   static_url_path='/assets')
@@ -38,13 +39,15 @@ static_bp = Blueprint('images', __name__, static_folder=static_directory, static
 ui_bp.register_blueprint(static_bp)

 mimetypes.add_type('application/javascript', '.js')
 mimetypes.add_type('text/css', '.css')
 mimetypes.add_type('font/woff2', '.woff2')
 mimetypes.add_type('image/svg+xml', '.svg')
 mimetypes.add_type('image/png', '.png')
 mimetypes.add_type('image/x-icon', '.ico')
+mimetypes.add_type('application/manifest+json', '.webmanifest')
+
+pwa_assets = ['registerSW.js', 'manifest.webmanifest', 'sw.js']

 def check_login(actual_method):
@@ -70,6 +73,10 @@ def catch_all(path):
         # login page has been accessed when no authentication is enabled
         return redirect(base_url or "/", code=302)

+    # PWA Assets are returned from frontend root folder
+    if path in pwa_assets or path.startswith('workbox-'):
+        return send_file(os.path.join(frontend_build_path, path))
+
     auth = True
     if settings.auth.type == 'basic':
         auth = request.authorization
@@ -153,8 +160,8 @@ def backup_download(filename):
 def swaggerui_static(filename):
     basepath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'libs', 'flask_restx',
                             'static')
-    fullpath = os.path.join(basepath, filename)
-    if not fullpath.startswith(basepath):
+    fullpath = os.path.realpath(os.path.join(basepath, filename))
+    if not basepath == os.path.commonpath((basepath, fullpath)):
         return '', 404
     else:
         return send_file(fullpath)
@@ -186,7 +193,8 @@ def proxy(protocol, url):
     elif result.status_code == 401:
         return dict(status=False, error='Access Denied. Check API key.', code=result.status_code)
     elif result.status_code == 404:
-        return dict(status=False, error='Cannot get version. Maybe unsupported legacy API call?', code=result.status_code)
+        return dict(status=False, error='Cannot get version. Maybe unsupported legacy API call?',
+                    code=result.status_code)
     elif 300 <= result.status_code <= 399:
         return dict(status=False, error='Wrong URL Base.', code=result.status_code)
     else:
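Note: the old startswith() check passed traversal payloads because os.path.join(basepath, '../../etc/passwd') still begins with basepath as a plain string; resolving with realpath() before comparing via commonpath() closes that hole. A standalone sketch (paths illustrative):

    import os

    def is_safely_inside(basepath, filename):
        # Resolve '..' segments and symlinks first, then require basepath
        # to be the common ancestor of the resolved target.
        fullpath = os.path.realpath(os.path.join(basepath, filename))
        return basepath == os.path.commonpath((basepath, fullpath))

    base = '/srv/flask_restx/static'
    print(is_safely_inside(base, 'swagger-ui.css'))    # True
    print(is_safely_inside(base, '../../etc/passwd'))  # False
    # The old check would have accepted the same payload:
    print(os.path.join(base, '../../etc/passwd').startswith(base))  # True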


@@ -5,7 +5,8 @@ import os
 from subzero.language import Language

-from app.database import database, insert
+from app.database import database, insert, update
+from sqlalchemy.exc import IntegrityError

 logger = logging.getLogger(__name__)
@@ -18,7 +19,7 @@ class CustomLanguage:
     language = "pt-BR"
     official_alpha2 = "pt"
     official_alpha3 = "por"
-    name = "Brazilian Portuguese"
+    name = "Portuguese (Brazil)"
     iso = "BR"
     _scripts = []
     _possible_matches = ("pt-br", "pob", "pb", "brazilian", "brasil", "brazil")
@@ -50,13 +51,19 @@ class CustomLanguage:
         """Register the custom language subclasses in the database."""
         for sub in cls.__subclasses__():
-            database.execute(
-                insert(table)
-                .values(code3=sub.alpha3,
-                        code2=sub.alpha2,
-                        name=sub.name,
-                        enabled=0)
-                .on_conflict_do_nothing())
+            try:
+                database.execute(
+                    insert(table)
+                    .values(code3=sub.alpha3,
+                            code2=sub.alpha2,
+                            name=sub.name,
+                            enabled=0))
+            except IntegrityError:
+                database.execute(
+                    update(table)
+                    .values(code2=sub.alpha2,
+                            name=sub.name)
+                    .where(table.code3 == sub.alpha3))

     @classmethod
     def found_external(cls, subtitle, subtitle_path):
@@ -212,7 +219,7 @@ class LatinAmericanSpanish(CustomLanguage):
     language = "es-MX"
     official_alpha2 = "es"
     official_alpha3 = "spa"
-    name = "Latin American Spanish"
+    name = "Spanish (Latino)"
     iso = "MX"  # Not fair, but ok
     _scripts = ("419",)
     _possible_matches = (
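Note: .on_conflict_do_nothing() is dialect-specific, so registration now tries a plain INSERT and falls back to an UPDATE on IntegrityError, which also refreshes renamed languages such as 'Portuguese (Brazil)'. A generic sketch of that insert-then-update pattern, using sqlite3 purely for illustration:

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE lang (code3 TEXT PRIMARY KEY, name TEXT)')

    def register(code3, name):
        # Try the insert first; fall back to an update when the row exists.
        try:
            con.execute('INSERT INTO lang (code3, name) VALUES (?, ?)', (code3, name))
        except sqlite3.IntegrityError:
            con.execute('UPDATE lang SET name = ? WHERE code3 = ?', (name, code3))

    register('pob', 'Brazilian Portuguese')
    register('pob', 'Portuguese (Brazil)')  # second call updates instead of failing
    print(con.execute('SELECT name FROM lang').fetchone()[0])  # Portuguese (Brazil)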


@@ -44,6 +44,12 @@ def create_languages_dict():
         .values(name='Chinese Simplified')
         .where(TableSettingsLanguages.code3 == 'zho'))

+    # replace Modern Greek by Greek to match Sonarr and Radarr languages
+    database.execute(
+        update(TableSettingsLanguages)
+        .values(name='Greek')
+        .where(TableSettingsLanguages.code3 == 'ell'))
+
     languages_dict = [{
         'code3': x.code3,
         'code2': x.code2,
@@ -55,6 +61,19 @@ def create_languages_dict():
         .all()]


+def audio_language_from_name(lang):
+    lang_map = {
+        'Chinese': 'zh',
+    }
+
+    alpha2_code = lang_map.get(lang, None)
+
+    if alpha2_code is None:
+        return lang
+
+    return language_from_alpha2(alpha2_code)
+
+
 def language_from_alpha2(lang):
     return next((item['name'] for item in languages_dict if item['code2'] == lang[:2]), None)
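Note: audio_language_from_name() normalizes audio-track names reported by Sonarr/Radarr before lookup, and is used by both parsers below. A standalone sketch with stand-in tables (values illustrative):

    LANG_MAP = {'Chinese': 'zh'}                   # names that need remapping
    ALPHA2_TO_NAME = {'zh': 'Chinese Simplified'}  # stand-in for the languages_dict lookup

    def audio_language_from_name(lang):
        alpha2_code = LANG_MAP.get(lang)
        if alpha2_code is None:
            return lang  # unmapped names pass through unchanged
        return ALPHA2_TO_NAME[alpha2_code]

    assert audio_language_from_name('English') == 'English'
    assert audio_language_from_name('Chinese') == 'Chinese Simplified'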


@@ -35,7 +35,7 @@ else:
     # there's missing embedded packages after a commit
     check_if_new_update()

-from app.database import System, database, update, migrate_db, create_db_revision  # noqa E402
+from app.database import System, database, update, migrate_db, create_db_revision, upgrade_languages_profile_hi_values  # noqa E402
 from app.notifier import update_notifier  # noqa E402
 from languages.get_languages import load_language_in_db  # noqa E402
 from app.signalr_client import sonarr_signalr_client, radarr_signalr_client  # noqa E402
@@ -49,6 +49,7 @@ if args.create_db_revision:
     stop_bazarr(EXIT_NORMAL)
 else:
     migrate_db(app)
+    upgrade_languages_profile_hi_values()

 configure_proxy_func()


@@ -28,6 +28,11 @@ def trace(message):
     logging.debug(FEATURE_PREFIX + message)


+def get_language_profiles():
+    return database.execute(
+        select(TableLanguagesProfiles.profileId, TableLanguagesProfiles.name, TableLanguagesProfiles.tag)).all()
+
+
 def update_all_movies():
     movies_full_scan_subtitles()
     logging.info('BAZARR All existing movie subtitles indexed from disk.')
@@ -59,7 +64,7 @@ def update_movie(updated_movie, send_event):
 def get_movie_monitored_status(movie_id):
     existing_movie_monitored = database.execute(
         select(TableMovies.monitored)
-        .where(TableMovies.tmdbId == movie_id))\
+        .where(TableMovies.tmdbId == str(movie_id)))\
         .first()
     if existing_movie_monitored is None:
         return True
@@ -108,6 +113,7 @@ def update_movies(send_event=True):
     else:
         audio_profiles = get_profile_list()
         tagsDict = get_tags()
+        language_profiles = get_language_profiles()

         # Get movies data from radarr
         movies = get_movies_from_radarr_api(apikey_radarr=apikey_radarr)
@@ -178,6 +184,7 @@ def update_movies(send_event=True):
                 if str(movie['tmdbId']) in current_movies_id_db:
                     parsed_movie = movieParser(movie, action='update',
                                                tags_dict=tagsDict,
+                                               language_profiles=language_profiles,
                                                movie_default_profile=movie_default_profile,
                                                audio_profiles=audio_profiles)
                     if not any([parsed_movie.items() <= x for x in current_movies_db_kv]):
@@ -186,6 +193,7 @@ def update_movies(send_event=True):
                 else:
                     parsed_movie = movieParser(movie, action='insert',
                                                tags_dict=tagsDict,
+                                               language_profiles=language_profiles,
                                                movie_default_profile=movie_default_profile,
                                                audio_profiles=audio_profiles)
                     add_movie(parsed_movie, send_event)
@@ -247,6 +255,7 @@ def update_one_movie(movie_id, action, defer_search=False):
     audio_profiles = get_profile_list()
     tagsDict = get_tags()
+    language_profiles = get_language_profiles()

     try:
         # Get movie data from radarr api
@@ -256,10 +265,10 @@ def update_one_movie(movie_id, action, defer_search=False):
             return
         else:
             if action == 'updated' and existing_movie:
-                movie = movieParser(movie_data, action='update', tags_dict=tagsDict,
+                movie = movieParser(movie_data, action='update', tags_dict=tagsDict, language_profiles=language_profiles,
                                     movie_default_profile=movie_default_profile, audio_profiles=audio_profiles)
             elif action == 'updated' and not existing_movie:
-                movie = movieParser(movie_data, action='insert', tags_dict=tagsDict,
+                movie = movieParser(movie_data, action='insert', tags_dict=tagsDict, language_profiles=language_profiles,
                                     movie_default_profile=movie_default_profile, audio_profiles=audio_profiles)
     except Exception:
         logging.exception('BAZARR cannot get movie returned by SignalR feed from Radarr API.')


@@ -3,7 +3,7 @@
 import os

 from app.config import settings
-from languages.get_languages import language_from_alpha2
+from languages.get_languages import audio_language_from_name
 from radarr.info import get_radarr_info
 from utilities.video_analyzer import embedded_audio_reader
 from utilities.path_mappings import path_mappings
@@ -11,7 +11,17 @@ from utilities.path_mappings import path_mappings
 from .converter import RadarrFormatAudioCodec, RadarrFormatVideoCodec


-def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles):
+def get_matching_profile(tags, language_profiles):
+    matching_profile = None
+    if len(tags) > 0:
+        for profileId, name, tag in language_profiles:
+            if tag in tags:
+                matching_profile = profileId
+                break
+    return matching_profile
+
+
+def movieParser(movie, action, tags_dict, language_profiles, movie_default_profile, audio_profiles):
     if 'movieFile' in movie:
         try:
             overview = str(movie['overview'])
@@ -107,9 +117,7 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
         for item in movie['movieFile']['languages']:
             if isinstance(item, dict):
                 if 'name' in item:
-                    language = item['name']
-                    if item['name'] == 'Portuguese (Brazil)':
-                        language = language_from_alpha2('pb')
+                    language = audio_language_from_name(item['name'])
                     audio_language.append(language)

         tags = [d['label'] for d in tags_dict if d['id'] in movie['tags']]
@@ -140,6 +148,15 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
             parsed_movie['subtitles'] = '[]'
             parsed_movie['profileId'] = movie_default_profile

+            if settings.general.movie_tag_enabled:
+                tag_profile = get_matching_profile(tags, language_profiles)
+                if tag_profile:
+                    parsed_movie['profileId'] = tag_profile
+
+            remove_profile_tags_list = settings.general.remove_profile_tags
+            if len(remove_profile_tags_list) > 0:
+                if set(tags) & set(remove_profile_tags_list):
+                    parsed_movie['profileId'] = None
+
     return parsed_movie
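Note: get_matching_profile() pairs an item's Sonarr/Radarr tags against the language profiles' tag column; the first profile whose tag appears in the item's tags wins. A quick standalone example (profile rows illustrative):

    def get_matching_profile(tags, language_profiles):
        # language_profiles rows are (profileId, name, tag); first tag hit wins.
        matching_profile = None
        if len(tags) > 0:
            for profileId, name, tag in language_profiles:
                if tag in tags:
                    matching_profile = profileId
                    break
        return matching_profile

    profiles = [(1, 'Anime', 'anime'), (2, 'Kids', 'kids')]
    assert get_matching_profile(['kids', '4k'], profiles) == 2
    assert get_matching_profile(['documentary'], profiles) is None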


@@ -5,6 +5,7 @@ import os
 from app.config import settings
 from app.database import TableShows, database, select
 from constants import MINIMUM_VIDEO_SIZE
+from languages.get_languages import audio_language_from_name
 from utilities.path_mappings import path_mappings
 from utilities.video_analyzer import embedded_audio_reader
 from sonarr.info import get_sonarr_info
@@ -12,7 +13,17 @@ from sonarr.info import get_sonarr_info
 from .converter import SonarrFormatVideoCodec, SonarrFormatAudioCodec


-def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles):
+def get_matching_profile(tags, language_profiles):
+    matching_profile = None
+    if len(tags) > 0:
+        for profileId, name, tag in language_profiles:
+            if tag in tags:
+                matching_profile = profileId
+                break
+    return matching_profile
+
+
+def seriesParser(show, action, tags_dict, language_profiles, serie_default_profile, audio_profiles):
     overview = show['overview'] if 'overview' in show else ''
     poster = ''
     fanart = ''
@@ -24,9 +35,11 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
         if image['coverType'] == 'fanart':
             fanart = image['url'].split('?')[0]

-    alternate_titles = None
     if show['alternateTitles'] is not None:
-        alternate_titles = str([item['title'] for item in show['alternateTitles']])
+        alternate_titles = [item['title'] for item in show['alternateTitles'] if 'title' in item and item['title'] not
+                            in [None, ''] and item["title"] != show["title"]]
+    else:
+        alternate_titles = []

     tags = [d['label'] for d in tags_dict if d['id'] in show['tags']]
@@ -42,39 +55,37 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
     else:
         audio_language = []

-    if action == 'update':
-        return {'title': show["title"],
-                'path': show["path"],
-                'tvdbId': int(show["tvdbId"]),
-                'sonarrSeriesId': int(show["id"]),
-                'overview': overview,
-                'poster': poster,
-                'fanart': fanart,
-                'audio_language': str(audio_language),
-                'sortTitle': show['sortTitle'],
-                'year': str(show['year']),
-                'alternativeTitles': alternate_titles,
-                'tags': str(tags),
-                'seriesType': show['seriesType'],
-                'imdbId': imdbId,
-                'monitored': str(bool(show['monitored']))}
-    else:
-        return {'title': show["title"],
-                'path': show["path"],
-                'tvdbId': show["tvdbId"],
-                'sonarrSeriesId': show["id"],
-                'overview': overview,
-                'poster': poster,
-                'fanart': fanart,
-                'audio_language': str(audio_language),
-                'sortTitle': show['sortTitle'],
-                'year': str(show['year']),
-                'alternativeTitles': alternate_titles,
-                'tags': str(tags),
-                'seriesType': show['seriesType'],
-                'imdbId': imdbId,
-                'profileId': serie_default_profile,
-                'monitored': str(bool(show['monitored']))}
+    parsed_series = {
+        'title': show["title"],
+        'path': show["path"],
+        'tvdbId': int(show["tvdbId"]),
+        'sonarrSeriesId': int(show["id"]),
+        'overview': overview,
+        'poster': poster,
+        'fanart': fanart,
+        'audio_language': str(audio_language),
+        'sortTitle': show['sortTitle'],
+        'year': str(show['year']),
+        'alternativeTitles': str(alternate_titles),
+        'tags': str(tags),
+        'seriesType': show['seriesType'],
+        'imdbId': imdbId,
+        'monitored': str(bool(show['monitored']))
+    }
+
+    if action == 'insert':
+        parsed_series['profileId'] = serie_default_profile
+
+        if settings.general.serie_tag_enabled:
+            tag_profile = get_matching_profile(tags, language_profiles)
+            if tag_profile:
+                parsed_series['profileId'] = tag_profile
+
+        remove_profile_tags_list = settings.general.remove_profile_tags
+        if len(remove_profile_tags_list) > 0:
+            if set(tags) & set(remove_profile_tags_list):
+                parsed_series['profileId'] = None
+
+    return parsed_series


 def profile_id_to_language(id_, profiles):
@@ -111,13 +122,13 @@ def episodeParser(episode):
             item = episode['episodeFile']['language']
             if isinstance(item, dict):
                 if 'name' in item:
-                    audio_language.append(item['name'])
+                    audio_language.append(audio_language_from_name(item['name']))
         elif 'languages' in episode['episodeFile'] and len(episode['episodeFile']['languages']):
             items = episode['episodeFile']['languages']
             if isinstance(items, list):
                 for item in items:
                     if 'name' in item:
-                        audio_language.append(item['name'])
+                        audio_language.append(audio_language_from_name(item['name']))
     else:
         audio_language = database.execute(
             select(TableShows.audio_language)


@@ -26,6 +26,11 @@ def trace(message):
     logging.debug(FEATURE_PREFIX + message)


+def get_language_profiles():
+    return database.execute(
+        select(TableLanguagesProfiles.profileId, TableLanguagesProfiles.name, TableLanguagesProfiles.tag)).all()
+
+
 def get_series_monitored_table():
     series_monitored = database.execute(
         select(TableShows.tvdbId, TableShows.monitored))\
@@ -58,6 +63,7 @@ def update_series(send_event=True):
     audio_profiles = get_profile_list()
     tagsDict = get_tags()
+    language_profiles = get_language_profiles()

     # Get shows data from Sonarr
     series = get_series_from_sonarr_api(apikey_sonarr=apikey_sonarr)
@@ -111,6 +117,7 @@ def update_series(send_event=True):
             if show['id'] in current_shows_db:
                 updated_series = seriesParser(show, action='update', tags_dict=tagsDict,
+                                              language_profiles=language_profiles,
                                               serie_default_profile=serie_default_profile,
                                               audio_profiles=audio_profiles)
@@ -132,6 +139,7 @@ def update_series(send_event=True):
                     event_stream(type='series', payload=show['id'])
             else:
                 added_series = seriesParser(show, action='insert', tags_dict=tagsDict,
+                                            language_profiles=language_profiles,
                                             serie_default_profile=serie_default_profile,
                                             audio_profiles=audio_profiles)
@@ -203,7 +211,7 @@ def update_one_series(series_id, action):
     audio_profiles = get_profile_list()
     tagsDict = get_tags()
+    language_profiles = get_language_profiles()

     try:
         # Get series data from sonarr api
         series = None
@@ -215,10 +223,12 @@ def update_one_series(series_id, action):
         else:
             if action == 'updated' and existing_series:
                 series = seriesParser(series_data[0], action='update', tags_dict=tagsDict,
+                                      language_profiles=language_profiles,
                                       serie_default_profile=serie_default_profile,
                                       audio_profiles=audio_profiles)
             elif action == 'updated' and not existing_series:
                 series = seriesParser(series_data[0], action='insert', tags_dict=tagsDict,
+                                      language_profiles=language_profiles,
                                       serie_default_profile=serie_default_profile,
                                       audio_profiles=audio_profiles)
     except Exception:


@@ -182,7 +182,9 @@ def list_missing_subtitles_movies(no=None, send_event=True):
                 if any(x['code2'] == language['language'] for x in get_audio_profile_languages(
                         movie_subtitles.audio_language)):
                     continue
-            desired_subtitles_list.append([language['language'], language['forced'], language['hi']])
+            desired_subtitles_list.append({'language': language['language'],
+                                           'forced': language['forced'],
+                                           'hi': language['hi']})

         # get existing subtitles
         actual_subtitles_list = []
@@ -204,7 +206,9 @@ def list_missing_subtitles_movies(no=None, send_event=True):
                     elif subtitles[1] == 'hi':
                         forced = False
                         hi = True
-                    actual_subtitles_list.append([lang, str(forced), str(hi)])
+                    actual_subtitles_list.append({'language': lang,
+                                                  'forced': str(forced),
+                                                  'hi': str(hi)})

         # check if cutoff is reached and skip any further check
         cutoff_met = False
@@ -212,7 +216,9 @@ def list_missing_subtitles_movies(no=None, send_event=True):
         if cutoff_temp_list:
             for cutoff_temp in cutoff_temp_list:
-                cutoff_language = [cutoff_temp['language'], cutoff_temp['forced'], cutoff_temp['hi']]
+                cutoff_language = {'language': cutoff_temp['language'],
+                                   'forced': cutoff_temp['forced'],
+                                   'hi': cutoff_temp['hi']}
                 if cutoff_temp['audio_exclude'] == 'True' and \
                         any(x['code2'] == cutoff_temp['language'] for x in
                             get_audio_profile_languages(movie_subtitles.audio_language)):
@@ -220,7 +226,10 @@ def list_missing_subtitles_movies(no=None, send_event=True):
                 elif cutoff_language in actual_subtitles_list:
                     cutoff_met = True
                 # HI is considered as good as normal
-                elif cutoff_language and [cutoff_language[0], 'False', 'True'] in actual_subtitles_list:
+                elif (cutoff_language and
+                      {'language': cutoff_language['language'],
+                       'forced': 'False',
+                       'hi': 'True'} in actual_subtitles_list):
                     cutoff_met = True

         if cutoff_met:
@@ -232,21 +241,23 @@ def list_missing_subtitles_movies(no=None, send_event=True):
                 if item not in actual_subtitles_list:
                     missing_subtitles_list.append(item)

             # remove missing that have forced or hi subtitles for this language in existing
             for item in actual_subtitles_list:
-                if item[2] == 'True':
+                if item['hi'] == 'True':
                     try:
-                        missing_subtitles_list.remove([item[0], 'False', 'False'])
+                        missing_subtitles_list.remove({'language': item['language'],
+                                                       'forced': 'False',
+                                                       'hi': 'False'})
                     except ValueError:
                         pass

             # make the missing languages list looks like expected
             missing_subtitles_output_list = []
             for item in missing_subtitles_list:
-                lang = item[0]
-                if item[1] == 'True':
+                lang = item['language']
+                if item['forced'] == 'True':
                     lang += ':forced'
-                elif item[2] == 'True':
+                elif item['hi'] == 'True':
                     lang += ':hi'
                 missing_subtitles_output_list.append(lang)
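Note: with dicts replacing the positional [language, forced, hi] triplets, membership tests such as the "HI counts as normal" cutoff rule read unambiguously. A condensed standalone example:

    actual = [{'language': 'en', 'forced': 'False', 'hi': 'True'}]
    cutoff = {'language': 'en', 'forced': 'False', 'hi': 'False'}

    # Exact match, or the HI variant of the same language, satisfies the cutoff.
    cutoff_met = cutoff in actual or {**cutoff, 'hi': 'True'} in actual
    assert cutoff_met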


@@ -182,7 +182,9 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
                 if any(x['code2'] == language['language'] for x in get_audio_profile_languages(
                         episode_subtitles.audio_language)):
                     continue
-            desired_subtitles_list.append([language['language'], language['forced'], language['hi']])
+            desired_subtitles_list.append({'language': language['language'],
+                                           'forced': language['forced'],
+                                           'hi': language['hi']})

         # get existing subtitles
         actual_subtitles_list = []
@@ -204,7 +206,9 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
                     elif subtitles[1] == 'hi':
                         forced = False
                         hi = True
-                    actual_subtitles_list.append([lang, str(forced), str(hi)])
+                    actual_subtitles_list.append({'language': lang,
+                                                  'forced': str(forced),
+                                                  'hi': str(hi)})

         # check if cutoff is reached and skip any further check
         cutoff_met = False
@@ -212,7 +216,9 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
         if cutoff_temp_list:
             for cutoff_temp in cutoff_temp_list:
-                cutoff_language = [cutoff_temp['language'], cutoff_temp['forced'], cutoff_temp['hi']]
+                cutoff_language = {'language': cutoff_temp['language'],
+                                   'forced': cutoff_temp['forced'],
+                                   'hi': cutoff_temp['hi']}
                 if cutoff_temp['audio_exclude'] == 'True' and \
                         any(x['code2'] == cutoff_temp['language'] for x in
                             get_audio_profile_languages(episode_subtitles.audio_language)):
@@ -220,7 +226,10 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
                 elif cutoff_language in actual_subtitles_list:
                     cutoff_met = True
                 # HI is considered as good as normal
-                elif [cutoff_language[0], 'False', 'True'] in actual_subtitles_list:
+                elif (cutoff_language and
+                      {'language': cutoff_language['language'],
+                       'forced': 'False',
+                       'hi': 'True'} in actual_subtitles_list):
                     cutoff_met = True

         if cutoff_met:
@@ -234,21 +243,23 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
                 if item not in actual_subtitles_list:
                     missing_subtitles_list.append(item)

             # remove missing that have hi subtitles for this language in existing
             for item in actual_subtitles_list:
-                if item[2] == 'True':
+                if item['hi'] == 'True':
                     try:
-                        missing_subtitles_list.remove([item[0], 'False', 'False'])
+                        missing_subtitles_list.remove({'language': item['language'],
+                                                       'forced': 'False',
+                                                       'hi': 'False'})
                     except ValueError:
                         pass

             # make the missing languages list looks like expected
             missing_subtitles_output_list = []
             for item in missing_subtitles_list:
-                lang = item[0]
-                if item[1] == 'True':
+                lang = item['language']
+                if item['forced'] == 'True':
                     lang += ':forced'
-                elif item[2] == 'True':
+                elif item['hi'] == 'True':
                     lang += ':hi'
                 missing_subtitles_output_list.append(lang)


@@ -2,7 +2,6 @@
 import os
 import logging
-import re

 from guess_language import guess_language
 from subliminal_patch import core
@@ -136,6 +135,7 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
                         continue
                     text = text.decode(encoding)

-                    if bool(re.search(core.HI_REGEX, text)):
+                    if core.parse_for_hi_regex(subtitle_text=text,
+                                               alpha3_language=language.alpha3 if hasattr(language, 'alpha3') else None):
                         subtitles[subtitle] = Language.rebuild(subtitles[subtitle], forced=False, hi=True)
     return subtitles
return subtitles

View file

@@ -158,8 +158,9 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
             subtitle.language.forced = True
         else:
             subtitle.language.forced = False
-        if use_original_format == 'True':
-            subtitle.use_original_format = use_original_format
+
+        if use_original_format in ("1", "True"):
+            subtitle.use_original_format = True

         subtitle.mods = get_array_from(settings.general.subzero_mods)
         video = get_video(force_unicode(path), title, sceneName, providers={provider}, media_type=media_type)
         if video:


@@ -88,6 +88,7 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
                 from .sync import sync_subtitles
                 sync_subtitles(video_path=path, srt_path=downloaded_path,
                                forced=subtitle.language.forced,
+                               hi=subtitle.language.hi,
                                srt_lang=downloaded_language_code2,
                                percent_score=percent_score,
                                sonarr_series_id=episode_metadata.sonarrSeriesId,
@@ -106,6 +107,7 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
                 from .sync import sync_subtitles
                 sync_subtitles(video_path=path, srt_path=downloaded_path,
                                forced=subtitle.language.forced,
+                               hi=subtitle.language.hi,
                                srt_lang=downloaded_language_code2,
                                percent_score=percent_score,
                                radarr_id=movie_metadata.radarrId)


@@ -4,10 +4,12 @@ from .ffprobe import refine_from_ffprobe
 from .database import refine_from_db
 from .arr_history import refine_from_arr_history
 from .anidb import refine_from_anidb
+from .anilist import refine_from_anilist

 registered = {
     "database": refine_from_db,
     "ffprobe": refine_from_ffprobe,
     "arr_history": refine_from_arr_history,
     "anidb": refine_from_anidb,
+    "anilist": refine_from_anilist,  # Must run AFTER AniDB
 }


@@ -4,11 +4,13 @@
 import logging
 import requests
 from collections import namedtuple
-from datetime import timedelta
+from datetime import datetime, timedelta
 from requests.exceptions import HTTPError

 from app.config import settings
 from subliminal import Episode, region
+from subliminal.cache import REFINER_EXPIRATION_TIME
+from subliminal_patch.exceptions import TooManyRequests

 try:
     from lxml import etree
@@ -18,16 +20,40 @@ except ImportError:
 except ImportError:
     import xml.etree.ElementTree as etree

-refined_providers = {'animetosho'}
+refined_providers = {'animetosho', 'jimaku'}
+providers_requiring_anidb_api = {'animetosho'}

 logger = logging.getLogger(__name__)

 api_url = 'http://api.anidb.net:9001/httpapi'

 cache_key_refiner = "anidb_refiner"

+# Soft Limit for amount of requests per day
+daily_limit_request_count = 200
+

 class AniDBClient(object):
     def __init__(self, api_client_key=None, api_client_ver=1, session=None):
         self.session = session or requests.Session()
         self.api_client_key = api_client_key
         self.api_client_ver = api_client_ver
+        self.cache = region.get(cache_key_refiner, expiration_time=timedelta(days=1).total_seconds())
+
+    @property
+    def is_throttled(self):
+        return self.cache and self.cache.get('is_throttled')
+
+    @property
+    def has_api_credentials(self):
+        return self.api_client_key != '' and self.api_client_key is not None
+
+    @property
+    def daily_api_request_count(self):
+        if not self.cache:
+            return 0
+
+        return self.cache.get('daily_api_request_count', 0)

     AnimeInfo = namedtuple('AnimeInfo', ['anime', 'episode_offset'])
@@ -43,7 +69,9 @@ class AniDBClient(object):
         return r.content

     @region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
-    def get_series_id(self, mappings, tvdb_series_season, tvdb_series_id, episode):
+    def get_show_information(self, tvdb_series_id, tvdb_series_season, episode):
+        mappings = etree.fromstring(self.get_series_mappings())
+
         # Enrich the collection of anime with the episode offset
         animes = [
             self.AnimeInfo(anime, int(anime.attrib.get('episodeoffset', 0)))
@@ -52,40 +80,78 @@ class AniDBClient(object):
             )
         ]

+        is_special_entry = False
         if not animes:
-            return None, None
+            # Some entries will store TVDB seasons in a nested mapping list, identifiable by the value 'a' as the season
+            special_entries = mappings.findall(
+                f".//anime[@tvdbid='{tvdb_series_id}'][@defaulttvdbseason='a']"
+            )

-        # Sort the anime by offset in ascending order
-        animes.sort(key=lambda a: a.episode_offset)
+            if not special_entries:
+                return None, None, None

-        # Different from Tvdb, Anidb have different ids for the Parts of a season
-        anidb_id = None
-        offset = 0
+            is_special_entry = True
+            for special_entry in special_entries:
+                mapping_list = special_entry.findall(f".//mapping[@tvdbseason='{tvdb_series_season}']")
+                if len(mapping_list) > 0:
+                    anidb_id = int(special_entry.attrib.get('anidbid'))
+                    offset = int(mapping_list[0].attrib.get('offset', 0))

-        for index, anime_info in enumerate(animes):
-            anime, episode_offset = anime_info
-            anidb_id = int(anime.attrib.get('anidbid'))
-            if episode > episode_offset:
-                anidb_id = anidb_id
-                offset = episode_offset
+        if not is_special_entry:
+            # Sort the anime by offset in ascending order
+            animes.sort(key=lambda a: a.episode_offset)

-        return anidb_id, episode - offset
+            # Different from Tvdb, Anidb have different ids for the Parts of a season
+            anidb_id = None
+            offset = 0
+
+            for index, anime_info in enumerate(animes):
+                anime, episode_offset = anime_info
+                mapping_list = anime.find('mapping-list')
+
+                # Handle mapping list for Specials
+                if mapping_list:
+                    for mapping in mapping_list.findall("mapping"):
+                        if mapping.text is None:
+                            continue
+
+                        # Mapping values are usually like ;1-1;2-1;3-1;
+                        for episode_ref in mapping.text.split(';'):
+                            if not episode_ref:
+                                continue
+
+                            anidb_episode, tvdb_episode = map(int, episode_ref.split('-'))
+                            if tvdb_episode == episode:
+                                anidb_id = int(anime.attrib.get('anidbid'))
+
+                                return anidb_id, anidb_episode, 0
+
+                if episode > episode_offset:
+                    anidb_id = int(anime.attrib.get('anidbid'))
+                    offset = episode_offset
+
+        return anidb_id, episode - offset, offset

     @region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
-    def get_series_episodes_ids(self, tvdb_series_id, season, episode):
-        mappings = etree.fromstring(self.get_series_mappings())
-
-        series_id, episode_no = self.get_series_id(mappings, season, tvdb_series_id, episode)
-
+    def get_episode_ids(self, series_id, episode_no):
         if not series_id:
-            return None, None
+            return None

         episodes = etree.fromstring(self.get_episodes(series_id))

-        return series_id, int(episodes.find(f".//episode[epno='{episode_no}']").attrib.get('id'))
+        episode = episodes.find(f".//episode[epno='{episode_no}']")

-    @region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
+        if not episode:
+            return series_id, None
+
+        return series_id, int(episode.attrib.get('id'))
+
+    @region.cache_on_arguments(expiration_time=REFINER_EXPIRATION_TIME)
     def get_episodes(self, series_id):
+        if self.daily_api_request_count >= 200:
+            raise TooManyRequests('Daily API request limit exceeded')
+
         r = self.session.get(
             api_url,
             params={
@@ -102,10 +168,12 @@ class AniDBClient(object):
         response_code = xml_root.attrib.get('code')
         if response_code == '500':
-            raise HTTPError('AniDB API Abuse detected. Banned status.')
+            raise TooManyRequests('AniDB API Abuse detected. Banned status.')
         elif response_code == '302':
             raise HTTPError('AniDB API Client error. Client is disabled or does not exists.')

+        self.increment_daily_quota()
+
         episode_elements = xml_root.find('episodes')

         if not episode_elements:
@@ -113,11 +181,25 @@ class AniDBClient(object):
         return etree.tostring(episode_elements, encoding='utf8', method='xml')

+    def increment_daily_quota(self):
+        daily_quota = self.daily_api_request_count + 1
+
+        if not self.cache:
+            region.set(cache_key_refiner, {'daily_api_request_count': daily_quota})
+            return
+
+        self.cache['daily_api_request_count'] = daily_quota
+        region.set(cache_key_refiner, self.cache)
+
+    @staticmethod
+    def mark_as_throttled():
+        region.set(cache_key_refiner, {'is_throttled': True})
+

 def refine_from_anidb(path, video):
     if not isinstance(video, Episode) or not video.series_tvdb_id:
         logging.debug(f'Video is not an Anime TV series, skipping refinement for {video}')
         return

     if refined_providers.intersection(settings.general.enabled_providers) and video.series_anidb_id is None:
@@ -129,12 +211,35 @@ def refine_anidb_ids(video):
     season = video.season if video.season else 0

-    anidb_series_id, anidb_episode_id = anidb_client.get_series_episodes_ids(video.series_tvdb_id, season, video.episode)
-
-    if not anidb_episode_id:
-        logging.error(f'Could not find anime series {video.series}')
+    anidb_series_id, anidb_episode_no, anidb_season_episode_offset = anidb_client.get_show_information(
+        video.series_tvdb_id,
+        season,
+        video.episode,
+    )
+
+    if not anidb_series_id:
+        logger.error(f'Could not find anime series {video.series}')
         return video

+    anidb_episode_id = None
+    if anidb_client.has_api_credentials:
+        if anidb_client.is_throttled:
+            logger.warning(f'API daily limit reached. Skipping episode ID refinement for {video.series}')
+        else:
+            try:
+                anidb_episode_id = anidb_client.get_episode_ids(
+                    anidb_series_id,
+                    anidb_episode_no
+                )
+            except TooManyRequests:
+                logger.error(f'API daily limit reached while refining {video.series}')
+                anidb_client.mark_as_throttled()
+    else:
+        intersect = providers_requiring_anidb_api.intersection(settings.general.enabled_providers)
+        if len(intersect) >= 1:
+            logger.warn(f'AniDB API credentials are not fully set up, the following providers may not work: {intersect}')
+
     video.series_anidb_id = anidb_series_id
     video.series_anidb_episode_id = anidb_episode_id
+    video.series_anidb_episode_no = anidb_episode_no
+    video.series_anidb_season_episode_offset = anidb_season_episode_offset
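Note: the refiner now keeps a soft daily quota and a throttled flag in the subliminal cache region, skipping episode-ID lookups once AniDB signals abuse. A standalone sketch of that accounting, with a plain dict standing in for the cache region:

    DAILY_LIMIT = 200  # mirrors daily_limit_request_count (soft limit)
    cache = {}         # stand-in for the dogpile cache region

    def request_allowed():
        return (cache.get('daily_api_request_count', 0) < DAILY_LIMIT
                and not cache.get('is_throttled'))

    def increment_daily_quota():
        cache['daily_api_request_count'] = cache.get('daily_api_request_count', 0) + 1

    def mark_as_throttled():
        cache['is_throttled'] = True

    if request_allowed():
        increment_daily_quota()  # count one successful AniDB API call
    print(cache)  # {'daily_api_request_count': 1}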


@@ -0,0 +1,79 @@
+# coding=utf-8
+# fmt: off
+
+import logging
+import time
+import requests
+
+from collections import namedtuple
+from datetime import timedelta
+
+from app.config import settings
+from subliminal import Episode, region, __short_version__
+
+logger = logging.getLogger(__name__)
+refined_providers = {'jimaku'}
+
+
+class AniListClient(object):
+    def __init__(self, session=None, timeout=10):
+        self.session = session or requests.Session()
+        self.session.timeout = timeout
+        self.session.headers['Content-Type'] = 'application/json'
+        self.session.headers['User-Agent'] = 'Subliminal/%s' % __short_version__
+
+    @region.cache_on_arguments(expiration_time=timedelta(days=1).total_seconds())
+    def get_series_mappings(self):
+        r = self.session.get(
+            'https://raw.githubusercontent.com/Fribb/anime-lists/master/anime-list-mini.json'
+        )
+        r.raise_for_status()
+        return r.json()
+
+    def get_series_id(self, candidate_id_name, candidate_id_value):
+        anime_list = self.get_series_mappings()
+
+        tag_map = {
+            "series_anidb_id": "anidb_id",
+            "imdb_id": "imdb_id"
+        }
+        mapped_tag = tag_map.get(candidate_id_name, candidate_id_name)
+
+        obj = [obj for obj in anime_list if mapped_tag in obj and str(obj[mapped_tag]) == str(candidate_id_value)]
+        logger.debug(f"Based on '{mapped_tag}': '{candidate_id_value}', anime-list matched: {obj}")
+
+        if len(obj) > 0:
+            return obj[0]["anilist_id"]
+        else:
+            logger.debug(f"Could not find corresponding AniList ID with '{mapped_tag}': {candidate_id_value}")
+            return None
+
+
+def refine_from_anilist(path, video):
+    # Safety checks
+    if isinstance(video, Episode):
+        if not video.series_anidb_id:
+            return
+
+    if refined_providers.intersection(settings.general.enabled_providers) and video.anilist_id is None:
+        refine_anilist_ids(video)
+
+
+def refine_anilist_ids(video):
+    anilist_client = AniListClient()
+
+    if isinstance(video, Episode):
+        candidate_id_name = "series_anidb_id"
+    else:
+        candidate_id_name = "imdb_id"
+
+    candidate_id_value = getattr(video, candidate_id_name, None)
+    if not candidate_id_value:
+        logger.error(f"Found no value for property {candidate_id_name} of video.")
+        return video
+
+    anilist_id = anilist_client.get_series_id(candidate_id_name, candidate_id_value)
+    if not anilist_id:
+        return video
+
+    video.anilist_id = anilist_id


@@ -8,7 +8,7 @@ from app.config import settings
 from subtitles.tools.subsyncer import SubSyncer


-def sync_subtitles(video_path, srt_path, srt_lang, forced, percent_score, sonarr_series_id=None,
+def sync_subtitles(video_path, srt_path, srt_lang, forced, hi, percent_score, sonarr_series_id=None,
                    sonarr_episode_id=None, radarr_id=None):
     if forced:
         logging.debug('BAZARR cannot sync forced subtitles. Skipping sync routine.')
@@ -30,6 +30,8 @@ def sync_subtitles(video_path, srt_path, srt_lang, forced, hi, percent_score, sonarr
             'video_path': video_path,
             'srt_path': srt_path,
             'srt_lang': srt_lang,
+            'forced': forced,
+            'hi': hi,
             'max_offset_seconds': str(settings.subsync.max_offset_seconds),
             'no_fix_framerate': settings.subsync.no_fix_framerate,
             'gss': settings.subsync.gss,


@@ -30,7 +30,7 @@ class SubSyncer:
         self.vad = 'subs_then_webrtc'
         self.log_dir_path = os.path.join(args.config_dir, 'log')

-    def sync(self, video_path, srt_path, srt_lang,
+    def sync(self, video_path, srt_path, srt_lang, hi, forced,
              max_offset_seconds, no_fix_framerate, gss, reference=None,
             sonarr_series_id=None, sonarr_episode_id=None, radarr_id=None):
         self.reference = video_path
@@ -97,8 +97,7 @@ class SubSyncer:
             result = run(self.args)
         except Exception:
             logging.exception(
-                f'BAZARR an exception occurs during the synchronization process for this subtitles: {self.srtin}')
-            raise OSError
+                f'BAZARR an exception occurs during the synchronization process for this subtitle file: {self.srtin}')
         else:
             if settings.subsync.debug:
                 return result
@@ -118,10 +117,10 @@ class SubSyncer:
                                               downloaded_language_code2=srt_lang,
                                               downloaded_provider=None,
                                               score=None,
-                                              forced=None,
+                                              forced=forced,
                                               subtitle_id=None,
                                               reversed_subtitles_path=srt_path,
-                                              hearing_impaired=None)
+                                              hearing_impaired=hi)

                 if sonarr_episode_id:
                     history_log(action=5, sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id,


@@ -6,12 +6,16 @@ import pysubs2
 from subliminal_patch.core import get_subtitle_path
 from subzero.language import Language
 from deep_translator import GoogleTranslator
+from deep_translator.exceptions import TooManyRequests, RequestError, TranslationNotFound
+from time import sleep
+from concurrent.futures import ThreadPoolExecutor

 from languages.custom_lang import CustomLanguage
 from languages.get_languages import alpha3_from_alpha2, language_from_alpha2, language_from_alpha3
 from radarr.history import history_log_movie
 from sonarr.history import history_log
 from subtitles.processing import ProcessSubtitlesResult
+from app.event_handler import show_progress, hide_progress


 def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, forced, hi, media_type, sonarr_series_id,
@@ -33,8 +37,6 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
     logging.debug(f'BAZARR is translating in {lang_obj} this subtitles {source_srt_file}')

-    max_characters = 5000
-
     dest_srt_file = get_subtitle_path(video_path,
                                       language=lang_obj if isinstance(lang_obj, Language) else lang_obj.subzero_language(),
                                       extension='.srt',
@@ -44,40 +46,53 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
     subs = pysubs2.load(source_srt_file, encoding='utf-8')
     subs.remove_miscellaneous_events()
     lines_list = [x.plaintext for x in subs]
-    joined_lines_str = '\n\n\n'.join(lines_list)
+    lines_list_len = len(lines_list)

-    logging.debug(f'BAZARR splitting subtitles into {max_characters} characters blocks')
-    lines_block_list = []
-    translated_lines_list = []
-    while len(joined_lines_str):
-        partial_lines_str = joined_lines_str[:max_characters]
-
-        if len(joined_lines_str) > max_characters:
-            new_partial_lines_str = partial_lines_str.rsplit('\n\n\n', 1)[0]
-        else:
-            new_partial_lines_str = partial_lines_str
-
-        lines_block_list.append(new_partial_lines_str)
-        joined_lines_str = joined_lines_str.replace(new_partial_lines_str, '')
-
-    logging.debug(f'BAZARR is sending {len(lines_block_list)} blocks to Google Translate')
-    for block_str in lines_block_list:
+    def translate_line(id, line, attempt):
         try:
-            translated_partial_srt_text = GoogleTranslator(source='auto',
-                                                           target=language_code_convert_dict.get(lang_obj.alpha2,
-                                                                                                 lang_obj.alpha2)
-                                                           ).translate(text=block_str)
-        except Exception:
-            logging.exception(f'BAZARR Unable to translate subtitles {source_srt_file}')
-            return False
+            translated_text = GoogleTranslator(
+                source='auto',
+                target=language_code_convert_dict.get(lang_obj.alpha2, lang_obj.alpha2)
+            ).translate(text=line)
+        except TooManyRequests:
+            if attempt <= 5:
+                sleep(1)
+                translate_line(id, line, attempt+1)
+            else:
+                logging.debug(f'Too many requests while translating {line}')
+                translated_lines.append({'id': id, 'line': line})
+        except (RequestError, TranslationNotFound):
+            logging.debug(f'Unable to translate line {line}')
+            translated_lines.append({'id': id, 'line': line})
         else:
-            translated_partial_srt_list = translated_partial_srt_text.split('\n\n\n')
-            translated_lines_list += translated_partial_srt_list
+            translated_lines.append({'id': id, 'line': translated_text})
+        finally:
+            show_progress(id=f'translate_progress_{dest_srt_file}',
+                          header=f'Translating subtitles lines to {language_from_alpha3(to_lang)}...',
+                          name='',
+                          value=len(translated_lines),
+                          count=lines_list_len)
+
+    logging.debug(f'BAZARR is sending {lines_list_len} blocks to Google Translate')
+
+    pool = ThreadPoolExecutor(max_workers=10)
+
+    translated_lines = []
+
+    for i, line in enumerate(lines_list):
+        pool.submit(translate_line, i, line, 1)
+
+    pool.shutdown(wait=True)
+
+    for i, line in enumerate(translated_lines):
+        lines_list[line['id']] = line['line']
+
+    hide_progress(id=f'translate_progress_{dest_srt_file}')

     logging.debug(f'BAZARR saving translated subtitles to {dest_srt_file}')
     for i, line in enumerate(subs):
         try:
-            line.plaintext = translated_lines_list[i]
+            line.plaintext = lines_list[i]
         except IndexError:
             logging.error(f'BAZARR is unable to translate malformed subtitles: {source_srt_file}')
             return False
@@ -94,10 +109,10 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
                                       downloaded_language_code2=to_lang,
                                       downloaded_provider=None,
                                       score=None,
-                                      forced=None,
+                                      forced=forced,
                                       subtitle_id=None,
                                       reversed_subtitles_path=dest_srt_file,
-                                      hearing_impaired=None)
+                                      hearing_impaired=hi)

     if media_type == 'series':
         history_log(action=6, sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id, result=result)
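Note: translation is now fanned out line by line over a thread pool, with a bounded retry on rate limiting and per-line progress reporting. A minimal standalone sketch of the same pattern (the translator call is stubbed out):

    from concurrent.futures import ThreadPoolExecutor
    from time import sleep

    results = []

    def translate_line(id, line, attempt):
        try:
            text = line.upper()  # stand-in for the GoogleTranslator call
        except Exception:
            if attempt <= 5:
                sleep(1)
                translate_line(id, line, attempt + 1)  # bounded retry
            else:
                results.append({'id': id, 'line': line})  # give up, keep original
        else:
            results.append({'id': id, 'line': text})

    with ThreadPoolExecutor(max_workers=10) as pool:
        for i, line in enumerate(['hello', 'world']):
            pool.submit(translate_line, i, line, 1)

    # Reassemble in original order, since completion order is nondeterministic.
    ordered = [''] * len(results)
    for item in results:
        ordered[item['id']] = item['line']
    print(ordered)  # ['HELLO', 'WORLD']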


@@ -69,14 +69,12 @@ def upgrade_subtitles():
                      .join(TableEpisodes, onclause=TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)
                      .join(episodes_to_upgrade, onclause=TableHistory.id == episodes_to_upgrade.c.id, isouter=True)
                      .where(episodes_to_upgrade.c.id.is_not(None)))
-                     .all() if _language_still_desired(x.language, x.profileId)]
+                     .all() if _language_still_desired(x.language, x.profileId) and
+                     x.subtitles_path in x.external_subtitles and
+                     x.video_path == x.path
+                     ]

         for item in episodes_data:
-            if item['upgradable']:
-                if item['subtitles_path'] not in item['external_subtitles'] or \
-                        not item['video_path'] == item['path']:
-                    item.update({"upgradable": False})
-
             del item['path']
             del item['external_subtitles']
@@ -156,14 +154,12 @@ def upgrade_subtitles():
                    .join(TableMovies, onclause=TableHistoryMovie.radarrId == TableMovies.radarrId)
                    .join(movies_to_upgrade, onclause=TableHistoryMovie.id == movies_to_upgrade.c.id, isouter=True)
                    .where(movies_to_upgrade.c.id.is_not(None)))
-                   .all() if _language_still_desired(x.language, x.profileId)]
+                   .all() if _language_still_desired(x.language, x.profileId) and
+                   x.subtitles_path in x.external_subtitles and
+                   x.video_path == x.path
+                   ]

         for item in movies_data:
-            if item['upgradable']:
-                if item['subtitles_path'] not in item['external_subtitles'] or \
-                        not item['video_path'] == item['path']:
-                    item.update({"upgradable": False})
-
             del item['path']
             del item['external_subtitles']


@@ -138,7 +138,7 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
         series_id = episode_metadata.sonarrSeriesId
         episode_id = episode_metadata.sonarrEpisodeId

         sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, percent_score=100,
-                       sonarr_series_id=episode_metadata.sonarrSeriesId, forced=forced,
+                       sonarr_series_id=episode_metadata.sonarrSeriesId, forced=forced, hi=hi,
                        sonarr_episode_id=episode_metadata.sonarrEpisodeId)
     else:
         if not movie_metadata:
@@ -146,7 +146,7 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
         series_id = ""
         episode_id = movie_metadata.radarrId

         sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, percent_score=100,
-                       radarr_id=movie_metadata.radarrId, forced=forced)
+                       radarr_id=movie_metadata.radarrId, forced=forced, hi=hi)

     if use_postprocessing:
         command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language, uploaded_language_code2,

View file

@@ -121,7 +121,9 @@ def subtitles_sync_references(subtitles_path, sonarr_episode_id=None, radarr_mov
if not media_data:
return references_dict
data = parse_video_metadata(media_data.path, media_data.file_size, media_data.episode_file_id, None,
mapped_path = path_mappings.path_replace(media_data.path)
data = parse_video_metadata(mapped_path, media_data.file_size, media_data.episode_file_id, None,
use_cache=True)
elif radarr_movie_id:
media_data = database.execute(
@@ -132,7 +134,9 @@ def subtitles_sync_references(subtitles_path, sonarr_episode_id=None, radarr_mov
if not media_data:
return references_dict
data = parse_video_metadata(media_data.path, media_data.file_size, None, media_data.movie_file_id,
mapped_path = path_mappings.path_replace_movie(media_data.path)
data = parse_video_metadata(mapped_path, media_data.file_size, None, media_data.movie_file_id,
use_cache=True)
if not data:
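path_mappings translates paths as stored by Sonarr/Radarr into paths reachable from the Bazarr host, and the fix above applies that mapping before handing the file to ffprobe/mediainfo. For illustration only, assuming a configured mapping of /tv (Sonarr) to /mnt/media/tv (Bazarr):

# hypothetical mapping: '/tv' -> '/mnt/media/tv'
mapped_path = path_mappings.path_replace('/tv/Show/S01E01.mkv')
# -> '/mnt/media/tv/Show/S01E01.mkv', a path that actually exists for Bazarr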
@@ -213,6 +217,25 @@ def subtitles_sync_references(subtitles_path, sonarr_episode_id=None, radarr_mov
def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=None, use_cache=True):
"""
This function returns the video file properties as parsed by knowit using ffprobe or mediainfo, using the cached
value by default.
@type file: string
@param file: Properly mapped path of a video file
@type file_size: int
@param file_size: File size in bytes of the video file
@type episode_file_id: int or None
@param episode_file_id: episode file ID from Sonarr (or None if it's a movie)
@type movie_file_id: int or None
@param movie_file_id: movie file ID from Radarr (or None if it's an episode)
@type use_cache: bool
@param use_cache: whether to return the cached value (when available and still valid) instead of parsing the file
@rtype: dict or None
@return: a dictionary including the video file properties as parsed by ffprobe or mediainfo
"""
# Define default data keys value
data = {
"ffprobe": {},
@@ -228,12 +251,12 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
if episode_file_id:
cache_key = database.execute(
select(TableEpisodes.ffprobe_cache)
.where(TableEpisodes.path == path_mappings.path_replace_reverse(file))) \
.where(TableEpisodes.episode_file_id == episode_file_id)) \
.first()
elif movie_file_id:
cache_key = database.execute(
select(TableMovies.ffprobe_cache)
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(file))) \
.where(TableMovies.movie_file_id == movie_file_id)) \
.first()
else:
cache_key = None
@@ -243,6 +266,7 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
# Unpickle ffprobe cache
cached_value = pickle.loads(cache_key.ffprobe_cache)
except Exception:
# No cached value available, we'll parse the file
pass
else:
# Check if file size and file id matches and if so, we return the cached value if available for the
@@ -281,9 +305,7 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
# or if we have mediainfo available
elif mediainfo_path:
try:
# disabling mediainfo path temporarily until issue with knowit is fixed.
# data["mediainfo"] = know(video_path=file, context={"provider": "mediainfo", "mediainfo": mediainfo_path})
data["mediainfo"] = know(video_path=file, context={"provider": "mediainfo"})
data["mediainfo"] = know(video_path=file, context={"provider": "mediainfo", "mediainfo": mediainfo_path})
except KnowitException as e:
logging.error(f"BAZARR mediainfo cannot analyze this video file {file}. Could it be corrupted? {e}")
return None
@@ -291,19 +313,19 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
else:
logging.error("BAZARR require ffmpeg/ffprobe or mediainfo, please install it and make sure to choose it in "
"Settings-->Subtitles.")
return
return None
# we write to db the result and return the newly cached ffprobe dict
if episode_file_id:
database.execute(
update(TableEpisodes)
.values(ffprobe_cache=pickle.dumps(data, pickle.HIGHEST_PROTOCOL))
.where(TableEpisodes.path == path_mappings.path_replace_reverse(file)))
.where(TableEpisodes.episode_file_id == episode_file_id))
elif movie_file_id:
database.execute(
update(TableMovies)
.values(ffprobe_cache=pickle.dumps(data, pickle.HIGHEST_PROTOCOL))
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(file)))
.where(TableMovies.movie_file_id == movie_file_id))
return data
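With the cache now keyed on the Sonarr/Radarr file id instead of a reverse-mapped path, the read/validate half boils down to the cycle below. A condensed sketch of the episode branch only, assuming the cached dict stores file_size alongside the parsed data as the hunks above suggest:

import pickle

def cached_episode_ffprobe(episode_file_id, file_size):
    row = database.execute(
        select(TableEpisodes.ffprobe_cache)
        .where(TableEpisodes.episode_file_id == episode_file_id)).first()
    if row and row.ffprobe_cache:
        try:
            cached = pickle.loads(row.ffprobe_cache)
        except Exception:
            pass  # unreadable cache: fall through and reparse the file
        else:
            if cached.get('file_size') == file_size:
                return cached  # still valid for this exact file
    return None  # caller reparses with ffprobe/mediainfo and stores the result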

View file

@@ -130,7 +130,8 @@ class Episode(Video):
"""
def __init__(self, name, series, season, episode, title=None, year=None, original_series=True, tvdb_id=None,
series_tvdb_id=None, series_imdb_id=None, alternative_series=None, series_anidb_id=None,
series_anidb_episode_id=None, **kwargs):
series_anidb_episode_id=None, series_anidb_season_episode_offset=None,
anilist_id=None, **kwargs):
super(Episode, self).__init__(name, **kwargs)
#: Series of the episode
@@ -163,8 +164,11 @@ class Episode(Video):
#: Alternative names of the series
self.alternative_series = alternative_series or []
#: Anime specific information
self.series_anidb_episode_id = series_anidb_episode_id
self.series_anidb_id = series_anidb_id
self.series_anidb_season_episode_offset = series_anidb_season_episode_offset
self.anilist_id = anilist_id
@classmethod
def fromguess(cls, name, guess):
@@ -207,10 +211,11 @@ class Movie(Video):
:param str title: title of the movie.
:param int year: year of the movie.
:param list alternative_titles: alternative titles of the movie
:param int anilist_id: AniList ID of movie (if Anime)
:param \*\*kwargs: additional parameters for the :class:`Video` constructor.
"""
def __init__(self, name, title, year=None, alternative_titles=None, **kwargs):
def __init__(self, name, title, year=None, alternative_titles=None, anilist_id=None, **kwargs):
super(Movie, self).__init__(name, **kwargs)
#: Title of the movie
@@ -221,6 +226,9 @@ class Movie(Video):
#: Alternative titles of the movie
self.alternative_titles = alternative_titles or []
#: AniList ID of the movie
self.anilist_id = anilist_id
@classmethod
def fromguess(cls, name, guess):

View file

@@ -0,0 +1,90 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from babelfish import LanguageReverseConverter
from subliminal.exceptions import ConfigurationError
class SubdlConverter(LanguageReverseConverter):
def __init__(self):
self.from_subdl = {
"AR": ("ara", None, None), # Arabic
"DA": ("dan", None, None), # Danish
"NL": ("nld", None, None), # Dutch
"EN": ("eng", None, None), # English
"FA": ("fas", None, None), # Farsi_Persian
"FI": ("fin", None, None), # Finnish
"FR": ("fra", None, None), # French
"ID": ("ind", None, None), # Indonesian
"IT": ("ita", None, None), # Italian
"NO": ("nor", None, None), # Norwegian
"RO": ("ron", None, None), # Romanian
"ES": ("spa", None, None), # Spanish
"SV": ("swe", None, None), # Swedish
"VI": ("vie", None, None), # Vietnamese
"SQ": ("sqi", None, None), # Albanian
"AZ": ("aze", None, None), # Azerbaijani
"BE": ("bel", None, None), # Belarusian
"BN": ("ben", None, None), # Bengali
"BS": ("bos", None, None), # Bosnian
"BG": ("bul", None, None), # Bulgarian
"MY": ("mya", None, None), # Burmese
"CA": ("cat", None, None), # Catalan
"ZH": ("zho", None, None), # Chinese BG code
"HR": ("hrv", None, None), # Croatian
"CS": ("ces", None, None), # Czech
"EO": ("epo", None, None), # Esperanto
"ET": ("est", None, None), # Estonian
"KA": ("kat", None, None), # Georgian
"DE": ("deu", None, None), # German
"EL": ("ell", None, None), # Greek
"KL": ("kal", None, None), # Greenlandic
"HE": ("heb", None, None), # Hebrew
"HI": ("hin", None, None), # Hindi
"HU": ("hun", None, None), # Hungarian
"IS": ("isl", None, None), # Icelandic
"JA": ("jpn", None, None), # Japanese
"KO": ("kor", None, None), # Korean
"KU": ("kur", None, None), # Kurdish
"LV": ("lav", None, None), # Latvian
"LT": ("lit", None, None), # Lithuanian
"MK": ("mkd", None, None), # Macedonian
"MS": ("msa", None, None), # Malay
"ML": ("mal", None, None), # Malayalam
"PL": ("pol", None, None), # Polish
"PT": ("por", None, None), # Portuguese
"RU": ("rus", None, None), # Russian
"SR": ("srp", None, None), # Serbian
"SI": ("sin", None, None), # Sinhala
"SK": ("slk", None, None), # Slovak
"SL": ("slv", None, None), # Slovenian
"TL": ("tgl", None, None), # Tagalog
"TA": ("tam", None, None), # Tamil
"TE": ("tel", None, None), # Telugu
"TH": ("tha", None, None), # Thai
"TR": ("tur", None, None), # Turkish
"UK": ("ukr", None, None), # Ukrainian
"UR": ("urd", None, None), # Urdu
# custom languages
"BR_PT": ("por", "BR", None), # Brazilian Portuguese
"ZH_BG": ("zho", None, "Hant"), # Big 5 code
# unsupported language in Bazarr
# "BG_EN": "Bulgarian_English",
# "NL_EN": "Dutch_English",
# "EN_DE": "English_German",
# "HU_EN": "Hungarian_English",
# "MNI": "Manipuri",
}
self.to_subdl = {v: k for k, v in self.from_subdl.items()}
self.codes = set(self.from_subdl.keys())
def convert(self, alpha3, country=None, script=None):
if (alpha3, country, script) in self.to_subdl:
return self.to_subdl[(alpha3, country, script)]
raise ConfigurationError('Unsupported language for subdl: %s, %s, %s' % (alpha3, country, script))
def reverse(self, subdl):
if subdl in self.from_subdl:
return self.from_subdl[subdl]
raise ConfigurationError('Unsupported language code for subdl: %s' % subdl)
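For reference, a quick usage sketch of the converter above, once registered with babelfish via the same entry-point string the Subdl provider uses later in this diff:

from babelfish import language_converters

language_converters.register('subdl = subliminal_patch.converters.subdl:SubdlConverter')
converter = language_converters['subdl']

converter.convert('por', 'BR', None)   # -> 'BR_PT' (Brazilian Portuguese)
converter.reverse('ZH_BG')             # -> ('zho', None, 'Hant'), the Big 5 variant
converter.reverse('XX')                # raises ConfigurationError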

View file

@@ -49,7 +49,17 @@ SUBTITLE_EXTENSIONS = ('.srt', '.sub', '.smi', '.txt', '.ssa', '.ass', '.mpl', '
_POOL_LIFETIME = datetime.timedelta(hours=12)
HI_REGEX = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\(\{].{3,}[\]\)\}](?<!{\\an\d})')
HI_REGEX_WITHOUT_PARENTHESIS = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\{].{3,}[\]\}](?<!{\\an\d})')
HI_REGEX_WITH_PARENTHESIS = re.compile(r'[*¶♫♪].{3,}[*¶♫♪]|[\[\(\{].{3,}[\]\)\}](?<!{\\an\d})')
HI_REGEX_PARENTHESIS_EXCLUDED_LANGUAGES = ['ara']
def parse_for_hi_regex(subtitle_text, alpha3_language):
if alpha3_language in HI_REGEX_PARENTHESIS_EXCLUDED_LANGUAGES:
return bool(re.search(HI_REGEX_WITHOUT_PARENTHESIS, subtitle_text))
else:
return bool(re.search(HI_REGEX_WITH_PARENTHESIS, subtitle_text))
def remove_crap_from_fn(fn):
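The split above apparently exists so that ordinary parenthesised dialogue is not treated as a hearing-impaired cue for Arabic, while square and curly brackets still count everywhere. A few illustrative calls:

parse_for_hi_regex("[door slams]", "eng")   # True: bracketed sound cue
parse_for_hi_regex("(door slams)", "eng")   # True: parentheses count for most languages
parse_for_hi_regex("(door slams)", "ara")   # False: parentheses are ignored for Arabic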
@@ -946,8 +956,8 @@ def _search_external_subtitles(path, languages=None, only_one=False, match_stric
lambda m: "" if str(m.group(1)).lower() in FULL_LANGUAGE_LIST else m.group(0), p_root)
p_root_lower = p_root_bare.lower()
filename_matches = p_root_lower == fn_no_ext_lower
# comparing to both unicode normalization forms to prevent breaking stuff and improve indexing on some platforms.
filename_matches = fn_no_ext_lower in [p_root_lower, unicodedata.normalize('NFC', p_root_lower)]
filename_contains = p_root_lower in fn_no_ext_lower
if not filename_matches:
@@ -1203,7 +1213,10 @@ def save_subtitles(file_path, subtitles, single=False, directory=None, chmod=Non
continue
# create subtitle path
if subtitle.text and bool(re.search(HI_REGEX, subtitle.text)):
if subtitle.text and parse_for_hi_regex(subtitle_text=subtitle.text,
alpha3_language=subtitle.language.alpha3 if
(hasattr(subtitle, 'language') and hasattr(subtitle.language, 'alpha3'))
else None):
subtitle.language.hi = True
subtitle_path = get_subtitle_path(file_path, None if single else subtitle.language,
forced_tag=subtitle.language.forced,

View file

@@ -141,7 +141,8 @@ class AnimeToshoProvider(Provider, ProviderSubtitleArchiveMixin):
for subtitle_file in subtitle_files:
hex_id = format(subtitle_file['id'], '08x')
lang = Language.fromalpha3b(subtitle_file['info']['lang'])
# AnimeTosho falls back to English when the subtitle language is not specified.
lang = Language.fromalpha3b(subtitle_file['info'].get('lang', 'eng'))
# For Portuguese and Brazilian Portuguese, which share the same code, the name is the only
# identifier AnimeTosho provides. Also, some subtitles do not have a name; in this case it could

View file

@@ -5,7 +5,7 @@ from random import randint
import pycountry
from requests.cookies import RequestsCookieJar
from subliminal.exceptions import AuthenticationError
from subliminal.exceptions import AuthenticationError, ProviderError
from subliminal.providers import ParserBeautifulSoup
from subliminal_patch.http import RetryingCFSession
from subliminal_patch.pitcher import store_verification
@@ -318,7 +318,7 @@ class AvistazNetworkProviderBase(Provider):
release_name = release['Title'].get_text().strip()
lang = lookup_lang(subtitle_cols['Language'].get_text().strip())
download_link = subtitle_cols['Download'].a['href']
uploader_name = subtitle_cols['Uploader'].get_text().strip()
uploader_name = subtitle_cols['Uploader'].get_text().strip() if 'Uploader' in subtitle_cols else None
if lang not in languages:
continue
@@ -354,7 +354,10 @@ class AvistazNetworkProviderBase(Provider):
def _parse_release_table(self, html):
release_data_table = (ParserBeautifulSoup(html, ['html.parser'])
.select_one('#content-area > div:nth-child(4) > div.table-responsive > table > tbody'))
.select_one('#content-area > div.block > div.table-responsive > table > tbody'))
if release_data_table is None:
raise ProviderError('Unexpected HTML page layout - no release data table found')
rows = {}
for tr in release_data_table.find_all('tr', recursive=False):

View file

@@ -112,7 +112,11 @@ class EmbeddedSubtitlesProvider(Provider):
# Default is True
container.FFMPEG_STATS = False
tags.LANGUAGE_FALLBACK = self._fallback_lang if self._unknown_as_fallback and self._fallback_lang else None
tags.LANGUAGE_FALLBACK = (
self._fallback_lang
if self._unknown_as_fallback and self._fallback_lang
else None
)
logger.debug("Language fallback set: %s", tags.LANGUAGE_FALLBACK)
def initialize(self):
@@ -194,7 +198,7 @@ class EmbeddedSubtitlesProvider(Provider):
def download_subtitle(self, subtitle: EmbeddedSubtitle):
try:
path = self._get_subtitle_path(subtitle)
except KeyError: # TODO: add MustGetBlacklisted support
except KeyError: # TODO: add MustGetBlacklisted support
logger.error("Couldn't get subtitle path")
return None
@@ -229,6 +233,7 @@ class EmbeddedSubtitlesProvider(Provider):
timeout=self._timeout,
fallback_to_convert=True,
basename_callback=_basename_callback,
progress_callback=lambda d: logger.debug("Progress: %s", d),
)
# Add the extracted paths to the container path key
self._cached_paths[container.path] = extracted

View file

@@ -96,7 +96,12 @@ class HDBitsProvider(Provider):
"https://hdbits.org/api/torrents", json={**self._def_params, **lookup}
)
response.raise_for_status()
ids = [item["id"] for item in response.json()["data"]]
try:
ids = [item["id"] for item in response.json()["data"]]
except KeyError:
logger.debug("No data found")
return []
subtitles = []
for torrent_id in ids:

View file

@@ -0,0 +1,419 @@
from __future__ import absolute_import
from datetime import timedelta
import logging
import os
import re
import time
from requests import Session
from subliminal import region, __short_version__
from subliminal.cache import REFINER_EXPIRATION_TIME
from subliminal.exceptions import ConfigurationError, AuthenticationError, ServiceUnavailable
from subliminal.utils import sanitize
from subliminal.video import Episode, Movie
from subliminal_patch.providers import Provider
from subliminal_patch.subtitle import Subtitle
from subliminal_patch.exceptions import APIThrottled
from subliminal_patch.providers.utils import get_subtitle_from_archive, get_archive_from_bytes
from urllib.parse import urlencode, urljoin
from guessit import guessit
from subzero.language import Language, FULL_LANGUAGE_LIST
logger = logging.getLogger(__name__)
# Unhandled formats, such files will always get filtered out
unhandled_archive_formats = (".7z",)
accepted_archive_formats = (".zip", ".rar")
class JimakuSubtitle(Subtitle):
'''Jimaku Subtitle.'''
provider_name = 'jimaku'
hash_verifiable = False
def __init__(self, language, video, download_url, filename):
super(JimakuSubtitle, self).__init__(language, page_link=download_url)
self.video = video
self.download_url = download_url
self.filename = filename
self.release_info = filename
self.is_archive = filename.endswith(accepted_archive_formats)
@property
def id(self):
return self.download_url
def get_matches(self, video):
matches = set()
# Episode/Movie specific matches
if isinstance(video, Episode):
if sanitize(video.series) and sanitize(self.video.series) in (
sanitize(name) for name in [video.series] + video.alternative_series):
matches.add('series')
if video.season and (self.video.season is None or video.season == self.video.season):
matches.add('season')
elif isinstance(video, Movie):
if sanitize(video.title) and sanitize(self.video.title) in (
sanitize(name) for name in [video.title] + video.alternative_titles):
matches.add('title')
# General matches
if video.year and video.year == self.video.year:
matches.add('year')
video_type = 'movie' if isinstance(video, Movie) else 'episode'
matches.add(video_type)
guess = guessit(self.filename, {'type': video_type})
for key, value in guess.items():
if key in ("release_group", "source"):
if video.release_group == value:
matches.add('release_group')
break
# Prioritize .srt by repurposing the audio_codec match
if self.filename.endswith(".srt"):
matches.add('audio_codec')
return matches
class JimakuProvider(Provider):
'''Jimaku Provider.'''
video_types = (Episode, Movie)
api_url = 'https://jimaku.cc/api'
api_ratelimit_max_delay_seconds = 5
api_ratelimit_backoff_limit = 3
corrupted_file_size_threshold = 500
languages = {Language.fromietf("ja")}
def __init__(self, enable_name_search_fallback, enable_archives_download, enable_ai_subs, api_key):
if api_key:
self.api_key = api_key
else:
raise ConfigurationError('Missing api_key.')
self.enable_name_search_fallback = enable_name_search_fallback
self.download_archives = enable_archives_download
self.enable_ai_subs = enable_ai_subs
self.session = None
def initialize(self):
self.session = Session()
self.session.headers['Content-Type'] = 'application/json'
self.session.headers['Authorization'] = self.api_key
self.session.headers['User-Agent'] = os.environ.get("SZ_USER_AGENT")
def terminate(self):
self.session.close()
def _query(self, video):
if isinstance(video, Movie):
media_name = video.title.lower()
elif isinstance(video, Episode):
media_name = video.series.lower()
# With entries that have a season larger than 1, Jimaku appends the corresponding season number to the name.
# We'll reassemble media_name here to account for cases where we can only search by name alone.
season_addendum = str(video.season) if video.season > 1 else None
media_name = f"{media_name} {season_addendum}" if season_addendum else media_name
# Search for entry
searching_for_entry_attempts = 0
additional_url_params = {}
while searching_for_entry_attempts < 2:
searching_for_entry_attempts += 1
url = self._assemble_jimaku_search_url(video, media_name, additional_url_params)
if not url:
return None
searching_for_entry = "query" in url
data = self._search_for_entry(url)
if not data:
if searching_for_entry and searching_for_entry_attempts < 2:
logger.info("Maybe this is live action media? Will retry search without anime parameter...")
additional_url_params = {'anime': "false"}
else:
return None
else:
break
# We only go for the first entry
entry = data[0]
entry_id = entry.get('id')
anilist_id = entry.get('anilist_id', None)
entry_name = entry.get('name')
is_movie = entry.get('flags', {}).get('movie', False)
if isinstance(video, Episode) and is_movie:
logger.warning("Bazarr thinks this is a series, but Jimaku says this is a movie! May not be able to match subtitles...")
logger.info(f"Matched entry: ID: '{entry_id}', anilist_id: '{anilist_id}', name: '{entry_name}', english_name: '{entry.get('english_name')}', movie: {is_movie}")
if entry.get("flags").get("unverified"):
logger.warning(f"This entry '{entry_id}' is unverified, subtitles might be incomplete or have quality issues!")
# Get a list of subtitles for entry
episode_number = getattr(video, "episode", None)
url_params = {'episode': episode_number} if isinstance(video, Episode) and not is_movie else {}
only_look_for_archives = False
has_offset = isinstance(video, Episode) and video.series_anidb_season_episode_offset is not None
retry_count = 0
adjusted_ep_num = None
while retry_count <= 1:
# Account for positive episode offset first
if isinstance(video, Episode) and not is_movie and retry_count < 1:
if video.season > 1 and has_offset:
offset_value = video.series_anidb_season_episode_offset
offset_value = offset_value if offset_value > 0 else -offset_value
if episode_number < offset_value:
adjusted_ep_num = episode_number + offset_value
logger.warning(f"Will try using adjusted episode number {adjusted_ep_num} first")
url_params = {'episode': adjusted_ep_num}
url = f"entries/{entry_id}/files"
data = self._search_for_subtitles(url, url_params)
if not data:
if isinstance(video, Episode) and not is_movie and has_offset and retry_count < 1:
logger.warning(f"Found no subtitles for adjusted episode number, but will retry with normal episode number {episode_number}")
url_params = {'episode': episode_number}
elif isinstance(video, Episode) and not is_movie and retry_count < 1:
logger.warning(f"Found no subtitles for episode number {episode_number}, but will retry without 'episode' parameter")
url_params = {}
only_look_for_archives = True
else:
return None
retry_count += 1
else:
if adjusted_ep_num:
video.episode = adjusted_ep_num
logger.debug(f"This videos episode attribute has been updated to: {video.episode}")
break
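# Worked example (assumed offset semantics): S02E05 with a
# series_anidb_season_episode_offset of 12 queries episode 17 (5 + 12)
# first; if Jimaku has nothing for 17, the loop falls back to the plain
# episode number 5 on the second pass.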
# Filter subtitles
list_of_subtitles = []
data = [item for item in data if not item['name'].endswith(unhandled_archive_formats)]
# Detect only archives being uploaded
archive_entries = [item for item in data if item['name'].endswith(accepted_archive_formats)]
subtitle_entries = [item for item in data if not item['name'].endswith(accepted_archive_formats)]
has_only_archives = len(archive_entries) > 0 and len(subtitle_entries) == 0
if has_only_archives:
logger.warning("Have only found archived subtitles")
elif only_look_for_archives:
data = [item for item in data if item['name'].endswith(accepted_archive_formats)]
for item in data:
filename = item.get('name')
download_url = item.get('url')
is_archive = filename.endswith(accepted_archive_formats)
# Archives will still be considered if they're the only files available, as is mostly the case for movies.
if is_archive and not has_only_archives and not self.download_archives:
logger.warning(f"Skipping archive '{filename}' because normal subtitles are available instead")
continue
if not self.enable_ai_subs:
p = re.compile(r'[\[\(]?(whisperai)[\]\)]?|[\[\(]whisper[\]\)]', re.IGNORECASE)
if p.search(filename):
logger.warning(f"Skipping subtitle '{filename}' as it's suspected of being AI generated")
continue
sub_languages = self._try_determine_subtitle_languages(filename)
if len(sub_languages) > 1:
logger.warning(f"Skipping subtitle '{filename}' as it's suspected of containing multiple languages")
continue
# Check if file is obviously corrupt. If no size is returned, assume OK
filesize = item.get('size', self.corrupted_file_size_threshold)
if filesize < self.corrupted_file_size_threshold:
logger.warning(f"Skipping possibly corrupt file '{filename}': Filesize is just {filesize} bytes")
continue
if not filename.endswith(unhandled_archive_formats):
lang = sub_languages[0] if len(sub_languages) == 1 else Language("jpn")
list_of_subtitles.append(JimakuSubtitle(lang, video, download_url, filename))
else:
logger.debug(f"Skipping archive '{filename}' as it's not a supported format")
return list_of_subtitles
def list_subtitles(self, video, languages=None):
subtitles = self._query(video)
if not subtitles:
return []
return [s for s in subtitles]
def download_subtitle(self, subtitle: JimakuSubtitle):
target_url = subtitle.download_url
response = self.session.get(target_url, timeout=10)
response.raise_for_status()
if subtitle.is_archive:
archive = get_archive_from_bytes(response.content)
if archive:
if isinstance(subtitle.video, Episode):
subtitle.content = get_subtitle_from_archive(
archive,
episode=subtitle.video.episode,
episode_title=subtitle.video.title
)
else:
subtitle.content = get_subtitle_from_archive(
archive
)
else:
logger.warning("Archive seems to not be an archive! File possibly corrupt?")
return None
else:
subtitle.content = response.content
def _do_jimaku_request(self, url_path, url_params={}):
url = urljoin(f"{self.api_url}/{url_path}", '?' + urlencode(url_params))
retry_count = 0
while retry_count < self.api_ratelimit_backoff_limit:
response = self.session.get(url, timeout=10)
if response.status_code == 429:
reset_time = 5
retry_count += 1
logger.warning(f"Jimaku ratelimit hit, waiting for '{reset_time}' seconds ({retry_count}/{self.api_ratelimit_backoff_limit} tries)")
time.sleep(reset_time)
continue
elif response.status_code == 401:
raise AuthenticationError("Unauthorized. API key possibly invalid")
else:
response.raise_for_status()
data = response.json()
logger.debug(f"Length of response on {url}: {len(data)}")
if len(data) == 0:
logger.error(f"Jimaku returned no items for our our query: {url}")
return None
elif 'error' in data:
raise ServiceUnavailable(f"Jimaku returned an error: '{data.get('error')}', Code: '{data.get('code')}'")
else:
return data
raise APIThrottled(f"Jimaku ratelimit max backoff limit of {self.api_ratelimit_backoff_limit} reached, aborting")
# Wrapper functions to indirectly call _do_jimaku_request with different cache configs
@region.cache_on_arguments(expiration_time=REFINER_EXPIRATION_TIME)
def _search_for_entry(self, url_path, url_params={}):
return self._do_jimaku_request(url_path, url_params)
@region.cache_on_arguments(expiration_time=timedelta(minutes=1).total_seconds())
def _search_for_subtitles(self, url_path, url_params={}):
return self._do_jimaku_request(url_path, url_params)
@staticmethod
def _try_determine_subtitle_languages(filename):
# This is more like a guess and not a 100% fool-proof way of detecting multi-lang subs:
# It assumes that language codes, if present, are in the last metadata group of the subs filename.
# If such codes are not present, or we failed to match any at all, then we'll just assume that the sub is purely Japanese.
default_language = Language("jpn")
dot_delimit = filename.split(".")
bracket_delimit = re.split(r'[\[\]\(\)]+', filename)
candidate_list = ""
if len(dot_delimit) > 2:
candidate_list = dot_delimit[-2]
elif len(bracket_delimit) > 2:
candidate_list = bracket_delimit[-2]
candidates = [] if len(candidate_list) == 0 else re.split(r'[,\-\+\& ]+', candidate_list)
# Discard match group if any candidate...
# ...contains any numbers, as the group is likely encoding information
if any(re.compile(r'\d').search(string) for string in candidates):
return [default_language]
# ...is >= 5 chars long, as the group is likely other unrelated metadata
if any(len(string) >= 5 for string in candidates):
return [default_language]
languages = list()
for candidate in candidates:
candidate = candidate.lower()
if candidate in ["ass", "srt"]:
continue
# Sometimes, languages are hidden in 4 character blocks, i.e. "JPSC"
if len(candidate) == 4:
for addendum in [candidate[:2], candidate[2:]]:
candidates.append(addendum)
continue
# Sometimes, language codes can have additional info such as 'cc' or 'sdh'. For example: "ja[cc]"
if len(dot_delimit) > 2 and any(c in candidate for c in '[]()'):
candidate = re.split(r'[\[\]\(\)]+', candidate)[0]
try:
language_squash = {
"jp": "ja",
"jap": "ja",
"chs": "zho",
"cht": "zho",
"zhi": "zho",
"cn": "zho"
}
candidate = language_squash[candidate] if candidate in language_squash else candidate
if len(candidate) > 2:
language = Language(candidate)
else:
language = Language.fromietf(candidate)
if not any(l.alpha3 == language.alpha3 for l in languages):
languages.append(language)
except Exception:
if candidate in FULL_LANGUAGE_LIST:
# Create a dummy for the unknown language
languages.append(Language("zul"))
if len(languages) > 1:
# Sometimes a metadata group that actually contains info about codecs gets processed as valid languages.
# To prevent false positives, we'll check if Japanese language codes are in the processed languages list.
# If not, then it's likely that we didn't actually match language codes -> Assume Japanese only subtitle.
contains_jpn = any(l.alpha3 == "jpn" for l in languages)
return languages if contains_jpn else [Language("jpn")]
else:
return [default_language]
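# Illustrative traces of the heuristic above (assumed, not exhaustive):
#   "Show.S01E05.ja.srt"      -> candidates ["ja"]       -> [jpn]
#   "Show.S01E05.en-ja.srt"   -> candidates ["en", "ja"] -> [eng, jpn]; the caller skips multi-language subs
#   "Show S01E05 [Group].srt" -> candidate "Group" is >= 5 chars, treated as metadata -> [jpn]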
def _assemble_jimaku_search_url(self, video, media_name, additional_params={}):
endpoint = "entries/search"
anilist_id = video.anilist_id
params = {}
if anilist_id:
params = {'anilist_id': anilist_id}
else:
if self.enable_name_search_fallback or isinstance(video, Movie):
params = {'query': media_name}
else:
logger.error(f"Skipping '{media_name}': Got no AniList ID and fuzzy matching using name is disabled")
return None
if additional_params:
params.update(additional_params)
logger.info(f"Will search for entry based on params: {params}")
return urljoin(endpoint, '?' + urlencode(params))

View file

@@ -29,6 +29,7 @@ from dogpile.cache.api import NO_VALUE
logger = logging.getLogger(__name__)
class LegendasdivxSubtitle(Subtitle):
"""Legendasdivx Subtitle."""
provider_name = 'legendasdivx'
@@ -69,10 +70,12 @@ class LegendasdivxSubtitle(Subtitle):
self.wrong_fps = True
if self.skip_wrong_fps:
logger.debug("Legendasdivx :: Skipping subtitle due to FPS mismatch (expected: %s, got: %s)", video.fps, self.sub_frame_rate)
logger.debug("Legendasdivx :: Skipping subtitle due to FPS mismatch (expected: %s, got: %s)", video.fps,
self.sub_frame_rate)
# not a single match :)
return set()
logger.debug("Legendasdivx :: Frame rate mismatch (expected: %s, got: %s, but continuing...)", video.fps, self.sub_frame_rate)
logger.debug("Legendasdivx :: Frame rate mismatch (expected: %s, got: %s, but continuing...)", video.fps,
self.sub_frame_rate)
description = sanitize(self.description)
@@ -112,6 +115,11 @@ class LegendasdivxSubtitle(Subtitle):
matches.update(['season'])
if video.episode and 'e{:02d}'.format(video.episode) in description:
matches.update(['episode'])
# The search is already based on series_imdb_id when present in the video and handled by the
# Legendasdivx backend, so if there is a result it matches, whether inside a pack or as a specific
# series and episode; we can therefore assume the season and episode match.
if video.series_imdb_id:
matches.update(['series', 'series_imdb_id', 'season', 'episode'])
# release_group
if video.release_group and sanitize_release_group(video.release_group) in sanitize_release_group(description):
@@ -121,6 +129,7 @@ class LegendasdivxSubtitle(Subtitle):
return matches
class LegendasdivxProvider(Provider):
"""Legendasdivx Provider."""
languages = {Language('por', 'BR')} | {Language('por')}
@@ -135,7 +144,7 @@ class LegendasdivxProvider(Provider):
'Referer': 'https://www.legendasdivx.pt'
}
loginpage = site + '/forum/ucp.php?mode=login'
searchurl = site + '/modules.php?name=Downloads&file=jz&d_op=search&op=_jz00&query={query}'
searchurl = site + '/modules.php?name=Downloads&file=jz&d_op={d_op}&op={op}&query={query}&temporada={season}&episodio={episode}&imdb={imdbid}'
download_link = site + '/modules.php{link}'
def __init__(self, username, password, skip_wrong_fps=True):
@@ -186,7 +195,8 @@ class LegendasdivxProvider(Provider):
res = self.session.post(self.loginpage, data)
res.raise_for_status()
# make sure we're logged in
logger.debug('Legendasdivx.pt :: Logged in successfully: PHPSESSID: %s', self.session.cookies.get_dict()['PHPSESSID'])
logger.debug('Legendasdivx.pt :: Logged in successfully: PHPSESSID: %s',
self.session.cookies.get_dict()['PHPSESSID'])
cj = self.session.cookies.copy()
store_cks = ("PHPSESSID", "phpbb3_2z8zs_sid", "phpbb3_2z8zs_k", "phpbb3_2z8zs_u", "lang")
for cn in iter(self.session.cookies.keys()):
@@ -252,7 +262,7 @@ class LegendasdivxProvider(Provider):
continue
# get subtitle uploader
sub_header = _subbox.find("div", {"class" :"sub_header"})
sub_header = _subbox.find("div", {"class": "sub_header"})
uploader = sub_header.find("a").text if sub_header else 'anonymous'
exact_match = False
@@ -278,12 +288,24 @@ class LegendasdivxProvider(Provider):
subtitles = []
# Set the default search criteria
d_op = 'search'
op = '_jz00'
lang_filter_key = 'form_cat'
if isinstance(video, Movie):
querytext = video.imdb_id if video.imdb_id else video.title
if isinstance(video, Episode):
querytext = '%22{}%20S{:02d}E{:02d}%22'.format(video.series, video.season, video.episode)
querytext = quote(querytext.lower())
# Overwrite the parameters to refine via imdb_id
if video.series_imdb_id:
querytext = '&faz=pesquisa_episodio'
lang_filter_key = 'idioma'
d_op = 'jz_00'
op = ''
else:
querytext = '%22{}%22%20S{:02d}E{:02d}'.format(video.series, video.season, video.episode)
querytext = quote(querytext.lower())
# language query filter
if not isinstance(languages, (tuple, list, set)):
@@ -293,21 +315,30 @@ class LegendasdivxProvider(Provider):
logger.debug("Legendasdivx.pt :: searching for %s subtitles.", language)
language_id = language.opensubtitles
if 'por' in language_id:
lang_filter = '&form_cat=28'
lang_filter = '&{}=28'.format(lang_filter_key)
elif 'pob' in language_id:
lang_filter = '&form_cat=29'
lang_filter = '&{}=29'.format(lang_filter_key)
else:
lang_filter = ''
querytext = querytext + lang_filter if lang_filter else querytext
search_url = _searchurl.format(
query=querytext,
season='' if isinstance(video, Movie) else video.season,
episode='' if isinstance(video, Movie) else video.episode,
imdbid='' if isinstance(video, Movie) else video.series_imdb_id.replace('tt', '') if video.series_imdb_id else '',
op=op,
d_op=d_op,
)
try:
# sleep for a 1 second before another request
sleep(1)
searchLimitReached = False
self.headers['Referer'] = self.site + '/index.php'
self.session.headers.update(self.headers)
res = self.session.get(_searchurl.format(query=querytext), allow_redirects=False)
res = self.session.get(search_url, allow_redirects=False)
res.raise_for_status()
if res.status_code == 200 and "<!--pesquisas:" in res.text:
searches_count_groups = re.search(r'<!--pesquisas: (\d*)-->', res.text)
@@ -327,7 +358,7 @@ class LegendasdivxProvider(Provider):
querytext = re.sub(r"(e|E)(\d{2})", "", querytext)
# sleep for a 1 second before another request
sleep(1)
res = self.session.get(_searchurl.format(query=querytext), allow_redirects=False)
res = self.session.get(search_url, allow_redirects=False)
res.raise_for_status()
if res.status_code == 200 and "<!--pesquisas:" in res.text:
searches_count_groups = re.search(r'<!--pesquisas: (\d*)-->', res.text)
@@ -340,9 +371,11 @@ class LegendasdivxProvider(Provider):
if searches_count >= self.SAFE_SEARCH_LIMIT:
searchLimitReached = True
if (res.status_code == 200 and "A legenda não foi encontrada" in res.text):
logger.warning('Legendasdivx.pt :: query {0} return no results for language {1}(for series and season only).'.format(querytext, language_id))
logger.warning(
'Legendasdivx.pt :: query {0} returned no results for language {1} (for series and season only).'.format(
querytext, language_id))
continue
if res.status_code == 302: # got redirected to login page.
if res.status_code == 302: # got redirected to login page.
# seems that our session cookies are no longer valid... clean them from cache
region.delete("legendasdivx_cookies2")
logger.debug("Legendasdivx.pt :: Logging in again. Cookies have expired!")
@@ -350,7 +383,7 @@ class LegendasdivxProvider(Provider):
self.login()
# sleep for a 1 second before another request
sleep(1)
res = self.session.get(_searchurl.format(query=querytext))
res = self.session.get(search_url, allow_redirects=False)
res.raise_for_status()
if res.status_code == 200 and "<!--pesquisas:" in res.text:
searches_count_groups = re.search(r'<!--pesquisas: (\d*)-->', res.text)
@@ -394,9 +427,9 @@ class LegendasdivxProvider(Provider):
# more pages?
if num_pages > 1:
for num_page in range(2, num_pages+1):
for num_page in range(2, num_pages + 1):
sleep(1) # another 1 sec before requesting...
_search_next = self.searchurl.format(query=querytext) + "&page={0}".format(str(num_page))
_search_next = search_url + "&page={0}".format(str(num_page))
logger.debug("Legendasdivx.pt :: Moving on to next page: %s", _search_next)
# sleep for a 1 second before another request
sleep(1)
@@ -409,7 +442,7 @@ class LegendasdivxProvider(Provider):
def list_subtitles(self, video, languages):
return self.query(video, languages)
@reinitialize_on_error((RequestException,), attempts=1)
def download_subtitle(self, subtitle):
@@ -478,7 +511,8 @@ class LegendasdivxProvider(Provider):
if isinstance(subtitle.video, Episode):
if all(key in _guess for key in ('season', 'episode')):
logger.debug("Legendasdivx.pt :: guessing %s", name)
logger.debug("Legendasdivx.pt :: subtitle S%sE%s video S%sE%s", _guess['season'], _guess['episode'], subtitle.video.season, subtitle.video.episode)
logger.debug("Legendasdivx.pt :: subtitle S%sE%s video S%sE%s", _guess['season'], _guess['episode'],
subtitle.video.season, subtitle.video.episode)
if subtitle.video.episode != _guess['episode'] or subtitle.video.season != _guess['season']:
logger.debug('Legendasdivx.pt :: subtitle does not match video, skipping')
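With the refinement above, an episode search for a series that has an IMDB id no longer goes through the quoted-title query. For illustration only, with hypothetical values (series tt0903747, S01E02, Portuguese, filter id 28), the assembled URL would look roughly like:

# d_op='jz_00', op='', querytext='&faz=pesquisa_episodio&idioma=28', so search_url becomes:
# https://www.legendasdivx.pt/modules.php?name=Downloads&file=jz&d_op=jz_00&op=
#     &query=&faz=pesquisa_episodio&idioma=28&temporada=1&episodio=2&imdb=0903747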

View file

@@ -0,0 +1,264 @@
# -*- coding: utf-8 -*-
import logging
import os
import time
import io
import json
from zipfile import ZipFile, is_zipfile
from urllib.parse import urljoin
from requests import Session
from subzero.language import Language
from subliminal import Episode, Movie
from subliminal.exceptions import ConfigurationError, ProviderError, DownloadLimitExceeded
from subliminal_patch.exceptions import APIThrottled
from .mixins import ProviderRetryMixin
from subliminal_patch.subtitle import Subtitle
from subliminal.subtitle import fix_line_ending
from subliminal_patch.providers import Provider
from subliminal_patch.providers import utils
logger = logging.getLogger(__name__)
retry_amount = 3
retry_timeout = 5
class LegendasNetSubtitle(Subtitle):
provider_name = 'legendasnet'
hash_verifiable = False
def __init__(self, language, forced, page_link, download_link, file_id, release_names, uploader,
season=None, episode=None):
super().__init__(language)
language = Language.rebuild(language, forced=forced)
self.season = season
self.episode = episode
self.releases = release_names
self.release_info = ', '.join(release_names)
self.language = language
self.forced = forced
self.file_id = file_id
self.page_link = page_link
self.download_link = download_link
self.uploader = uploader
self.matches = None
@property
def id(self):
return self.file_id
def get_matches(self, video):
matches = set()
# handle movies and series separately
if isinstance(video, Episode):
# series
matches.add('series')
# season
if video.season == self.season:
matches.add('season')
# episode
if video.episode == self.episode:
matches.add('episode')
# imdb
matches.add('series_imdb_id')
else:
# title
matches.add('title')
# imdb
matches.add('imdb_id')
utils.update_matches(matches, video, self.release_info)
self.matches = matches
return matches
class LegendasNetProvider(ProviderRetryMixin, Provider):
"""Legendas.Net Provider"""
server_hostname = 'legendas.net/api'
languages = {Language('por', 'BR')}
video_types = (Episode, Movie)
def __init__(self, username, password):
self.session = Session()
self.session.headers = {'User-Agent': os.environ.get("SZ_USER_AGENT", "Sub-Zero/2")}
self.username = username
self.password = password
self.access_token = None
self.video = None
self._started = None
self.login()
def login(self):
headersList = {
"Accept": "*/*",
"User-Agent": self.session.headers['User-Agent'],
"Content-Type": "application/json"
}
payload = json.dumps({
"email": self.username,
"password": self.password
})
response = self.session.request("POST", self.server_url() + 'login', data=payload, headers=headersList)
if response.status_code != 200:
raise ConfigurationError('Failed to login and retrieve access token')
self.access_token = response.json().get('access_token')
if not self.access_token:
raise ConfigurationError('Access token not found in login response')
self.session.headers.update({'Authorization': f'Bearer {self.access_token}'})
def initialize(self):
self._started = time.time()
def terminate(self):
self.session.close()
def server_url(self):
return f'https://{self.server_hostname}/v1/'
def query(self, languages, video):
self.video = video
# query the server
if isinstance(self.video, Episode):
res = self.retry(
lambda: self.session.get(self.server_url() + 'search/tv',
json={
'name': video.series,
'page': 1,
'per_page': 25,
'tv_episode': video.episode,
'tv_season': video.season,
'imdb_id': video.series_imdb_id
},
headers={'Content-Type': 'application/json'},
timeout=30),
amount=retry_amount,
retry_timeout=retry_timeout
)
else:
res = self.retry(
lambda: self.session.get(self.server_url() + 'search/movie',
json={
'name': video.title,
'page': 1,
'per_page': 25,
'imdb_id': video.imdb_id
},
headers={'Content-Type': 'application/json'},
timeout=30),
amount=retry_amount,
retry_timeout=retry_timeout
)
if res.status_code == 404:
logger.error(f"Endpoint not found: {res.url}")
raise ProviderError("Endpoint not found")
elif res.status_code == 429:
raise APIThrottled("Too many requests")
elif res.status_code == 403:
raise ConfigurationError("Invalid access token")
elif res.status_code != 200:
res.raise_for_status()
subtitles = []
result = res.json()
if ('success' in result and not result['success']) or ('status' in result and not result['status']):
logger.debug(result["error"])
return []
if isinstance(self.video, Episode):
if len(result['tv_shows']):
for item in result['tv_shows']:
subtitle = LegendasNetSubtitle(
language=Language('por', 'BR'),
forced=self._is_forced(item),
page_link=f"https://legendas.net/tv_legenda?movie_id={result['tv_shows'][0]['tmdb_id']}&"
f"legenda_id={item['id']}",
download_link=item['path'],
file_id=item['id'],
release_names=[item.get('release_name', '')],
uploader=item['uploader'],
season=item.get('season', ''),
episode=item.get('episode', '')
)
subtitle.get_matches(self.video)
if subtitle.language in languages:
subtitles.append(subtitle)
else:
if len(result['movies']):
for item in result['movies']:
subtitle = LegendasNetSubtitle(
language=Language('por', 'BR'),
forced=self._is_forced(item),
page_link=f"https://legendas.net/legenda?movie_id={result['movies'][0]['tmdb_id']}&"
f"legenda_id={item['id']}",
download_link=item['path'],
file_id=item['id'],
release_names=[item.get('release_name', '')],
uploader=item['uploader'],
season=None,
episode=None
)
subtitle.get_matches(self.video)
if subtitle.language in languages:
subtitles.append(subtitle)
return subtitles
@staticmethod
def _is_forced(item):
forced_tags = ['forced', 'foreign']
for tag in forced_tags:
if tag in item.get('comment', '').lower():
return True
# nothing matched, so we consider it normal subtitles
return False
def list_subtitles(self, video, languages):
return self.query(languages, video)
def download_subtitle(self, subtitle):
logger.debug('Downloading subtitle %r', subtitle)
download_link = urljoin("https://legendas.net", subtitle.download_link)
r = self.retry(
lambda: self.session.get(download_link, timeout=30),
amount=retry_amount,
retry_timeout=retry_timeout
)
if r.status_code == 429:
raise DownloadLimitExceeded("Daily download limit exceeded")
elif r.status_code == 403:
raise ConfigurationError("Invalid access token")
elif r.status_code != 200:
r.raise_for_status()
if not r:
logger.error(f'Could not download subtitle from {download_link}')
subtitle.content = None
return
else:
archive_stream = io.BytesIO(r.content)
if is_zipfile(archive_stream):
archive = ZipFile(archive_stream)
for name in archive.namelist():
subtitle_content = archive.read(name)
subtitle.content = fix_line_ending(subtitle_content)
return
else:
subtitle_content = r.content
subtitle.content = fix_line_ending(subtitle_content)
return

View file

@@ -218,7 +218,7 @@ class OpenSubtitlesComProvider(ProviderRetryMixin, Provider):
try:
self.token = r.json()['token']
except (ValueError, JSONDecodeError):
except (ValueError, JSONDecodeError, AttributeError):
log_request_response(r)
raise ProviderError("Cannot get token from provider login response")
else:
@@ -543,10 +543,6 @@ class OpenSubtitlesComProvider(ProviderRetryMixin, Provider):
elif status_code == 429:
log_request_response(response)
raise TooManyRequests()
elif status_code == 500:
logger.debug("Server side exception raised while downloading from opensubtitles.com website. They "
"should mitigate this soon.")
return None
elif status_code == 502:
# this one should deal with Bad Gateway issue on their side.
raise APIThrottled()

View file

@@ -209,7 +209,8 @@ class PodnapisiProvider(_PodnapisiProvider, ProviderSubtitleArchiveMixin):
break
# exit if no results
if not xml.find('pagination/results') or not int(xml.find('pagination/results').text):
if (not xml.find('pagination/results') or not xml.find('pagination/results').text or not
int(xml.find('pagination/results').text)):
logger.debug('No subtitles found')
break

View file

@@ -277,7 +277,11 @@ class SoustitreseuProvider(Provider, ProviderSubtitleArchiveMixin):
release = name[:-4].lower().rstrip('tag').rstrip('en').rstrip('fr')
_guess = guessit(release)
if isinstance(video, Episode):
if video.episode != _guess['episode'] or video.season != _guess['season']:
try:
if video.episode != _guess['episode'] or video.season != _guess['season']:
continue
except KeyError:
# episode or season are missing from guessit result
continue
matches = set()

View file

@@ -172,7 +172,7 @@ class SubdivxSubtitlesProvider(Provider):
logger.debug("Query: %s", query)
response = self.session.post(search_link, data=payload)
response = self.session.post(search_link, data=payload, timeout=30)
if response.status_code == 500:
logger.debug(

View file

@@ -0,0 +1,279 @@
# -*- coding: utf-8 -*-
import logging
import os
import time
import io
from zipfile import ZipFile, is_zipfile
from urllib.parse import urljoin
from requests import Session
from babelfish import language_converters
from subzero.language import Language
from subliminal import Episode, Movie
from subliminal.exceptions import ConfigurationError, ProviderError, DownloadLimitExceeded
from subliminal_patch.exceptions import APIThrottled
from .mixins import ProviderRetryMixin
from subliminal_patch.subtitle import Subtitle
from subliminal.subtitle import fix_line_ending
from subliminal_patch.providers import Provider
from subliminal_patch.providers import utils
logger = logging.getLogger(__name__)
retry_amount = 3
retry_timeout = 5
language_converters.register('subdl = subliminal_patch.converters.subdl:SubdlConverter')
class SubdlSubtitle(Subtitle):
provider_name = 'subdl'
hash_verifiable = False
hearing_impaired_verifiable = True
def __init__(self, language, forced, hearing_impaired, page_link, download_link, file_id, release_names, uploader,
season=None, episode=None):
super().__init__(language)
language = Language.rebuild(language, hi=hearing_impaired, forced=forced)
self.season = season
self.episode = episode
self.releases = release_names
self.release_info = ', '.join(release_names)
self.language = language
self.forced = forced
self.hearing_impaired = hearing_impaired
self.file_id = file_id
self.page_link = page_link
self.download_link = download_link
self.uploader = uploader
self.matches = None
@property
def id(self):
return self.file_id
def get_matches(self, video):
matches = set()
# handle movies and series separately
if isinstance(video, Episode):
# series
matches.add('series')
# season
if video.season == self.season:
matches.add('season')
# episode
if video.episode == self.episode:
matches.add('episode')
# imdb
matches.add('series_imdb_id')
else:
# title
matches.add('title')
# imdb
matches.add('imdb_id')
utils.update_matches(matches, video, self.release_info)
self.matches = matches
return matches
class SubdlProvider(ProviderRetryMixin, Provider):
"""Subdl Provider"""
server_hostname = 'api.subdl.com'
languages = {Language(*lang) for lang in list(language_converters['subdl'].to_subdl.keys())}
languages.update(set(Language.rebuild(lang, forced=True) for lang in languages))
languages.update(set(Language.rebuild(l, hi=True) for l in languages))
video_types = (Episode, Movie)
def __init__(self, api_key=None):
if not api_key:
raise ConfigurationError('Api_key must be specified')
self.session = Session()
self.session.headers = {'User-Agent': os.environ.get("SZ_USER_AGENT", "Sub-Zero/2")}
self.api_key = api_key
self.video = None
self._started = None
def initialize(self):
self._started = time.time()
def terminate(self):
self.session.close()
def server_url(self):
return f'https://{self.server_hostname}/api/v1/'
def query(self, languages, video):
self.video = video
if isinstance(self.video, Episode):
title = self.video.series
else:
title = self.video.title
imdb_id = None
if isinstance(self.video, Episode) and self.video.series_imdb_id:
imdb_id = self.video.series_imdb_id
elif isinstance(self.video, Movie) and self.video.imdb_id:
imdb_id = self.video.imdb_id
# be sure to remove duplicates using list(set())
langs_list = sorted(list(set([language_converters['subdl'].convert(lang.alpha3, lang.country, lang.script) for
lang in languages])))
langs = ','.join(langs_list)
logger.debug(f'Searching for those languages: {langs}')
# query the server
if isinstance(self.video, Episode):
res = self.retry(
lambda: self.session.get(self.server_url() + 'subtitles',
params=(('api_key', self.api_key),
('episode_number', self.video.episode),
('film_name', title if not imdb_id else None),
('imdb_id', imdb_id if imdb_id else None),
('languages', langs),
('season_number', self.video.season),
('subs_per_page', 30),
('type', 'tv'),
('comment', 1),
('releases', 1),
('bazarr', 1)), # this argument filters out incompatible image-based or
# txt subtitles
timeout=30),
amount=retry_amount,
retry_timeout=retry_timeout
)
else:
res = self.retry(
lambda: self.session.get(self.server_url() + 'subtitles',
params=(('api_key', self.api_key),
('film_name', title if not imdb_id else None),
('imdb_id', imdb_id if imdb_id else None),
('languages', langs),
('subs_per_page', 30),
('type', 'movie'),
('comment', 1),
('releases', 1),
('bazarr', 1)), # this argument filters out incompatible image-based or
# txt subtitles
timeout=30),
amount=retry_amount,
retry_timeout=retry_timeout
)
if res.status_code == 429:
raise APIThrottled("Too many requests")
elif res.status_code == 403:
raise ConfigurationError("Invalid API key")
elif res.status_code != 200:
res.raise_for_status()
subtitles = []
result = res.json()
if ('success' in result and not result['success']) or ('status' in result and not result['status']):
logger.debug(result["error"])
return []
logger.debug(f"Query returned {len(result['subtitles'])} subtitles")
if len(result['subtitles']):
for item in result['subtitles']:
if item.get('episode_from', False) == item.get('episode_end', False): # ignore season packs
subtitle = SubdlSubtitle(
language=Language.fromsubdl(item['language']),
forced=self._is_forced(item),
hearing_impaired=item.get('hi', False) or self._is_hi(item),
page_link=urljoin("https://subdl.com", item.get('subtitlePage', '')),
download_link=item['url'],
file_id=item['name'],
release_names=item.get('releases', []),
uploader=item.get('author', ''),
season=item.get('season', None),
episode=item.get('episode', None),
)
subtitle.get_matches(self.video)
if subtitle.language in languages: # make sure only desired subtitles variants are returned
subtitles.append(subtitle)
return subtitles
@staticmethod
def _is_hi(item):
# Comments include specific mention of removed or non HI
non_hi_tag = ['hi remove', 'non hi', 'nonhi', 'non-hi', 'non-sdh', 'non sdh', 'nonsdh', 'sdh remove']
for tag in non_hi_tag:
if tag in item.get('comment', '').lower():
return False
# Archive filename include _HI_
if '_hi_' in item.get('name', '').lower():
return True
# Comments or release names include some specific strings
hi_keys = [item.get('comment', '').lower(), [x.lower() for x in item.get('releases', [])]]
hi_tag = ['_hi_', ' hi ', '.hi.', 'hi ', ' hi', 'sdh', '𝓢𝓓𝓗']
for key in hi_keys:
if any(x in key for x in hi_tag):
return True
# nothing matched, so we consider it non-HI
return False
@staticmethod
def _is_forced(item):
# Comments include specific mention of forced subtitles
forced_tags = ['forced', 'foreign']
for tag in forced_tags:
if tag in item.get('comment', '').lower():
return True
# nothing matched, so we consider it normal subtitles
return False
def list_subtitles(self, video, languages):
return self.query(languages, video)
def download_subtitle(self, subtitle):
logger.debug('Downloading subtitle %r', subtitle)
download_link = urljoin("https://dl.subdl.com", subtitle.download_link)
r = self.retry(
lambda: self.session.get(download_link, timeout=30),
amount=retry_amount,
retry_timeout=retry_timeout
)
if r.status_code == 429:
raise DownloadLimitExceeded("Daily download limit exceeded")
elif r.status_code == 403:
raise ConfigurationError("Invalid API key")
elif r.status_code != 200:
r.raise_for_status()
if not r:
logger.error(f'Could not download subtitle from {download_link}')
subtitle.content = None
return
else:
archive_stream = io.BytesIO(r.content)
if is_zipfile(archive_stream):
archive = ZipFile(archive_stream)
for name in archive.namelist():
# TODO when possible, deal with season pack / multiple files archive
subtitle_content = archive.read(name)
subtitle.content = fix_line_ending(subtitle_content)
return
else:
logger.error(f'Could not unzip subtitle from {download_link}')
subtitle.content = None
return
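Both detectors are keyword heuristics over the item's comment, name and release list; a few illustrative calls with hypothetical items:

SubdlProvider._is_hi({'name': 'Show.S01E01_HI_.zip', 'comment': '', 'releases': []})      # True: '_hi_' in the name
SubdlProvider._is_hi({'name': 'Show.zip', 'comment': 'non-SDH version', 'releases': []})  # False: explicit non-SDH comment wins
SubdlProvider._is_forced({'comment': 'Forced subtitles only'})                            # True: 'forced' in the comment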

View file

@@ -132,9 +132,9 @@ _DEFAULT_HEADERS = {
class Subf2mProvider(Provider):
provider_name = "subf2m"
_movie_title_regex = re.compile(r"^(.+?)( \((\d{4})\))?$")
_movie_title_regex = re.compile(r"^(.+?)(\s+\((\d{4})\))?$")
_tv_show_title_regex = re.compile(
r"^(.+?) [-\(]\s?(.*?) (season|series)\)?( \((\d{4})\))?$"
r"^(.+?)\s+[-\(]\s?(.*?)\s+(season|series)\)?(\s+\((\d{4})\))?$"
)
_tv_show_title_alt_regex = re.compile(r"(.+)\s(\d{1,2})(?:\s|$)")
_supported_languages = {}
@@ -220,7 +220,7 @@ class Subf2mProvider(Provider):
results = []
for result in self._gen_results(title):
text = result.text.lower()
text = result.text.strip().lower()
match = self._movie_title_regex.match(text)
if not match:
continue
@@ -254,7 +254,7 @@ class Subf2mProvider(Provider):
results = []
for result in self._gen_results(title):
text = result.text.lower()
text = result.text.strip().lower()
match = self._tv_show_title_regex.match(text)
if not match:

View file

@@ -455,7 +455,13 @@ class SuperSubtitlesProvider(Provider, ProviderSubtitleArchiveMixin):
soup = ParserBeautifulSoup(r, ['lxml'])
tables = soup.find_all("table")
tables = tables[0].find_all("tr")
try:
tables = tables[0].find_all("tr")
except IndexError:
logger.debug("No tables found for %s", url)
return []
i = 0
for table in tables:

View file

@@ -65,7 +65,7 @@ def _get_matching_sub(
guess = guessit(sub_name, options=guess_options)
matched_episode_num = guess.get("episode")
if matched_episode_num:
if not matched_episode_num:
logger.debug("No episode number found in file: %s", sub_name)
if episode_title is not None:
@@ -86,11 +86,13 @@ def _get_matching_sub(
return None
def _analize_sub_name(sub_name: str, title_):
titles = re.split(r"[.-]", os.path.splitext(sub_name)[0])
def _analize_sub_name(sub_name: str, title_: str):
titles = re.split(r"[\s_\.\+]?[.-][\s_\.\+]?", os.path.splitext(sub_name)[0])
for title in titles:
title = title.strip()
ratio = SequenceMatcher(None, title, title_).ratio()
ratio = SequenceMatcher(None, title.lower(), title_.lower()).ratio()
if ratio > 0.85:
logger.debug(
"Episode title matched: '%s' -> '%s' [%s]", title, sub_name, ratio

View file

@@ -316,7 +316,7 @@ class ZimukuProvider(Provider):
r = self.yunsuo_bypass(download_link, headers={'Referer': subtitle.page_link}, timeout=30)
r.raise_for_status()
try:
filename = r.headers["Content-Disposition"]
filename = r.headers["Content-Disposition"].lower()
except KeyError:
logger.debug("Unable to parse subtitles filename. Dropping this subtitles.")
return

View file

@@ -12,8 +12,9 @@ import chardet
import pysrt
import pysubs2
from bs4 import UnicodeDammit
from copy import deepcopy
from pysubs2 import SSAStyle
from pysubs2.subrip import parse_tags, MAX_REPRESENTABLE_TIME
from pysubs2.formats.subrip import parse_tags, MAX_REPRESENTABLE_TIME
from pysubs2.time import ms_to_times
from subzero.modification import SubtitleModifications
from subzero.language import Language
@@ -62,9 +63,14 @@ class Subtitle(Subtitle_):
_guessed_encoding = None
_is_valid = False
use_original_format = False
format = "srt" # default format is srt
# format = "srt" # default format is srt
def __init__(self, language, hearing_impaired=False, page_link=None, encoding=None, mods=None, original_format=False):
# language needs to be cloned because it is actually a reference to the provider language object
# if a new copy is not created then all subsequent subtitles for this provider will incorrectly be modified
# at least until Bazarr is restarted or the provider language object is recreated somehow
language = deepcopy(language)
# set subtitle language to hi if it's hearing_impaired
if hearing_impaired:
language = Language.rebuild(language, hi=True)
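
The comment above describes a classic aliasing bug: every subtitle from a provider shared one language object, so flagging one subtitle as hearing-impaired leaked into all of them. The hazard, sketched in TypeScript with illustrative names:

// One language object owned by the provider, handed to every subtitle.
const providerLanguage = { code2: "en", hi: false };

// Without a copy, mutating it for one subtitle changes it for all:
const aliased = providerLanguage;
aliased.hi = true;
console.log(providerLanguage.hi); // true - the shared object was modified

// The fix mirrors deepcopy(language): clone before touching anything.
const cloned = structuredClone(providerLanguage);
cloned.hi = false;
console.log(providerLanguage.hi, cloned.hi); // true false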
@ -74,6 +80,21 @@ class Subtitle(Subtitle_):
self.mods = mods
self._is_valid = False
self.use_original_format = original_format
self._og_format = None
@property
def format(self):
if self.use_original_format and self._og_format is not None:
logger.debug("Original format requested [%s]", self._og_format)
return self._og_format
logger.debug("Will assume srt format")
return "srt"
# Compatibility
@format.setter
def format(self, val):
self._og_format = val
def __repr__(self):
r_info = str(self.release_info or "").replace("\n", " | ").strip()
@ -260,7 +281,7 @@ class Subtitle(Subtitle_):
return encoding
def is_valid(self):
"""Check if a :attr:`text` is a valid SubRip format. Note that orignal format will pypass the checking
"""Check if a :attr:`text` is a valid SubRip format. Note that original format will bypass the checking
:return: whether or not the subtitle is valid.
:rtype: bool
@ -292,11 +313,13 @@ class Subtitle(Subtitle_):
logger.info("Got FPS from MicroDVD subtitle: %s", subs.fps)
else:
logger.info("Got format: %s", subs.format)
if self.use_original_format:
self.format = subs.format
self._is_valid = True
logger.debug("Using original format")
return True
self._og_format = subs.format
self._is_valid = True
# if self.use_original_format:
# self.format = subs.format
# self._is_valid = True
# logger.debug("Using original format")
return True
except pysubs2.UnknownFPSError:
# if parsing failed, use frame rate from provider
@ -340,7 +363,7 @@ class Subtitle(Subtitle_):
fragment = fragment.replace(r"\n", u"\n")
fragment = fragment.replace(r"\N", u"\n")
if sty.drawing:
raise pysubs2.ContentNotUsable
return None
if format == "srt":
if sty.italic:
@ -373,9 +396,10 @@ class Subtitle(Subtitle_):
for i, line in enumerate(visible_lines, 1):
start = ms_to_timestamp(line.start, mssep=mssep)
end = ms_to_timestamp(line.end, mssep=mssep)
try:
text = prepare_text(line.text, sub.styles.get(line.style, SSAStyle.DEFAULT_STYLE))
except pysubs2.ContentNotUsable:
text = prepare_text(line.text, sub.styles.get(line.style, SSAStyle.DEFAULT_STYLE))
if text is None:
continue
out.append(u"%d\n" % i)

View file

@ -35,6 +35,8 @@ class Video(Video_):
info_url=None,
series_anidb_id=None,
series_anidb_episode_id=None,
series_anidb_season_episode_offset=None,
anilist_id=None,
**kwargs
):
super(Video, self).__init__(
@ -61,3 +63,5 @@ class Video(Video_):
self.info_url = info_url
self.series_anidb_series_id = series_anidb_id  # no trailing comma: "x = y," would silently store a one-element tuple
self.series_anidb_episode_id = series_anidb_episode_id
self.series_anidb_season_episode_offset = series_anidb_season_episode_offset
self.anilist_id = anilist_id

View file

@ -2,18 +2,24 @@
"rules": {
"no-console": "error",
"camelcase": "warn",
"no-restricted-imports": [
"error",
{
"patterns": ["..*"]
}
],
"simple-import-sort/imports": "error",
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-empty-function": "warn",
"@typescript-eslint/no-empty-interface": "off",
"@typescript-eslint/no-unused-vars": "warn"
},
"extends": [
"react-app",
"plugin:react-hooks/recommended",
"eslint:recommended",
"plugin:react-hooks/recommended",
"plugin:@typescript-eslint/recommended"
],
"plugins": ["testing-library"],
"plugins": ["testing-library", "simple-import-sort", "react-refresh"],
"overrides": [
{
"files": [
@ -21,6 +27,44 @@
"**/?(*.)+(spec|test).[jt]s?(x)"
],
"extends": ["plugin:testing-library/react"]
},
{
"files": ["*.ts", "*.tsx"],
"rules": {
"simple-import-sort/imports": [
"error",
{
"groups": [
[
// React Packages
"^react",
// Mantine Packages
"^@mantine/",
// Vendor Packages
"^(\\w|@\\w)",
// Side Effect Imports
"^\\u0000",
// Internal Packages
"^@/\\w",
// Parent Imports
"^\\.\\.(?!/?$)",
"^\\.\\./?$",
// Relative Imports
"^\\./(?=.*/)(?!/?$)",
"^\\.(?!/?$)",
"^\\./?$",
// Style Imports
"^.+\\.?(css)$"
]
]
}
]
}
}
]
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"sourceType": "module",
"ecmaVersion": "latest"
}
}
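
With simple-import-sort configured this way, imports in .ts/.tsx files must follow the declared group order, separated by blank lines. A hypothetical module that satisfies the rule (module names besides "@/apis/hooks" are examples only):

import { useState } from "react";

import { Button } from "@mantine/core";

import axios from "axios";

import { useSystemSettings } from "@/apis/hooks";

import { helper } from "./utils";

import styles from "./Component.module.scss";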

frontend/.gitignore vendored
View file

@ -1,7 +1,7 @@
node_modules
dist
*.local
*.tsbuildinfo
build
coverage
*.tsbuildinfo
dev-dist
dist
node_modules

frontend/.nvmrc Normal file
View file

@ -0,0 +1 @@
20.13

View file

@ -2,9 +2,12 @@
## Dependencies
- [Node.js](https://nodejs.org/)
- Either [Node.js](https://nodejs.org/) installed manually or via [Node Version Manager](https://github.com/nvm-sh/nvm)
- npm (included in Node.js)
> The recommended and maintained Node version is pinned in the `.nvmrc` file. You can either install it manually
> or run `nvm install` followed by `nvm use`.
## Getting Started
1. Clone or download this repository

View file

@ -1,10 +1,11 @@
// eslint-disable-next-line no-restricted-imports
import { dependencies } from "../package.json";
const vendors = [
"react",
"react-router-dom",
"react-dom",
"react-query",
"@tanstack/react-query",
"axios",
"socket.io-client",
];

View file

@ -5,7 +5,17 @@
<base href="{{baseUrl}}" />
<meta charset="utf-8" />
<link rel="icon" type="image/x-icon" href="./images/favicon.ico" />
<link rel="manifest" href="manifest.json" />
<link
rel="apple-touch-icon"
href="./images/apple-touch-icon-180x180.png"
sizes="180x180"
/>
<link
rel="mask-icon"
href="./images/maskable-icon-512x512.png"
color="#FFFFFF"
/>
<meta name="theme-color" content="#be4bdb" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, minimum-scale=1, maximum-scale=1"

File diff suppressed because it is too large.

View file

@ -13,70 +13,81 @@
},
"private": true,
"dependencies": {
"@mantine/core": "^6.0.21",
"@mantine/dropzone": "^6.0.21",
"@mantine/form": "^6.0.21",
"@mantine/hooks": "^6.0.21",
"@mantine/modals": "^6.0.21",
"@mantine/notifications": "^6.0.21",
"@mantine/core": "^7.12.2",
"@mantine/dropzone": "^7.12.2",
"@mantine/form": "^7.12.2",
"@mantine/hooks": "^7.12.2",
"@mantine/modals": "^7.12.2",
"@mantine/notifications": "^7.12.2",
"@tanstack/react-query": "^5.40.1",
"@tanstack/react-table": "^8.19.2",
"axios": "^1.6.8",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-query": "^3.39.3",
"react-router-dom": "^6.22.3",
"braces": "^3.0.3",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-router-dom": "^6.23.1",
"socket.io-client": "^4.7.5"
},
"devDependencies": {
"@fontsource/roboto": "^5.0.12",
"@fortawesome/fontawesome-svg-core": "^6.5.2",
"@fortawesome/free-brands-svg-icons": "^6.5.2",
"@fortawesome/free-regular-svg-icons": "^6.5.2",
"@fortawesome/free-solid-svg-icons": "^6.5.2",
"@fortawesome/react-fontawesome": "^0.2.0",
"@fortawesome/fontawesome-svg-core": "^6.6.0",
"@fortawesome/free-brands-svg-icons": "^6.6.0",
"@fortawesome/free-regular-svg-icons": "^6.6.0",
"@fortawesome/free-solid-svg-icons": "^6.6.0",
"@fortawesome/react-fontawesome": "^0.2.2",
"@tanstack/react-query-devtools": "^5.40.1",
"@testing-library/jest-dom": "^6.4.2",
"@testing-library/react": "^15.0.5",
"@testing-library/user-event": "^14.5.2",
"@types/jest": "^29.5.12",
"@types/lodash": "^4.17.1",
"@types/node": "^20.12.6",
"@types/react": "^18.2.75",
"@types/react-dom": "^18.2.24",
"@types/react-table": "^7.7.20",
"@types/react": "^18.3.5",
"@types/react-dom": "^18.3.0",
"@typescript-eslint/eslint-plugin": "^7.16.0",
"@typescript-eslint/parser": "^7.16.0",
"@vite-pwa/assets-generator": "^0.2.4",
"@vitejs/plugin-react": "^4.2.1",
"@vitest/coverage-v8": "^1.4.0",
"@vitest/ui": "^1.2.2",
"clsx": "^2.1.0",
"eslint": "^8.57.0",
"eslint-config-react-app": "^7.0.1",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-react-refresh": "^0.4.7",
"eslint-plugin-simple-import-sort": "^12.1.0",
"eslint-plugin-testing-library": "^6.2.0",
"husky": "^9.0.11",
"jsdom": "^24.0.0",
"lodash": "^4.17.21",
"postcss-preset-mantine": "^1.14.4",
"postcss-simple-vars": "^7.0.1",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^3.2.4",
"pretty-quick": "^4.0.0",
"react-table": "^7.8.0",
"recharts": "^2.12.6",
"recharts": "^2.12.7",
"sass": "^1.74.1",
"typescript": "^5.4.4",
"vite": "^5.2.8",
"vite-plugin-checker": "^0.6.4",
"vite-plugin-pwa": "^0.20.0",
"vitest": "^1.2.2",
"yaml": "^2.4.1"
},
"scripts": {
"start": "vite",
"build": "vite build",
"build:ci": "vite build -m development",
"check": "eslint --ext .ts,.tsx src",
"check:fix": "eslint --ext .ts,.tsx src --fix",
"check:ts": "tsc --noEmit --incremental false",
"check:fmt": "prettier -c .",
"test": "vitest",
"test:ui": "vitest --ui",
"coverage": "vitest run --coverage",
"format": "prettier -w .",
"prepare": "cd .. && husky frontend/.husky"
"pwa-assets:generate": "pwa-assets-generator --preset minimal-2023 public/images/logo128.png",
"prepare": "cd .. && husky frontend/.husky",
"preview": "vite preview",
"start": "vite",
"test": "vitest",
"test:ui": "vitest --ui"
},
"browserslist": {
"production": [

View file

@ -0,0 +1,14 @@
module.exports = {
plugins: {
"postcss-preset-mantine": {},
"postcss-simple-vars": {
variables: {
"mantine-breakpoint-xs": "36em",
"mantine-breakpoint-sm": "48em",
"mantine-breakpoint-md": "62em",
"mantine-breakpoint-lg": "75em",
"mantine-breakpoint-xl": "88em",
},
},
},
};

Binary image files changed (diffs not shown): nine new images added (1.7 KiB, 8.4 KiB, 2.9 KiB, 11 KiB, 866 B, 82 KiB, 93 KiB, 132 KiB, 241 KiB) and one 4.6 KiB image removed.

View file

@ -1,26 +0,0 @@
{
"name": "Bazarr",
"short_name": "Bazarr",
"description": "Bazarr is a companion application to Sonarr and Radarr. It manages and downloads subtitles based on your requirements.",
"start_url": "/",
"display": "standalone",
"theme_color": "#be4bdb",
"background_color": "#ffffff",
"icons": [
{
"src": "/images/android-chrome-96x96.png",
"sizes": "96x96",
"type": "image/png"
},
{
"src": "/images/apple-touch-icon.png",
"sizes": "180x180",
"type": "image/png"
},
{
"src": "/images/mstile-150x150.png",
"sizes": "150x150",
"type": "image/png"
}
]
}

View file

@ -0,0 +1,9 @@
.header {
@include light {
color: var(--mantine-color-gray-0);
}
@include dark {
color: var(--mantine-color-dark-0);
}
}

View file

@ -1,38 +1,26 @@
import { useSystem, useSystemSettings } from "@/apis/hooks";
import { Action, Search } from "@/components";
import { Layout } from "@/constants";
import { useNavbar } from "@/contexts/Navbar";
import { useIsOnline } from "@/contexts/Online";
import { Environment, useGotoHomepage } from "@/utilities";
import { FunctionComponent } from "react";
import {
Anchor,
AppShell,
Avatar,
Badge,
Burger,
Divider,
Group,
Menu,
} from "@mantine/core";
import {
faArrowRotateLeft,
faGear,
faPowerOff,
} from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import {
Anchor,
Avatar,
Badge,
Burger,
Divider,
Group,
Header,
MediaQuery,
Menu,
createStyles,
} from "@mantine/core";
import { FunctionComponent } from "react";
const useStyles = createStyles((theme) => {
const headerBackgroundColor =
theme.colorScheme === "light" ? theme.colors.gray[0] : theme.colors.dark[4];
return {
header: {
backgroundColor: headerBackgroundColor,
},
};
});
import { useSystem, useSystemSettings } from "@/apis/hooks";
import { Action, Search } from "@/components";
import { useNavbar } from "@/contexts/Navbar";
import { useIsOnline } from "@/contexts/Online";
import { Environment, useGotoHomepage } from "@/utilities";
import styles from "./Header.module.scss";
const AppHeader: FunctionComponent = () => {
const { data: settings } = useSystemSettings();
@ -47,39 +35,28 @@ const AppHeader: FunctionComponent = () => {
const goHome = useGotoHomepage();
const { classes } = useStyles();
return (
<Header p="md" height={Layout.HEADER_HEIGHT} className={classes.header}>
<Group position="apart" noWrap>
<Group noWrap>
<MediaQuery
smallerThan={Layout.MOBILE_BREAKPOINT}
styles={{ display: "none" }}
>
<Anchor onClick={goHome}>
<Avatar
alt="brand"
size={32}
src={`${Environment.baseUrl}/images/logo64.png`}
></Avatar>
</Anchor>
</MediaQuery>
<MediaQuery
largerThan={Layout.MOBILE_BREAKPOINT}
styles={{ display: "none" }}
>
<Burger
opened={showed}
onClick={() => show(!showed)}
size="sm"
></Burger>
</MediaQuery>
<Badge size="lg" radius="sm">
<AppShell.Header p="md" className={styles.header}>
<Group justify="space-between" wrap="nowrap">
<Group wrap="nowrap">
<Anchor onClick={goHome} visibleFrom="sm">
<Avatar
alt="brand"
size={32}
src={`${Environment.baseUrl}/images/logo64.png`}
></Avatar>
</Anchor>
<Burger
opened={showed}
onClick={() => show(!showed)}
size="sm"
hiddenFrom="sm"
></Burger>
<Badge size="lg" radius="sm" variant="brand">
Bazarr
</Badge>
</Group>
<Group spacing="xs" position="right" noWrap>
<Group gap="xs" justify="right" wrap="nowrap">
<Search></Search>
<Menu>
<Menu.Target>
@ -87,21 +64,20 @@ const AppHeader: FunctionComponent = () => {
label="System"
tooltip={{ position: "left", openDelay: 2000 }}
loading={offline}
color={offline ? "yellow" : undefined}
c={offline ? "yellow" : undefined}
icon={faGear}
size="lg"
variant="light"
></Action>
</Menu.Target>
<Menu.Dropdown>
<Menu.Item
icon={<FontAwesomeIcon icon={faArrowRotateLeft} />}
leftSection={<FontAwesomeIcon icon={faArrowRotateLeft} />}
onClick={() => restart()}
>
Restart
</Menu.Item>
<Menu.Item
icon={<FontAwesomeIcon icon={faPowerOff} />}
leftSection={<FontAwesomeIcon icon={faPowerOff} />}
onClick={() => shutdown()}
>
Shutdown
@ -114,7 +90,7 @@ const AppHeader: FunctionComponent = () => {
</Menu>
</Group>
</Group>
</Header>
</AppShell.Header>
);
};

View file

@ -0,0 +1,56 @@
.anchor {
border-color: var(--mantine-color-gray-5);
text-decoration: none;
@include dark {
border-color: var(--mantine-color-dark-5);
}
&.active {
border-left: 2px solid $color-brand-4;
background-color: var(--mantine-color-gray-1);
@include dark {
border-left: 2px solid $color-brand-8;
background-color: var(--mantine-color-dark-8);
}
}
&.hover {
background-color: var(--mantine-color-gray-0);
@include dark {
background-color: var(--mantine-color-dark-7);
}
}
}
.badge {
margin-left: auto;
text-decoration: none;
box-shadow: var(--mantine-shadow-xs);
}
.icon {
width: 1.4rem;
margin-right: var(--mantine-spacing-xs);
}
.nav {
background-color: var(--mantine-color-gray-2);
@include dark {
background-color: var(--mantine-color-dark-8);
}
}
.text {
display: inline-flex;
align-items: center;
width: 100%;
color: var(--mantine-color-gray-8);
@include dark {
color: var(--mantine-color-gray-5);
}
}

View file

@ -1,32 +1,4 @@
import { Action } from "@/components";
import { Layout } from "@/constants";
import { useNavbar } from "@/contexts/Navbar";
import { useRouteItems } from "@/Router";
import { CustomRouteObject, Route } from "@/Router/type";
import { BuildKey, pathJoin } from "@/utilities";
import { LOG } from "@/utilities/console";
import {
faHeart,
faMoon,
faSun,
IconDefinition,
} from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import {
Anchor,
Badge,
Collapse,
createStyles,
Divider,
Group,
Navbar as MantineNavbar,
Stack,
Text,
useMantineColorScheme,
} from "@mantine/core";
import { useHover } from "@mantine/hooks";
import clsx from "clsx";
import {
import React, {
createContext,
FunctionComponent,
useContext,
@ -35,6 +7,34 @@ import {
useState,
} from "react";
import { matchPath, NavLink, RouteObject, useLocation } from "react-router-dom";
import {
Anchor,
AppShell,
Badge,
Collapse,
Divider,
Group,
Stack,
Text,
useComputedColorScheme,
useMantineColorScheme,
} from "@mantine/core";
import { useHover } from "@mantine/hooks";
import {
faHeart,
faMoon,
faSun,
IconDefinition,
} from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import clsx from "clsx";
import { Action } from "@/components";
import { useNavbar } from "@/contexts/Navbar";
import { useRouteItems } from "@/Router";
import { CustomRouteObject, Route } from "@/Router/type";
import { BuildKey, pathJoin } from "@/utilities";
import { LOG } from "@/utilities/console";
import styles from "./Navbar.module.scss";
const Selection = createContext<{
selection: string | null;
@ -97,11 +97,12 @@ function useIsActive(parent: string, route: RouteObject) {
}
const AppNavbar: FunctionComponent = () => {
const { showed } = useNavbar();
const [selection, select] = useState<string | null>(null);
const { colorScheme, toggleColorScheme } = useMantineColorScheme();
const dark = colorScheme === "dark";
const { toggleColorScheme } = useMantineColorScheme();
const computedColorScheme = useComputedColorScheme("light");
const dark = computedColorScheme === "dark";
const routes = useRouteItems();
@ -111,23 +112,10 @@ const AppNavbar: FunctionComponent = () => {
}, [pathname]);
return (
<MantineNavbar
p="xs"
hiddenBreakpoint={Layout.MOBILE_BREAKPOINT}
hidden={!showed}
width={{ [Layout.MOBILE_BREAKPOINT]: Layout.NAVBAR_WIDTH }}
styles={(theme) => ({
root: {
backgroundColor:
theme.colorScheme === "light"
? theme.colors.gray[2]
: theme.colors.dark[6],
},
})}
>
<AppShell.Navbar p="xs" className={styles.nav}>
<Selection.Provider value={{ selection, select }}>
<MantineNavbar.Section grow>
<Stack spacing={0}>
<AppShell.Section grow>
<Stack gap={0}>
{routes.map((route, idx) => (
<RouteItem
key={BuildKey("nav", idx)}
@ -136,14 +124,13 @@ const AppNavbar: FunctionComponent = () => {
></RouteItem>
))}
</Stack>
</MantineNavbar.Section>
</AppShell.Section>
<Divider></Divider>
<MantineNavbar.Section mt="xs">
<Group spacing="xs">
<AppShell.Section mt="xs">
<Group gap="xs">
<Action
label="Change Theme"
color={dark ? "yellow" : "indigo"}
variant="subtle"
c={dark ? "yellow" : "indigo"}
onClick={() => toggleColorScheme()}
icon={dark ? faSun : faMoon}
></Action>
@ -151,17 +138,12 @@ const AppNavbar: FunctionComponent = () => {
href="https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=XHHRWXT9YB7WE&source=url"
target="_blank"
>
<Action
label="Donate"
icon={faHeart}
variant="subtle"
color="red"
></Action>
<Action label="Donate" icon={faHeart} c="red"></Action>
</Anchor>
</Group>
</MantineNavbar.Section>
</AppShell.Section>
</Selection.Provider>
</MantineNavbar>
</AppShell.Navbar>
);
};
@ -186,7 +168,7 @@ const RouteItem: FunctionComponent<{
if (children !== undefined) {
const elements = (
<Stack spacing={0}>
<Stack gap={0}>
{children.map((child, idx) => (
<RouteItem
parent={link}
@ -199,7 +181,7 @@ const RouteItem: FunctionComponent<{
if (name) {
return (
<Stack spacing={0}>
<Stack gap={0}>
<NavbarItem
primary
name={name}
@ -244,53 +226,6 @@ const RouteItem: FunctionComponent<{
}
};
const useStyles = createStyles((theme) => {
const borderColor =
theme.colorScheme === "light" ? theme.colors.gray[5] : theme.colors.dark[4];
const activeBorderColor =
theme.colorScheme === "light"
? theme.colors.brand[4]
: theme.colors.brand[8];
const activeBackgroundColor =
theme.colorScheme === "light" ? theme.colors.gray[1] : theme.colors.dark[8];
const hoverBackgroundColor =
theme.colorScheme === "light" ? theme.colors.gray[0] : theme.colors.dark[7];
const textColor =
theme.colorScheme === "light" ? theme.colors.gray[8] : theme.colors.gray[5];
return {
text: {
display: "inline-flex",
alignItems: "center",
width: "100%",
color: textColor,
},
anchor: {
textDecoration: "none",
borderLeft: `2px solid ${borderColor}`,
},
active: {
backgroundColor: activeBackgroundColor,
borderLeft: `2px solid ${activeBorderColor}`,
boxShadow: theme.shadows.xs,
},
hover: {
backgroundColor: hoverBackgroundColor,
},
icon: { width: "1.4rem", marginRight: theme.spacing.xs },
badge: {
marginLeft: "auto",
textDecoration: "none",
boxShadow: theme.shadows.xs,
color: textColor,
},
};
});
interface NavbarItemProps {
name: string;
link: string;
@ -308,8 +243,6 @@ const NavbarItem: FunctionComponent<NavbarItemProps> = ({
onClick,
primary = false,
}) => {
const { classes } = useStyles();
const { show } = useNavbar();
const { ref, hovered } = useHover();
@ -335,9 +268,9 @@ const NavbarItem: FunctionComponent<NavbarItemProps> = ({
}}
className={({ isActive }) =>
clsx(
clsx(classes.anchor, {
[classes.active]: isActive,
[classes.hover]: hovered,
clsx(styles.anchor, {
[styles.active]: isActive,
[styles.hover]: hovered,
}),
)
}
@ -347,18 +280,19 @@ const NavbarItem: FunctionComponent<NavbarItemProps> = ({
inline
p="xs"
size="sm"
weight={primary ? "bold" : "normal"}
className={classes.text}
fw={primary ? "bold" : "normal"}
className={styles.text}
span
>
{icon && (
<FontAwesomeIcon
className={classes.icon}
className={styles.icon}
icon={icon}
></FontAwesomeIcon>
)}
{name}
{shouldHideBadge === false && (
<Badge className={classes.badge} radius="xs">
{!shouldHideBadge && (
<Badge className={styles.badge} radius="xs">
{badge}
</Badge>
)}

View file

@ -0,0 +1,39 @@
import { useCallback, useEffect, useState } from "react";
import { MantineColorScheme, useMantineColorScheme } from "@mantine/core";
import { useSystemSettings } from "@/apis/hooks";
const ThemeProvider = () => {
const [localScheme, setLocalScheme] = useState<MantineColorScheme | null>(
null,
);
const { setColorScheme } = useMantineColorScheme();
const settings = useSystemSettings();
const settingsColorScheme = settings.data?.general
.theme as MantineColorScheme;
const setScheme = useCallback(
(colorScheme: MantineColorScheme) => {
setColorScheme(colorScheme);
},
[setColorScheme],
);
useEffect(() => {
if (!settingsColorScheme) {
return;
}
if (localScheme === settingsColorScheme) {
return;
}
setScheme(settingsColorScheme);
setLocalScheme(settingsColorScheme);
}, [settingsColorScheme, setScheme, localScheme]);
return <></>;
};
export default ThemeProvider;

View file

@ -0,0 +1,61 @@
import { FunctionComponent, PropsWithChildren } from "react";
import {
ActionIcon,
Badge,
Button,
createTheme,
MantineProvider,
Pagination,
} from "@mantine/core";
import ThemeLoader from "@/App/ThemeLoader";
import "@mantine/core/styles.layer.css";
import "@mantine/notifications/styles.layer.css";
import styleVars from "@/assets/_variables.module.scss";
import actionIconClasses from "@/assets/action_icon.module.scss";
import badgeClasses from "@/assets/badge.module.scss";
import buttonClasses from "@/assets/button.module.scss";
import paginationClasses from "@/assets/pagination.module.scss";
const themeProvider = createTheme({
fontFamily: "Roboto, open sans, Helvetica Neue, Helvetica, Arial, sans-serif",
colors: {
brand: [
styleVars.colorBrand0,
styleVars.colorBrand1,
styleVars.colorBrand2,
styleVars.colorBrand3,
styleVars.colorBrand4,
styleVars.colorBrand5,
styleVars.colorBrand6,
styleVars.colorBrand7,
styleVars.colorBrand8,
styleVars.colorBrand9,
],
},
primaryColor: "brand",
components: {
ActionIcon: ActionIcon.extend({
classNames: actionIconClasses,
}),
Badge: Badge.extend({
classNames: badgeClasses,
}),
Button: Button.extend({
classNames: buttonClasses,
}),
Pagination: Pagination.extend({
classNames: paginationClasses,
}),
},
});
const ThemeProvider: FunctionComponent<PropsWithChildren> = ({ children }) => {
return (
<MantineProvider theme={themeProvider} defaultColorScheme="auto">
<ThemeLoader />
{children}
</MantineProvider>
);
};
export default ThemeProvider;

View file

@ -1,5 +1,5 @@
import { render } from "@/tests";
import { describe, it } from "vitest";
import { render } from "@/tests";
import App from ".";
describe("App", () => {

View file

@ -1,18 +1,18 @@
import { FunctionComponent, useEffect, useState } from "react";
import { Outlet, useNavigate } from "react-router-dom";
import { AppShell } from "@mantine/core";
import { useWindowEvent } from "@mantine/hooks";
import { showNotification } from "@mantine/notifications";
import AppNavbar from "@/App/Navbar";
import { RouterNames } from "@/Router/RouterNames";
import ErrorBoundary from "@/components/ErrorBoundary";
import { Layout } from "@/constants";
import NavbarProvider from "@/contexts/Navbar";
import OnlineProvider from "@/contexts/Online";
import { notification } from "@/modules/task";
import CriticalError from "@/pages/errors/CriticalError";
import { RouterNames } from "@/Router/RouterNames";
import { Environment } from "@/utilities";
import { AppShell } from "@mantine/core";
import { useWindowEvent } from "@mantine/hooks";
import { showNotification } from "@mantine/notifications";
import { FunctionComponent, useEffect, useState } from "react";
import { Outlet, useNavigate } from "react-router-dom";
import AppHeader from "./Header";
import styleVars from "@/assets/_variables.module.scss";
const App: FunctionComponent = () => {
const navigate = useNavigate();
@ -55,13 +55,19 @@ const App: FunctionComponent = () => {
<NavbarProvider value={{ showed: navbar, show: setNavbar }}>
<OnlineProvider value={{ online, setOnline }}>
<AppShell
navbarOffsetBreakpoint={Layout.MOBILE_BREAKPOINT}
header={<AppHeader></AppHeader>}
navbar={<AppNavbar></AppNavbar>}
navbar={{
width: styleVars.navBarWidth,
breakpoint: "sm",
collapsed: { mobile: !navbar },
}}
header={{ height: { base: styleVars.headerHeight } }}
padding={0}
fixed
>
<Outlet></Outlet>
<AppHeader></AppHeader>
<AppNavbar></AppNavbar>
<AppShell.Main>
<Outlet></Outlet>
</AppShell.Main>
</AppShell>
</OnlineProvider>
</NavbarProvider>

View file

@ -1,87 +0,0 @@
import { useSystemSettings } from "@/apis/hooks";
import {
ColorScheme,
ColorSchemeProvider,
createEmotionCache,
MantineProvider,
MantineThemeOverride,
} from "@mantine/core";
import { useColorScheme } from "@mantine/hooks";
import {
FunctionComponent,
PropsWithChildren,
useCallback,
useEffect,
useState,
} from "react";
const theme: MantineThemeOverride = {
fontFamily: "Roboto, open sans, Helvetica Neue, Helvetica, Arial, sans-serif",
colors: {
brand: [
"#F8F0FC",
"#F3D9FA",
"#EEBEFA",
"#E599F7",
"#DA77F2",
"#CC5DE8",
"#BE4BDB",
"#AE3EC9",
"#9C36B5",
"#862E9C",
],
},
primaryColor: "brand",
};
function useAutoColorScheme() {
const settings = useSystemSettings();
const settingsColorScheme = settings.data?.general.theme;
let preferredColorScheme: ColorScheme = useColorScheme();
switch (settingsColorScheme) {
case "light":
preferredColorScheme = "light" as ColorScheme;
break;
case "dark":
preferredColorScheme = "dark" as ColorScheme;
break;
}
const [colorScheme, setColorScheme] = useState(preferredColorScheme);
// automatically switch dark/light theme
useEffect(() => {
setColorScheme(preferredColorScheme);
}, [preferredColorScheme]);
const toggleColorScheme = useCallback((value?: ColorScheme) => {
setColorScheme((scheme) => value || (scheme === "dark" ? "light" : "dark"));
}, []);
return { colorScheme, setColorScheme, toggleColorScheme };
}
const emotionCache = createEmotionCache({ key: "bazarr" });
const ThemeProvider: FunctionComponent<PropsWithChildren> = ({ children }) => {
const { colorScheme, toggleColorScheme } = useAutoColorScheme();
return (
<ColorSchemeProvider
colorScheme={colorScheme}
toggleColorScheme={toggleColorScheme}
>
<MantineProvider
withGlobalStyles
withNormalizeCSS
theme={{ colorScheme, ...theme }}
emotionCache={emotionCache}
>
{children}
</MantineProvider>
</ColorSchemeProvider>
);
};
export default ThemeProvider;

View file

@ -1,7 +1,7 @@
import { useSystemSettings } from "@/apis/hooks";
import { LoadingOverlay } from "@mantine/core";
import { FunctionComponent, useEffect } from "react";
import { useNavigate } from "react-router-dom";
import { LoadingOverlay } from "@mantine/core";
import { useSystemSettings } from "@/apis/hooks";
const Redirector: FunctionComponent = () => {
const { data } = useSystemSettings();

View file

@ -1,11 +1,29 @@
import App from "@/App";
import {
createContext,
FunctionComponent,
lazy,
useContext,
useMemo,
} from "react";
import { createBrowserRouter, RouterProvider } from "react-router-dom";
import {
faClock,
faCogs,
faExclamationTriangle,
faFileExcel,
faFilm,
faLaptop,
faPlay,
} from "@fortawesome/free-solid-svg-icons";
import { useBadges } from "@/apis/hooks";
import { useEnabledStatus } from "@/apis/hooks/site";
import App from "@/App";
import { Lazy } from "@/components/async";
import Authentication from "@/pages/Authentication";
import BlacklistMoviesView from "@/pages/Blacklist/Movies";
import BlacklistSeriesView from "@/pages/Blacklist/Series";
import Episodes from "@/pages/Episodes";
import NotFound from "@/pages/errors/NotFound";
import MoviesHistoryView from "@/pages/History/Movies";
import SeriesHistoryView from "@/pages/History/Series";
import MovieView from "@/pages/Movies";
@ -30,30 +48,14 @@ import SystemReleasesView from "@/pages/System/Releases";
import SystemTasksView from "@/pages/System/Tasks";
import WantedMoviesView from "@/pages/Wanted/Movies";
import WantedSeriesView from "@/pages/Wanted/Series";
import NotFound from "@/pages/errors/NotFound";
import { Environment } from "@/utilities";
import {
faClock,
faCogs,
faExclamationTriangle,
faFileExcel,
faFilm,
faLaptop,
faPlay,
} from "@fortawesome/free-solid-svg-icons";
import {
FunctionComponent,
createContext,
lazy,
useContext,
useMemo,
} from "react";
import { RouterProvider, createBrowserRouter } from "react-router-dom";
import Redirector from "./Redirector";
import { RouterNames } from "./RouterNames";
import { CustomRouteObject } from "./type";
const HistoryStats = lazy(() => import("@/pages/History/Statistics"));
const HistoryStats = lazy(
() => import("@/pages/History/Statistics/HistoryStats"),
);
const SystemStatusView = lazy(() => import("@/pages/System/Status"));
function useRoutes(): CustomRouteObject[] {

View file

@ -1,5 +1,5 @@
import { IconDefinition } from "@fortawesome/free-solid-svg-icons";
import { RouteObject } from "react-router-dom";
import { IconDefinition } from "@fortawesome/free-solid-svg-icons";
declare namespace Route {
export type Item = {

View file

@ -1,12 +1,13 @@
import { useEffect } from "react";
import {
QueryClient,
useMutation,
useQuery,
useQueryClient,
} from "react-query";
import { usePaginationQuery } from "../queries/hooks";
import { QueryKeys } from "../queries/keys";
import api from "../raw";
} from "@tanstack/react-query";
import { usePaginationQuery } from "@/apis/queries/hooks";
import { QueryKeys } from "@/apis/queries/keys";
import api from "@/apis/raw";
const cacheEpisodes = (client: QueryClient, episodes: Item.Episode[]) => {
episodes.forEach((item) => {
@ -24,30 +25,21 @@ const cacheEpisodes = (client: QueryClient, episodes: Item.Episode[]) => {
});
};
export function useEpisodesByIds(ids: number[]) {
const client = useQueryClient();
return useQuery(
[QueryKeys.Series, QueryKeys.Episodes, ids],
() => api.episodes.byEpisodeId(ids),
{
onSuccess: (data) => {
cacheEpisodes(client, data);
},
},
);
}
export function useEpisodesBySeriesId(id: number) {
const client = useQueryClient();
return useQuery(
[QueryKeys.Series, id, QueryKeys.Episodes, QueryKeys.All],
() => api.episodes.bySeriesId([id]),
{
onSuccess: (data) => {
cacheEpisodes(client, data);
},
},
);
const query = useQuery({
queryKey: [QueryKeys.Series, id, QueryKeys.Episodes, QueryKeys.All],
queryFn: () => api.episodes.bySeriesId([id]),
});
useEffect(() => {
if (query.isSuccess && query.data) {
cacheEpisodes(client, query.data);
}
}, [query.isSuccess, query.data, client]);
return query;
}
export function useEpisodeWantedPagination() {
@ -57,17 +49,18 @@ export function useEpisodeWantedPagination() {
}
export function useEpisodeBlacklist() {
return useQuery(
[QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
() => api.episodes.blacklist(),
);
return useQuery({
queryKey: [QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
queryFn: () => api.episodes.blacklist(),
});
}
export function useEpisodeAddBlacklist() {
const client = useQueryClient();
return useMutation(
[QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
(param: {
return useMutation({
mutationKey: [QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
mutationFn: (param: {
seriesId: number;
episodeId: number;
form: FormType.AddBlacklist;
@ -75,35 +68,33 @@ export function useEpisodeAddBlacklist() {
const { seriesId, episodeId, form } = param;
return api.episodes.addBlacklist(seriesId, episodeId, form);
},
{
onSuccess: (_, { seriesId, episodeId }) => {
client.invalidateQueries([
QueryKeys.Series,
QueryKeys.Episodes,
QueryKeys.Blacklist,
]);
client.invalidateQueries([QueryKeys.Series, seriesId]);
},
onSuccess: (_, { seriesId }) => {
void client.invalidateQueries({
queryKey: [QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
});
void client.invalidateQueries({
queryKey: [QueryKeys.Series, seriesId],
});
},
);
});
}
export function useEpisodeDeleteBlacklist() {
const client = useQueryClient();
return useMutation(
[QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
(param: { all?: boolean; form?: FormType.DeleteBlacklist }) =>
return useMutation({
mutationKey: [QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
mutationFn: (param: { all?: boolean; form?: FormType.DeleteBlacklist }) =>
api.episodes.deleteBlacklist(param.all, param.form),
{
onSuccess: (_, param) => {
client.invalidateQueries([
QueryKeys.Series,
QueryKeys.Episodes,
QueryKeys.Blacklist,
]);
},
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.Series, QueryKeys.Episodes, QueryKeys.Blacklist],
});
},
);
});
}
export function useEpisodeHistoryPagination() {
@ -115,12 +106,20 @@ export function useEpisodeHistoryPagination() {
}
export function useEpisodeHistory(episodeId?: number) {
return useQuery(
[QueryKeys.Series, QueryKeys.Episodes, QueryKeys.History, episodeId],
() => {
return useQuery({
queryKey: [
QueryKeys.Series,
QueryKeys.Episodes,
QueryKeys.History,
episodeId,
],
queryFn: () => {
if (episodeId) {
return api.episodes.historyBy(episodeId);
}
return [];
},
);
});
}
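
The hunks above are the mechanical part of the @tanstack/react-query v5 migration: the old positional useQuery/useMutation signatures become a single options object, and because v5 removed the onSuccess callback from useQuery, the cache-priming moved into a useEffect. A minimal sketch of the recurring shape change, with a hypothetical key and fetcher:

import { useQuery } from "@tanstack/react-query";

// Hypothetical fetcher standing in for the api.* calls above.
declare function fetchItems(): Promise<string[]>;

// v3 (react-query): positional arguments, options object last.
//   useQuery(["items"], () => fetchItems(), { staleTime: Infinity });

// v5 (@tanstack/react-query): one object holding key, fn and options.
export function useItems() {
  return useQuery({
    queryKey: ["items"],
    queryFn: () => fetchItems(),
    staleTime: Infinity,
  });
}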

View file

@ -1,6 +1,6 @@
import { useQuery } from "react-query";
import { QueryKeys } from "../queries/keys";
import api from "../raw";
import { useQuery } from "@tanstack/react-query";
import { QueryKeys } from "@/apis/queries/keys";
import api from "@/apis/raw";
export function useHistoryStats(
time: History.TimeFrameOptions,
@ -8,14 +8,19 @@ export function useHistoryStats(
provider: System.Provider | null,
language: Language.Info | null,
) {
return useQuery(
[QueryKeys.System, QueryKeys.History, { time, action, provider, language }],
() =>
return useQuery({
queryKey: [
QueryKeys.System,
QueryKeys.History,
{ time, action, provider, language },
],
queryFn: () =>
api.history.stats(
time,
action ?? undefined,
provider?.name,
language?.code2,
),
);
});
}

View file

@ -1,23 +1,19 @@
import { useQuery } from "react-query";
import { QueryKeys } from "../queries/keys";
import api from "../raw";
import { useQuery } from "@tanstack/react-query";
import { QueryKeys } from "@/apis/queries/keys";
import api from "@/apis/raw";
export function useLanguages(history?: boolean) {
return useQuery(
[QueryKeys.System, QueryKeys.Languages, history ?? false],
() => api.system.languages(history),
{
staleTime: Infinity,
},
);
return useQuery({
queryKey: [QueryKeys.System, QueryKeys.Languages, history ?? false],
queryFn: () => api.system.languages(history),
staleTime: Infinity,
});
}
export function useLanguageProfiles() {
return useQuery(
[QueryKeys.System, QueryKeys.LanguagesProfiles],
() => api.system.languagesProfileList(),
{
staleTime: Infinity,
},
);
return useQuery({
queryKey: [QueryKeys.System, QueryKeys.LanguagesProfiles],
queryFn: () => api.system.languagesProfileList(),
staleTime: Infinity,
});
}

View file

@ -1,12 +1,13 @@
import { useEffect } from "react";
import {
QueryClient,
useMutation,
useQuery,
useQueryClient,
} from "react-query";
import { usePaginationQuery } from "../queries/hooks";
import { QueryKeys } from "../queries/keys";
import api from "../raw";
} from "@tanstack/react-query";
import { usePaginationQuery } from "@/apis/queries/hooks";
import { QueryKeys } from "@/apis/queries/keys";
import api from "@/apis/raw";
const cacheMovies = (client: QueryClient, movies: Item.Movie[]) => {
movies.forEach((item) => {
@ -14,33 +15,32 @@ const cacheMovies = (client: QueryClient, movies: Item.Movie[]) => {
});
};
export function useMoviesByIds(ids: number[]) {
const client = useQueryClient();
return useQuery([QueryKeys.Movies, ...ids], () => api.movies.movies(ids), {
onSuccess: (data) => {
cacheMovies(client, data);
},
});
}
export function useMovieById(id: number) {
return useQuery([QueryKeys.Movies, id], async () => {
const response = await api.movies.movies([id]);
return response.length > 0 ? response[0] : undefined;
return useQuery({
queryKey: [QueryKeys.Movies, id],
queryFn: async () => {
const response = await api.movies.movies([id]);
return response.length > 0 ? response[0] : undefined;
},
});
}
export function useMovies() {
const client = useQueryClient();
return useQuery(
[QueryKeys.Movies, QueryKeys.All],
() => api.movies.movies(),
{
onSuccess: (data) => {
cacheMovies(client, data);
},
},
);
const query = useQuery({
queryKey: [QueryKeys.Movies, QueryKeys.All],
queryFn: () => api.movies.movies(),
});
useEffect(() => {
if (query.isSuccess && query.data) {
cacheMovies(client, query.data);
}
}, [query.isSuccess, query.data, client]);
return query;
}
export function useMoviesPagination() {
@ -51,32 +51,37 @@ export function useMoviesPagination() {
export function useMovieModification() {
const client = useQueryClient();
return useMutation(
[QueryKeys.Movies],
(form: FormType.ModifyItem) => api.movies.modify(form),
{
onSuccess: (_, form) => {
form.id.forEach((v) => {
client.invalidateQueries([QueryKeys.Movies, v]);
return useMutation({
mutationKey: [QueryKeys.Movies],
mutationFn: (form: FormType.ModifyItem) => api.movies.modify(form),
onSuccess: (_, form) => {
form.id.forEach((v) => {
void client.invalidateQueries({
queryKey: [QueryKeys.Movies, v],
});
// TODO: query less
client.invalidateQueries([QueryKeys.Movies]);
},
});
// TODO: query less
void client.invalidateQueries({
queryKey: [QueryKeys.Movies],
});
},
);
});
}
export function useMovieAction() {
const client = useQueryClient();
return useMutation(
[QueryKeys.Actions, QueryKeys.Movies],
(form: FormType.MoviesAction) => api.movies.action(form),
{
onSuccess: () => {
client.invalidateQueries([QueryKeys.Movies]);
},
return useMutation({
mutationKey: [QueryKeys.Actions, QueryKeys.Movies],
mutationFn: (form: FormType.MoviesAction) => api.movies.action(form),
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.Movies],
});
},
);
});
}
export function useMovieWantedPagination() {
@ -86,40 +91,49 @@ export function useMovieWantedPagination() {
}
export function useMovieBlacklist() {
return useQuery([QueryKeys.Movies, QueryKeys.Blacklist], () =>
api.movies.blacklist(),
);
return useQuery({
queryKey: [QueryKeys.Movies, QueryKeys.Blacklist],
queryFn: () => api.movies.blacklist(),
});
}
export function useMovieAddBlacklist() {
const client = useQueryClient();
return useMutation(
[QueryKeys.Movies, QueryKeys.Blacklist],
(param: { id: number; form: FormType.AddBlacklist }) => {
return useMutation({
mutationKey: [QueryKeys.Movies, QueryKeys.Blacklist],
mutationFn: (param: { id: number; form: FormType.AddBlacklist }) => {
const { id, form } = param;
return api.movies.addBlacklist(id, form);
},
{
onSuccess: (_, { id }) => {
client.invalidateQueries([QueryKeys.Movies, QueryKeys.Blacklist]);
client.invalidateQueries([QueryKeys.Movies, id]);
},
onSuccess: (_, { id }) => {
void client.invalidateQueries({
queryKey: [QueryKeys.Movies, QueryKeys.Blacklist],
});
void client.invalidateQueries({
queryKey: [QueryKeys.Movies, id],
});
},
);
});
}
export function useMovieDeleteBlacklist() {
const client = useQueryClient();
return useMutation(
[QueryKeys.Movies, QueryKeys.Blacklist],
(param: { all?: boolean; form?: FormType.DeleteBlacklist }) =>
return useMutation({
mutationKey: [QueryKeys.Movies, QueryKeys.Blacklist],
mutationFn: (param: { all?: boolean; form?: FormType.DeleteBlacklist }) =>
api.movies.deleteBlacklist(param.all, param.form),
{
onSuccess: (_, param) => {
client.invalidateQueries([QueryKeys.Movies, QueryKeys.Blacklist]);
},
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.Movies, QueryKeys.Blacklist],
});
},
);
});
}
export function useMovieHistoryPagination() {
@ -131,9 +145,15 @@ export function useMovieHistoryPagination() {
}
export function useMovieHistory(radarrId?: number) {
return useQuery([QueryKeys.Movies, QueryKeys.History, radarrId], () => {
if (radarrId) {
return api.movies.historyBy(radarrId);
}
return useQuery({
queryKey: [QueryKeys.Movies, QueryKeys.History, radarrId],
queryFn: () => {
if (radarrId) {
return api.movies.historyBy(radarrId);
}
return [];
},
});
}

View file

@ -1,66 +1,82 @@
import { useMutation, useQuery, useQueryClient } from "react-query";
import { QueryKeys } from "../queries/keys";
import api from "../raw";
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import { QueryKeys } from "@/apis/queries/keys";
import api from "@/apis/raw";
export function useSystemProviders(history?: boolean) {
return useQuery(
[QueryKeys.System, QueryKeys.Providers, history ?? false],
() => api.providers.providers(history),
);
return useQuery({
queryKey: [QueryKeys.System, QueryKeys.Providers, history ?? false],
queryFn: () => api.providers.providers(history),
});
}
export function useMoviesProvider(radarrId?: number) {
return useQuery(
[QueryKeys.System, QueryKeys.Providers, QueryKeys.Movies, radarrId],
() => {
return useQuery({
queryKey: [
QueryKeys.System,
QueryKeys.Providers,
QueryKeys.Movies,
radarrId,
],
queryFn: () => {
if (radarrId) {
return api.providers.movies(radarrId);
}
return [];
},
{
staleTime: 0,
},
);
staleTime: 0,
});
}
export function useEpisodesProvider(episodeId?: number) {
return useQuery(
[QueryKeys.System, QueryKeys.Providers, QueryKeys.Episodes, episodeId],
() => {
return useQuery({
queryKey: [
QueryKeys.System,
QueryKeys.Providers,
QueryKeys.Episodes,
episodeId,
],
queryFn: () => {
if (episodeId) {
return api.providers.episodes(episodeId);
}
return [];
},
{
staleTime: 0,
},
);
staleTime: 0,
});
}
export function useResetProvider() {
const client = useQueryClient();
return useMutation(
[QueryKeys.System, QueryKeys.Providers],
() => api.providers.reset(),
{
onSuccess: () => {
client.invalidateQueries([QueryKeys.System, QueryKeys.Providers]);
},
return useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Providers],
mutationFn: () => api.providers.reset(),
onSuccess: () => {
client.invalidateQueries({
queryKey: [QueryKeys.System, QueryKeys.Providers],
});
},
);
});
}
export function useDownloadEpisodeSubtitles() {
const client = useQueryClient();
return useMutation(
[
return useMutation({
mutationKey: [
QueryKeys.System,
QueryKeys.Providers,
QueryKeys.Subtitles,
QueryKeys.Episodes,
],
(param: {
mutationFn: (param: {
seriesId: number;
episodeId: number;
form: FormType.ManualDownload;
@ -70,30 +86,33 @@ export function useDownloadEpisodeSubtitles() {
param.episodeId,
param.form,
),
{
onSuccess: (_, param) => {
client.invalidateQueries([QueryKeys.Series, param.seriesId]);
},
onSuccess: (_, param) => {
client.invalidateQueries({
queryKey: [QueryKeys.Series, param.seriesId],
});
},
);
});
}
export function useDownloadMovieSubtitles() {
const client = useQueryClient();
return useMutation(
[
return useMutation({
mutationKey: [
QueryKeys.System,
QueryKeys.Providers,
QueryKeys.Subtitles,
QueryKeys.Movies,
],
(param: { radarrId: number; form: FormType.ManualDownload }) =>
mutationFn: (param: { radarrId: number; form: FormType.ManualDownload }) =>
api.providers.downloadMovieSubtitle(param.radarrId, param.form),
{
onSuccess: (_, param) => {
client.invalidateQueries([QueryKeys.Movies, param.radarrId]);
},
onSuccess: (_, param) => {
client.invalidateQueries({
queryKey: [QueryKeys.Movies, param.radarrId],
});
},
);
});
}

View file

@ -1,12 +1,13 @@
import { useEffect } from "react";
import {
QueryClient,
useMutation,
useQuery,
useQueryClient,
} from "react-query";
import { usePaginationQuery } from "../queries/hooks";
import { QueryKeys } from "../queries/keys";
import api from "../raw";
} from "@tanstack/react-query";
import { usePaginationQuery } from "@/apis/queries/hooks";
import { QueryKeys } from "@/apis/queries/keys";
import api from "@/apis/raw";
function cacheSeries(client: QueryClient, series: Item.Series[]) {
series.forEach((item) => {
@ -16,31 +17,47 @@ function cacheSeries(client: QueryClient, series: Item.Series[]) {
export function useSeriesByIds(ids: number[]) {
const client = useQueryClient();
return useQuery([QueryKeys.Series, ...ids], () => api.series.series(ids), {
onSuccess: (data) => {
cacheSeries(client, data);
},
const query = useQuery({
queryKey: [QueryKeys.Series, ...ids],
queryFn: () => api.series.series(ids),
});
useEffect(() => {
if (query.isSuccess && query.data) {
cacheSeries(client, query.data);
}
}, [query.isSuccess, query.data, client]);
return query;
}
export function useSeriesById(id: number) {
return useQuery([QueryKeys.Series, id], async () => {
const response = await api.series.series([id]);
return response.length > 0 ? response[0] : undefined;
return useQuery({
queryKey: [QueryKeys.Series, id],
queryFn: async () => {
const response = await api.series.series([id]);
return response.length > 0 ? response[0] : undefined;
},
});
}
export function useSeries() {
const client = useQueryClient();
return useQuery(
[QueryKeys.Series, QueryKeys.All],
() => api.series.series(),
{
onSuccess: (data) => {
cacheSeries(client, data);
},
},
);
const query = useQuery({
queryKey: [QueryKeys.Series, QueryKeys.All],
queryFn: () => api.series.series(),
});
useEffect(() => {
if (query.isSuccess && query.data) {
cacheSeries(client, query.data);
}
}, [query.isSuccess, query.data, client]);
return query;
}
export function useSeriesPagination() {
@ -51,29 +68,33 @@ export function useSeriesPagination() {
export function useSeriesModification() {
const client = useQueryClient();
return useMutation(
[QueryKeys.Series],
(form: FormType.ModifyItem) => api.series.modify(form),
{
onSuccess: (_, form) => {
form.id.forEach((v) => {
client.invalidateQueries([QueryKeys.Series, v]);
return useMutation({
mutationKey: [QueryKeys.Series],
mutationFn: (form: FormType.ModifyItem) => api.series.modify(form),
onSuccess: (_, form) => {
form.id.forEach((v) => {
client.invalidateQueries({
queryKey: [QueryKeys.Series, v],
});
client.invalidateQueries([QueryKeys.Series]);
},
});
client.invalidateQueries({
queryKey: [QueryKeys.Series],
});
},
);
});
}
export function useSeriesAction() {
const client = useQueryClient();
return useMutation(
[QueryKeys.Actions, QueryKeys.Series],
(form: FormType.SeriesAction) => api.series.action(form),
{
onSuccess: () => {
client.invalidateQueries([QueryKeys.Series]);
},
return useMutation({
mutationKey: [QueryKeys.Actions, QueryKeys.Series],
mutationFn: (form: FormType.SeriesAction) => api.series.action(form),
onSuccess: () => {
client.invalidateQueries({
queryKey: [QueryKeys.Series],
});
},
);
});
}

View file

@ -1,16 +1,28 @@
import { useIsMutating } from "react-query";
import { QueryKeys } from "../queries/keys";
import { useIsMutating } from "@tanstack/react-query";
import { QueryKeys } from "@/apis/queries/keys";
export function useIsAnyActionRunning() {
return useIsMutating([QueryKeys.Actions]) > 0;
return (
useIsMutating({
mutationKey: [QueryKeys.Actions],
}) > 0
);
}
export function useIsMovieActionRunning() {
return useIsMutating([QueryKeys.Actions, QueryKeys.Movies]) > 0;
return (
useIsMutating({
mutationKey: [QueryKeys.Actions, QueryKeys.Movies],
}) > 0
);
}
export function useIsSeriesActionRunning() {
return useIsMutating([QueryKeys.Actions, QueryKeys.Series]) > 0;
return (
useIsMutating({
mutationKey: [QueryKeys.Actions, QueryKeys.Series],
}) > 0
);
}
export function useIsAnyMutationRunning() {

View file

@ -1,6 +1,6 @@
import { useMutation, useQuery, useQueryClient } from "react-query";
import { QueryKeys } from "../queries/keys";
import api from "../raw";
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import { QueryKeys } from "@/apis/queries/keys";
import api from "@/apis/raw";
export function useSubtitleAction() {
const client = useQueryClient();
@ -8,23 +8,29 @@ export function useSubtitleAction() {
action: string;
form: FormType.ModifySubtitle;
}
return useMutation(
[QueryKeys.Subtitles],
(param: Param) => api.subtitles.modify(param.action, param.form),
{
onSuccess: (_, param) => {
client.invalidateQueries([QueryKeys.History]);
return useMutation({
mutationKey: [QueryKeys.Subtitles],
mutationFn: (param: Param) =>
api.subtitles.modify(param.action, param.form),
// TODO: Query less
const { type, id } = param.form;
if (type === "episode") {
client.invalidateQueries([QueryKeys.Series, id]);
} else {
client.invalidateQueries([QueryKeys.Movies, id]);
}
},
onSuccess: (_, param) => {
client.invalidateQueries({
queryKey: [QueryKeys.History],
});
// TODO: Query less
const { type, id } = param.form;
if (type === "episode") {
client.invalidateQueries({
queryKey: [QueryKeys.Series, id],
});
} else {
client.invalidateQueries({
queryKey: [QueryKeys.Movies, id],
});
}
},
);
});
}
export function useEpisodeSubtitleModification() {
@ -36,42 +42,48 @@ export function useEpisodeSubtitleModification() {
form: T;
}
const download = useMutation(
[QueryKeys.Subtitles, QueryKeys.Episodes],
(param: Param<FormType.Subtitle>) =>
const download = useMutation({
mutationKey: [QueryKeys.Subtitles, QueryKeys.Episodes],
mutationFn: (param: Param<FormType.Subtitle>) =>
api.episodes.downloadSubtitles(
param.seriesId,
param.episodeId,
param.form,
),
{
onSuccess: (_, param) => {
client.invalidateQueries([QueryKeys.Series, param.seriesId]);
},
},
);
const remove = useMutation(
[QueryKeys.Subtitles, QueryKeys.Episodes],
(param: Param<FormType.DeleteSubtitle>) =>
onSuccess: (_, param) => {
client.invalidateQueries({
queryKey: [QueryKeys.Series, param.seriesId],
});
},
});
const remove = useMutation({
mutationKey: [QueryKeys.Subtitles, QueryKeys.Episodes],
mutationFn: (param: Param<FormType.DeleteSubtitle>) =>
api.episodes.deleteSubtitles(param.seriesId, param.episodeId, param.form),
{
onSuccess: (_, param) => {
client.invalidateQueries([QueryKeys.Series, param.seriesId]);
},
},
);
const upload = useMutation(
[QueryKeys.Subtitles, QueryKeys.Episodes],
(param: Param<FormType.UploadSubtitle>) =>
api.episodes.uploadSubtitles(param.seriesId, param.episodeId, param.form),
{
onSuccess: (_, { seriesId }) => {
client.invalidateQueries([QueryKeys.Series, seriesId]);
},
onSuccess: (_, param) => {
client.invalidateQueries({
queryKey: [QueryKeys.Series, param.seriesId],
});
},
);
});
const upload = useMutation({
mutationKey: [QueryKeys.Subtitles, QueryKeys.Episodes],
mutationFn: (param: Param<FormType.UploadSubtitle>) =>
api.episodes.uploadSubtitles(param.seriesId, param.episodeId, param.form),
onSuccess: (_, { seriesId }) => {
client.invalidateQueries({
queryKey: [QueryKeys.Series, seriesId],
});
},
});
return { download, remove, upload };
}
@ -84,46 +96,54 @@ export function useMovieSubtitleModification() {
form: T;
}
const download = useMutation(
[QueryKeys.Subtitles, QueryKeys.Movies],
(param: Param<FormType.Subtitle>) =>
const download = useMutation({
mutationKey: [QueryKeys.Subtitles, QueryKeys.Movies],
mutationFn: (param: Param<FormType.Subtitle>) =>
api.movies.downloadSubtitles(param.radarrId, param.form),
{
onSuccess: (_, param) => {
client.invalidateQueries([QueryKeys.Movies, param.radarrId]);
},
},
);
const remove = useMutation(
[QueryKeys.Subtitles, QueryKeys.Movies],
(param: Param<FormType.DeleteSubtitle>) =>
onSuccess: (_, param) => {
client.invalidateQueries({
queryKey: [QueryKeys.Movies, param.radarrId],
});
},
});
const remove = useMutation({
mutationKey: [QueryKeys.Subtitles, QueryKeys.Movies],
mutationFn: (param: Param<FormType.DeleteSubtitle>) =>
api.movies.deleteSubtitles(param.radarrId, param.form),
{
onSuccess: (_, param) => {
client.invalidateQueries([QueryKeys.Movies, param.radarrId]);
},
},
);
const upload = useMutation(
[QueryKeys.Subtitles, QueryKeys.Movies],
(param: Param<FormType.UploadSubtitle>) =>
api.movies.uploadSubtitles(param.radarrId, param.form),
{
onSuccess: (_, { radarrId }) => {
client.invalidateQueries([QueryKeys.Movies, radarrId]);
},
onSuccess: (_, param) => {
client.invalidateQueries({
queryKey: [QueryKeys.Movies, param.radarrId],
});
},
);
});
const upload = useMutation({
mutationKey: [QueryKeys.Subtitles, QueryKeys.Movies],
mutationFn: (param: Param<FormType.UploadSubtitle>) =>
api.movies.uploadSubtitles(param.radarrId, param.form),
onSuccess: (_, { radarrId }) => {
client.invalidateQueries({
queryKey: [QueryKeys.Movies, radarrId],
});
},
});
return { download, remove, upload };
}
export function useSubtitleInfos(names: string[]) {
return useQuery([QueryKeys.Subtitles, QueryKeys.Infos, names], () =>
api.subtitles.info(names),
);
return useQuery({
queryKey: [QueryKeys.Subtitles, QueryKeys.Infos, names],
queryFn: () => api.subtitles.info(names),
});
}
export function useRefTracksByEpisodeId(
@ -131,11 +151,17 @@ export function useRefTracksByEpisodeId(
sonarrEpisodeId: number,
isEpisode: boolean,
) {
return useQuery(
[QueryKeys.Episodes, sonarrEpisodeId, QueryKeys.Subtitles, subtitlesPath],
() => api.subtitles.getRefTracksByEpisodeId(subtitlesPath, sonarrEpisodeId),
{ enabled: isEpisode },
);
return useQuery({
queryKey: [
QueryKeys.Episodes,
sonarrEpisodeId,
QueryKeys.Subtitles,
subtitlesPath,
],
queryFn: () =>
api.subtitles.getRefTracksByEpisodeId(subtitlesPath, sonarrEpisodeId),
enabled: isEpisode,
});
}
export function useRefTracksByMovieId(
@ -143,9 +169,15 @@ export function useRefTracksByMovieId(
radarrMovieId: number,
isMovie: boolean,
) {
return useQuery(
[QueryKeys.Movies, radarrMovieId, QueryKeys.Subtitles, subtitlesPath],
() => api.subtitles.getRefTracksByMovieId(subtitlesPath, radarrMovieId),
{ enabled: isMovie },
);
return useQuery({
queryKey: [
QueryKeys.Movies,
radarrMovieId,
QueryKeys.Subtitles,
subtitlesPath,
],
queryFn: () =>
api.subtitles.getRefTracksByMovieId(subtitlesPath, radarrMovieId),
enabled: isMovie,
});
}
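
One detail worth calling out in the migrated hooks: in v5, invalidateQueries takes a filters object and returns a Promise, which is why many of the calls above gained a leading void. A short sketch (query keys are illustrative):

import { QueryClient } from "@tanstack/react-query";

const client = new QueryClient();

export async function refreshMovies() {
  // Fire-and-forget: `void` signals the promise is intentionally ignored,
  // keeping no-floating-promises-style lint rules quiet.
  void client.invalidateQueries({ queryKey: ["movies", "all"] });

  // Awaiting is also valid when the caller must wait for refetches.
  await client.invalidateQueries({ queryKey: ["movies"] });
}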

View file

@ -1,20 +1,18 @@
import { useMemo } from "react";
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
import { QueryKeys } from "@/apis/queries/keys";
import api from "@/apis/raw";
import { Environment } from "@/utilities";
import { setAuthenticated } from "@/utilities/event";
import { useMemo } from "react";
import { useMutation, useQuery, useQueryClient } from "react-query";
import { QueryKeys } from "../queries/keys";
import api from "../raw";
export function useBadges() {
return useQuery(
[QueryKeys.System, QueryKeys.Badges],
() => api.badges.all(),
{
refetchOnWindowFocus: "always",
refetchInterval: 1000 * 60,
staleTime: 1000 * 10,
},
);
return useQuery({
queryKey: [QueryKeys.System, QueryKeys.Badges],
queryFn: () => api.badges.all(),
refetchOnWindowFocus: "always",
refetchInterval: 1000 * 60,
staleTime: 1000 * 10,
});
}
export function useFileSystem(
@ -22,9 +20,10 @@ export function useFileSystem(
path: string,
enabled: boolean,
) {
return useQuery(
[QueryKeys.FileSystem, type, path],
() => {
return useQuery({
queryKey: [QueryKeys.FileSystem, type, path],
queryFn: () => {
if (type === "bazarr") {
return api.files.bazarr(path);
} else if (type === "radarr") {
@ -32,53 +31,68 @@ export function useFileSystem(
} else if (type === "sonarr") {
return api.files.sonarr(path);
}
return [];
},
{
enabled,
},
);
enabled,
});
}
export function useSystemSettings() {
return useQuery(
[QueryKeys.System, QueryKeys.Settings],
() => api.system.settings(),
{
staleTime: Infinity,
},
);
return useQuery({
queryKey: [QueryKeys.System, QueryKeys.Settings],
queryFn: () => api.system.settings(),
staleTime: Infinity,
});
}
export function useSettingsMutation() {
const client = useQueryClient();
return useMutation(
[QueryKeys.System, QueryKeys.Settings],
(data: LooseObject) => api.system.updateSettings(data),
{
onSuccess: () => {
client.invalidateQueries([QueryKeys.System]);
client.invalidateQueries([QueryKeys.Series]);
client.invalidateQueries([QueryKeys.Episodes]);
client.invalidateQueries([QueryKeys.Movies]);
client.invalidateQueries([QueryKeys.Wanted]);
client.invalidateQueries([QueryKeys.Badges]);
},
return useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Settings],
mutationFn: (data: LooseObject) => api.system.updateSettings(data),
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.System],
});
void client.invalidateQueries({
queryKey: [QueryKeys.Series],
});
void client.invalidateQueries({
queryKey: [QueryKeys.Episodes],
});
void client.invalidateQueries({
queryKey: [QueryKeys.Movies],
});
void client.invalidateQueries({
queryKey: [QueryKeys.Wanted],
});
void client.invalidateQueries({
queryKey: [QueryKeys.Badges],
});
},
);
});
}
export function useServerSearch(query: string, enabled: boolean) {
return useQuery(
[QueryKeys.System, QueryKeys.Search, query],
() => api.system.search(query),
{
enabled,
},
);
return useQuery({
queryKey: [QueryKeys.System, QueryKeys.Search, query],
queryFn: () => api.system.search(query),
enabled,
});
}
export function useSystemLogs() {
return useQuery([QueryKeys.System, QueryKeys.Logs], () => api.system.logs(), {
return useQuery({
queryKey: [QueryKeys.System, QueryKeys.Logs],
queryFn: () => api.system.logs(),
refetchOnWindowFocus: "always",
refetchInterval: 1000 * 60,
staleTime: 1000 * 10,
@ -87,171 +101,189 @@ export function useSystemLogs() {
export function useDeleteLogs() {
const client = useQueryClient();
return useMutation(
[QueryKeys.System, QueryKeys.Logs],
() => api.system.deleteLogs(),
{
onSuccess: () => {
client.invalidateQueries([QueryKeys.System, QueryKeys.Logs]);
},
return useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Logs],
mutationFn: () => api.system.deleteLogs(),
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.System, QueryKeys.Logs],
});
},
);
});
}
export function useSystemAnnouncements() {
return useQuery(
[QueryKeys.System, QueryKeys.Announcements],
() => api.system.announcements(),
{
refetchOnWindowFocus: "always",
refetchInterval: 1000 * 60,
staleTime: 1000 * 10,
},
);
return useQuery({
queryKey: [QueryKeys.System, QueryKeys.Announcements],
queryFn: () => api.system.announcements(),
refetchOnWindowFocus: "always",
refetchInterval: 1000 * 60,
staleTime: 1000 * 10,
});
}
export function useSystemAnnouncementsAddDismiss() {
const client = useQueryClient();
return useMutation(
[QueryKeys.System, QueryKeys.Announcements],
(param: { hash: string }) => {
return useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Announcements],
mutationFn: (param: { hash: string }) => {
const { hash } = param;
return api.system.addAnnouncementsDismiss(hash);
},
{
onSuccess: (_, { hash }) => {
client.invalidateQueries([QueryKeys.System, QueryKeys.Announcements]);
client.invalidateQueries([QueryKeys.System, QueryKeys.Badges]);
},
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.System, QueryKeys.Announcements],
});
void client.invalidateQueries({
queryKey: [QueryKeys.System, QueryKeys.Badges],
});
},
);
});
}
export function useSystemTasks() {
return useQuery(
[QueryKeys.System, QueryKeys.Tasks],
() => api.system.tasks(),
{
refetchOnWindowFocus: "always",
refetchInterval: 1000 * 60,
staleTime: 1000 * 10,
},
);
return useQuery({
queryKey: [QueryKeys.System, QueryKeys.Tasks],
queryFn: () => api.system.tasks(),
refetchOnWindowFocus: "always",
refetchInterval: 1000 * 60,
staleTime: 1000 * 10,
});
}
export function useRunTask() {
const client = useQueryClient();
return useMutation(
[QueryKeys.System, QueryKeys.Tasks],
(id: string) => api.system.runTask(id),
{
onSuccess: () => {
client.invalidateQueries([QueryKeys.System, QueryKeys.Tasks]);
client.invalidateQueries([QueryKeys.System, QueryKeys.Backups]);
},
return useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Tasks],
mutationFn: (id: string) => api.system.runTask(id),
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.System, QueryKeys.Tasks],
});
void client.invalidateQueries({
queryKey: [QueryKeys.System, QueryKeys.Backups],
});
},
);
});
}
export function useSystemBackups() {
return useQuery([QueryKeys.System, "backups"], () => api.system.backups());
return useQuery({
queryKey: [QueryKeys.System, "backups"],
queryFn: () => api.system.backups(),
});
}
export function useCreateBackups() {
const client = useQueryClient();
return useMutation(
[QueryKeys.System, QueryKeys.Backups],
() => api.system.createBackups(),
{
onSuccess: () => {
client.invalidateQueries([QueryKeys.System, QueryKeys.Backups]);
},
return useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Backups],
mutationFn: () => api.system.createBackups(),
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.System, QueryKeys.Backups],
});
},
);
});
}
export function useRestoreBackups() {
const client = useQueryClient();
return useMutation(
[QueryKeys.System, QueryKeys.Backups],
(filename: string) => api.system.restoreBackups(filename),
{
onSuccess: () => {
client.invalidateQueries([QueryKeys.System, QueryKeys.Backups]);
},
return useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Backups],
mutationFn: (filename: string) => api.system.restoreBackups(filename),
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.System, QueryKeys.Backups],
});
},
);
});
}
export function useDeleteBackups() {
const client = useQueryClient();
return useMutation(
[QueryKeys.System, QueryKeys.Backups],
(filename: string) => api.system.deleteBackups(filename),
{
onSuccess: () => {
client.invalidateQueries([QueryKeys.System, QueryKeys.Backups]);
},
return useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Backups],
mutationFn: (filename: string) => api.system.deleteBackups(filename),
onSuccess: () => {
void client.invalidateQueries({
queryKey: [QueryKeys.System, QueryKeys.Backups],
});
},
);
});
}
export function useSystemStatus() {
return useQuery([QueryKeys.System, "status"], () => api.system.status());
return useQuery({
queryKey: [QueryKeys.System, "status"],
queryFn: () => api.system.status(),
});
}
export function useSystemHealth() {
return useQuery([QueryKeys.System, "health"], () => api.system.health());
return useQuery({
queryKey: [QueryKeys.System, "health"],
queryFn: () => api.system.health(),
});
}
export function useSystemReleases() {
return useQuery([QueryKeys.System, "releases"], () => api.system.releases());
return useQuery({
queryKey: [QueryKeys.System, "releases"],
queryFn: () => api.system.releases(),
});
}
export function useSystem() {
const client = useQueryClient();
const { mutate: logout, isLoading: isLoggingOut } = useMutation(
[QueryKeys.System, QueryKeys.Actions],
() => api.system.logout(),
{
onSuccess: () => {
setAuthenticated(false);
client.clear();
},
},
);
const { mutate: logout, isPending: isLoggingOut } = useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Actions],
mutationFn: () => api.system.logout(),
onSuccess: () => {
setAuthenticated(false);
client.clear();
},
});
const { mutate: login, isLoading: isLoggingIn } = useMutation(
[QueryKeys.System, QueryKeys.Actions],
(param: { username: string; password: string }) =>
api.system.login(param.username, param.password),
{
onSuccess: () => {
// TODO: Hard-coded value
window.location.replace(Environment.baseUrl);
},
},
);
const { mutate: login, isPending: isLoggingIn } = useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Actions],
mutationFn: (param: { username: string; password: string }) =>
api.system.login(param.username, param.password),
onSuccess: () => {
// TODO: Hard-coded value
window.location.replace(Environment.baseUrl);
},
});
const { mutate: shutdown, isLoading: isShuttingDown } = useMutation(
[QueryKeys.System, QueryKeys.Actions],
() => api.system.shutdown(),
{
onSuccess: () => {
client.clear();
},
},
);
const { mutate: shutdown, isPending: isShuttingDown } = useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Actions],
mutationFn: () => api.system.shutdown(),
onSuccess: () => {
client.clear();
},
});
const { mutate: restart, isLoading: isRestarting } = useMutation(
[QueryKeys.System, QueryKeys.Actions],
() => api.system.restart(),
{
onSuccess: () => {
client.clear();
},
},
);
const { mutate: restart, isPending: isRestarting } = useMutation({
mutationKey: [QueryKeys.System, QueryKeys.Actions],
mutationFn: () => api.system.restart(),
onSuccess: () => {
client.clear();
},
});
return useMemo(
() => ({
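
The useMutation hunks in this file apply the same v5 conversion, plus two related changes: mutation results expose isPending instead of isLoading, and invalidateQueries now takes a filters object and returns a promise, which the new code explicitly discards with void. A minimal sketch under those assumptions, with hypothetical endpoint and key names:

import { useMutation, useQueryClient } from "@tanstack/react-query";

export function useDeleteItem() {
  const client = useQueryClient();
  // v5 object signature; isPending replaces v3's isLoading on mutations.
  const { mutate, isPending } = useMutation({
    mutationKey: ["items", "delete"],
    mutationFn: (id: number) =>
      fetch(`/api/items/${id}`, { method: "DELETE" }),
    onSuccess: () => {
      // invalidateQueries takes a { queryKey } filters object and returns
      // a promise, hence the void markers seen throughout the diff above.
      void client.invalidateQueries({ queryKey: ["items"] });
    },
  });
  return { deleteItem: mutate, isDeleting: isPending };
}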

View file

@ -1,12 +1,12 @@
import { GetItemId, useOnValueChange } from "@/utilities";
import { usePageSize } from "@/utilities/storage";
import { useCallback, useEffect, useState } from "react";
import {
QueryKey,
UseQueryResult,
useQuery,
useQueryClient,
} from "react-query";
UseQueryResult,
} from "@tanstack/react-query";
import { GetItemId, useOnValueChange } from "@/utilities";
import { usePageSize } from "@/utilities/storage";
import { QueryKeys } from "./keys";
export type UsePaginationQueryResult<T extends object> = UseQueryResult<
@ -39,31 +39,31 @@ export function usePaginationQuery<
const start = page * pageSize;
const results = useQuery(
[...queryKey, QueryKeys.Range, { start, size: pageSize }],
() => {
const results = useQuery({
queryKey: [...queryKey, QueryKeys.Range, { start, size: pageSize }],
queryFn: () => {
const param: Parameter.Range = {
start,
length: pageSize,
};
return queryFn(param);
},
{
onSuccess: ({ data }) => {
if (cacheIndividual) {
data.forEach((item) => {
const id = GetItemId(item);
if (id) {
client.setQueryData([...queryKey, id], item);
}
});
}
},
},
);
});
const { data } = results;
useEffect(() => {
if (results.isSuccess && results.data && cacheIndividual) {
results.data.data.forEach((item) => {
const id = GetItemId(item);
if (id) {
client.setQueryData([...queryKey, id], item);
}
});
}
}, [results.isSuccess, results.data, client, cacheIndividual, queryKey]);
const totalCount = data?.total ?? 0;
const pageCount = Math.ceil(totalCount / pageSize);
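
The onSuccess option was removed from useQuery in v5, which is why the per-item cache seeding above moves out of the query options and into a useEffect driven by the query result. A sketch of that relocation; Item and fetchPage are hypothetical stand-ins for the paginated types and fetcher:

import { useEffect } from "react";
import { useQuery, useQueryClient } from "@tanstack/react-query";

interface Item {
  id: number;
}

declare function fetchPage(start: number, length: number): Promise<Item[]>;

export function usePage(start: number, length: number) {
  const client = useQueryClient();
  const results = useQuery({
    queryKey: ["items", { start, length }],
    queryFn: () => fetchPage(start, length),
  });

  // Side effects formerly in the query's onSuccess now live in an effect.
  useEffect(() => {
    if (results.isSuccess && results.data) {
      results.data.forEach((item) => {
        // Seed the per-item cache from the page payload.
        client.setQueryData(["items", item.id], item);
      });
    }
  }, [results.isSuccess, results.data, client]);

  return results;
}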

View file

@ -1,4 +1,4 @@
import { QueryClient } from "react-query";
import { QueryClient } from "@tanstack/react-query";
const queryClient = new QueryClient({
defaultOptions: {
@ -6,7 +6,11 @@ const queryClient = new QueryClient({
refetchOnWindowFocus: false,
retry: false,
staleTime: 1000 * 60,
keepPreviousData: true,
networkMode: "offlineFirst",
placeholderData: (previousData: object) => previousData,
},
mutations: {
networkMode: "offlineFirst",
},
},
});
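
The client defaults change because v5 dropped the boolean keepPreviousData option; an identity placeholderData function reproduces it, and the library also exports a keepPreviousData helper for the same purpose. A sketch of an equivalent configuration using that helper:

import { keepPreviousData, QueryClient } from "@tanstack/react-query";

const queryClient = new QueryClient({
  defaultOptions: {
    queries: {
      refetchOnWindowFocus: false,
      retry: false,
      staleTime: 1000 * 60,
      // Equivalent to the inline (previousData) => previousData above.
      placeholderData: keepPreviousData,
      // offlineFirst still fires the first request when the browser
      // reports itself offline, matching v3's always-fetch behavior.
      networkMode: "offlineFirst",
    },
    mutations: {
      networkMode: "offlineFirst",
    },
  },
});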

Some files were not shown because too many files have changed in this diff.