mirror of https://github.com/tgbot-collection/ytdlbot.git (synced 2025-02-24 23:34:44 +08:00)
commit 1940f2c849 "bump" (parent fb51f12eec)

4 changed files with 9 additions and 15 deletions
requirements.txt
@@ -1,6 +1,6 @@
 git+https://github.com/KurimuzonAkuma/pyrogram
 tgcrypto==1.2.5
-yt-dlp==2024.03.10
+yt-dlp==2024.04.9
 APScheduler==3.10.4
 beautifultable==1.1.0
 ffmpeg-python==0.2.0
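The only change here is the yt-dlp pin moving from 2024.03.10 to 2024.04.9. After reinstalling, a quick sanity check like the following (a minimal sketch, not part of the commit) confirms the environment picked up the bump:

    # verify the installed yt-dlp distribution matches the new pin
    from importlib.metadata import version

    print(version("yt-dlp"))  # note: PEP 440 treats 2024.04.9 and 2024.04.09 as the same version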
config.py
@@ -37,12 +37,8 @@ AUTHORIZED_USER: str = os.getenv("AUTHORIZED_USER", "")
 REQUIRED_MEMBERSHIP: str = os.getenv("REQUIRED_MEMBERSHIP", "")
 
 # celery related
-IS_BACKUP_BOT = os.getenv("IS_BACKUP_BOT")
 ENABLE_CELERY = os.getenv("ENABLE_CELERY", False)
-if IS_BACKUP_BOT:
-    BROKER = os.getenv("BROKER", f"redis://{REDIS}:6379/1")
-else:
-    BROKER = os.getenv("BROKER", f"redis://{REDIS}:6379/0")
+BROKER = os.getenv("BROKER", f"redis://{REDIS}:6379/1")
 
 MYSQL_HOST = os.getenv("MYSQL_HOST", "mysql")
 MYSQL_USER = os.getenv("MYSQL_USER", "root")
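With IS_BACKUP_BOT removed, the Celery broker URL no longer switches between Redis logical databases 0 and 1; it now always defaults to database 1 unless BROKER is set explicitly. A minimal sketch of the resulting resolution (the REDIS default value is an assumption for illustration, by analogy with the "mysql" default above):

    import os

    REDIS = os.getenv("REDIS", "redis")  # assumed default, not shown in this hunk
    BROKER = os.getenv("BROKER", f"redis://{REDIS}:6379/1")
    print(BROKER)  # with no BROKER env var set: redis://redis:6379/1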
database.py
@@ -25,7 +25,7 @@ import requests
 from beautifultable import BeautifulTable
 from influxdb import InfluxDBClient
 
-from config import IS_BACKUP_BOT, MYSQL_HOST, MYSQL_PASS, MYSQL_USER, REDIS
+from config import MYSQL_HOST, MYSQL_PASS, MYSQL_USER, REDIS
 
 init_con = sqlite3.connect(":memory:", check_same_thread=False)
 
@@ -71,7 +71,7 @@ class Cursor:
 
 class Redis:
     def __init__(self):
-        db = 1 if IS_BACKUP_BOT else 0
+        db = 1
         try:
             self.r = redis.StrictRedis(host=REDIS, db=db, decode_responses=True)
             self.r.ping()
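The Redis wrapper now always connects to logical database 1, consistent with the new BROKER default in config.py. A standalone sketch of the connection it makes (the host is illustrative; the real value comes from config.REDIS):

    import redis

    # connect to logical database 1, decoding responses to str instead of bytes
    r = redis.StrictRedis(host="localhost", db=1, decode_responses=True)
    r.ping()  # raises redis.exceptions.ConnectionError if the server is unreachable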
ytdl_bot.py
@@ -37,7 +37,6 @@ from config import (
     ENABLE_CELERY,
     ENABLE_FFMPEG,
     ENABLE_VIP,
-    IS_BACKUP_BOT,
     M3U8_SUPPORT,
     OWNER,
     PLAYLIST_SUPPORT,
@@ -627,12 +626,11 @@ if __name__ == "__main__":
     scheduler = BackgroundScheduler(timezone="Europe/London")
     scheduler.add_job(auto_restart, "interval", seconds=600)
     scheduler.add_job(clean_tempfile, "interval", seconds=120)
-    if not IS_BACKUP_BOT:
-        scheduler.add_job(Redis().reset_today, "cron", hour=0, minute=0)
-        scheduler.add_job(InfluxDB().collect_data, "interval", seconds=120)
-        scheduler.add_job(TronTrx().check_payment, "interval", seconds=60, max_instances=1)
-        # default quota allocation of 10,000 units per day
-        scheduler.add_job(periodic_sub_check, "interval", seconds=3600)
+    scheduler.add_job(Redis().reset_today, "cron", hour=0, minute=0)
+    scheduler.add_job(InfluxDB().collect_data, "interval", seconds=120)
+    # scheduler.add_job(TronTrx().check_payment, "interval", seconds=60, max_instances=1)
+    # default quota allocation of 10,000 units per day
+    # scheduler.add_job(periodic_sub_check, "interval", seconds=3600)
     scheduler.start()
     banner = f"""
 ▌ ▌ ▀▛▘ ▌ ▛▀▖ ▜ ▌
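The jobs previously gated on `not IS_BACKUP_BOT` now register unconditionally, with the TronTrx payment check and the periodic subscription check left commented out. A minimal sketch of the APScheduler pattern in use (the job body is a stand-in for Redis().reset_today):

    from apscheduler.schedulers.background import BackgroundScheduler

    def reset_today():
        # stand-in for the real daily quota reset
        print("resetting daily quota")

    scheduler = BackgroundScheduler(timezone="Europe/London")
    scheduler.add_job(reset_today, "cron", hour=0, minute=0)  # fires every midnight
    scheduler.start()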