diff --git a/README.md b/README.md
index 99b8faafc0..6cd20413a9 100644
--- a/README.md
+++ b/README.md
@@ -62,6 +62,15 @@
 - Download using premium account if available
 - Download restricted messages (document or link) by tg private/public/super links
+
+
diff --git a/bot/__main__.py b/bot/__main__.py
- msg = f'{input_token}\n\n'
- msg += f'Validity: {get_readable_time(int(config_dict["TOKEN_TIMEOUT"]))}'
+ msg = BotTheme('TOKEN_MSG', token=input_token, validity=get_readable_time(int(config_dict["TOKEN_TIMEOUT"])))
return await sendMessage(message, msg, reply_markup)
elif await CustomFilters.authorized(client, message):
start_string = BotTheme('ST_MSG', help_command=f"/{BotCommands.HelpCommand}")
@@ -80,7 +80,7 @@ async def token_callback(_, query):
update_user_ldata(user_id, 'time', time())
await query.answer('Activated Temporary Token!', show_alert=True)
kb = query.message.reply_markup.inline_keyboard[1:]
- kb.insert(0, [InlineKeyboardButton('✅️ Activated ✅', callback_data='pass activated')])
+ kb.insert(0, [InlineKeyboardButton(BotTheme('ACTIVATED'), callback_data='pass activated')])
await editReplyMarkup(query.message, InlineKeyboardMarkup(kb))
@@ -91,14 +91,13 @@ async def login(_, message):
user_id = message.from_user.id
input_pass = message.command[1]
if user_data.get(user_id, {}).get('token', '') == config_dict['LOGIN_PASS']:
- return await sendMessage(message, 'Already Bot Login In!')
- if input_pass == config_dict['LOGIN_PASS']:
- update_user_ldata(user_id, 'token', config_dict['LOGIN_PASS'])
- return await sendMessage(message, 'Bot Permanent Login Successfully!')
- else:
- return await sendMessage(message, 'Invalid Password!\n\nKindly put the correct Password .')
+ return await sendMessage(message, BotTheme('LOGGED_IN'))
+ if input_pass != config_dict['LOGIN_PASS']:
+ return await sendMessage(message, BotTheme('INVALID_PASS'))
+ update_user_ldata(user_id, 'token', config_dict['LOGIN_PASS'])
+ return await sendMessage(message, BotTheme('PASS_LOGGED'))
else:
- await sendMessage(message, 'Bot Login Usage :\n\n/cmd {password}')
+ await sendMessage(message, BotTheme('LOGIN_USED'))
async def restart(client, message):
@@ -127,46 +126,47 @@ async def ping(_, message):
async def log(_, message):
buttons = ButtonMaker()
- buttons.ibutton('📑 Log Display', f'wzmlx {message.from_user.id} logdisplay')
- buttons.ibutton('📨 Web Paste', f'wzmlx {message.from_user.id} webpaste')
+ buttons.ibutton(BotTheme('LOG_DISPLAY_BT'), f'wzmlx {message.from_user.id} logdisplay')
+ buttons.ibutton(BotTheme('WEB_PASTE_BT'), f'wzmlx {message.from_user.id} webpaste')
await sendFile(message, 'log.txt', buttons=buttons.build_menu(1))
async def search_images():
- if query_list := config_dict['IMG_SEARCH']:
- try:
- total_pages = config_dict['IMG_PAGE']
- base_url = "https://www.wallpaperflare.com/search"
- for query in query_list:
- query = query.strip().replace(" ", "+")
- for page in range(1, total_pages + 1):
- url = f"{base_url}?wallpaper={query}&width=1280&height=720&page={page}"
- r = rget(url)
- soup = BeautifulSoup(r.text, "html.parser")
- images = soup.select('img[data-src^="https://c4.wallpaperflare.com/wallpaper"]')
- if len(images) == 0:
- LOGGER.info("Maybe Site is Blocked on your Server, Add Images Manually !!")
- for img in images:
- img_url = img['data-src']
- if img_url not in config_dict['IMAGES']:
- config_dict['IMAGES'].append(img_url)
- if len(config_dict['IMAGES']) != 0:
- config_dict['STATUS_LIMIT'] = 2
- if DATABASE_URL:
- await DbManger().update_config({'IMAGES': config_dict['IMAGES'], 'STATUS_LIMIT': config_dict['STATUS_LIMIT']})
- except Exception as e:
- LOGGER.error(f"An error occurred: {e}")
+ if not (query_list := config_dict['IMG_SEARCH']):
+ return
+ try:
+ total_pages = config_dict['IMG_PAGE']
+ base_url = "https://www.wallpaperflare.com/search"
+ for query in query_list:
+ query = query.strip().replace(" ", "+")
+ for page in range(1, total_pages + 1):
+ url = f"{base_url}?wallpaper={query}&width=1280&height=720&page={page}"
+ r = rget(url)
+ soup = BeautifulSoup(r.text, "html.parser")
+ images = soup.select('img[data-src^="https://c4.wallpaperflare.com/wallpaper"]')
+ if len(images) == 0:
+ LOGGER.info("Maybe Site is Blocked on your Server, Add Images Manually !!")
+ for img in images:
+ img_url = img['data-src']
+ if img_url not in config_dict['IMAGES']:
+ config_dict['IMAGES'].append(img_url)
+ if len(config_dict['IMAGES']) != 0:
+ config_dict['STATUS_LIMIT'] = 2
+ if DATABASE_URL:
+ await DbManger().update_config({'IMAGES': config_dict['IMAGES'], 'STATUS_LIMIT': config_dict['STATUS_LIMIT']})
+ except Exception as e:
+ LOGGER.error(f"An error occurred: {e}")
async def bot_help(client, message):
buttons = ButtonMaker()
user_id = message.from_user.id
- buttons.ibutton('Basic', f'wzmlx {user_id} guide basic')
- buttons.ibutton('Users', f'wzmlx {user_id} guide users')
- buttons.ibutton('Mics', f'wzmlx {user_id} guide miscs')
- buttons.ibutton('Owner & Sudos', f'wzmlx {user_id} guide admin')
- buttons.ibutton('Close', f'wzmlx {user_id} close')
- await sendMessage(message, "γ Help Guide Menu!\n\nNOTE: Click on any CMD to see more minor detalis.", buttons.build_menu(2))
+ buttons.ibutton(BotTheme('BASIC_BT'), f'wzmlx {user_id} guide basic')
+ buttons.ibutton(BotTheme('USER_BT'), f'wzmlx {user_id} guide users')
+ buttons.ibutton(BotTheme('MICS_BT'), f'wzmlx {user_id} guide miscs')
+ buttons.ibutton(BotTheme('O_S_BT'), f'wzmlx {user_id} guide admin')
+ buttons.ibutton(BotTheme('CLOSE_BT'), f'wzmlx {user_id} close')
+ await sendMessage(message, BotTheme('HELP_HEADER'), buttons.build_menu(2))
async def restart_notification():
@@ -180,7 +180,7 @@ async def restart_notification():
async def send_incompelete_task_message(cid, msg):
try:
if msg.startswith("β¬ Restarted Successfully!"):
- await bot.edit_message_text(chat_id=chat_id, message_id=msg_id, text=msg)
+ await bot.edit_message_text(chat_id=chat_id, message_id=msg_id, text=msg, disable_web_page_preview=True)
await aioremove(".restartmsg")
else:
await bot.send_message(chat_id=cid, text=msg, disable_web_page_preview=True, disable_notification=True)
@@ -193,9 +193,10 @@ async def send_incompelete_task_message(cid, msg):
msg = BotTheme('RESTART_SUCCESS', time=now.strftime('%I:%M:%S %p'), date=now.strftime('%d/%m/%y'), timz=config_dict['TIMEZONE'], version=get_version()) if cid == chat_id else BotTheme('RESTARTED')
msg += "\n\n⬠Incomplete Tasks!"
for tag, links in data.items():
- msg += f"\nβ² {tag}: "
+ msg += f"\nβ² User: {tag}\nβ Tasks:"
for index, link in enumerate(links, start=1):
- msg += f" {index} |"
+ msg_link, source = next(iter(link.items()))
+ msg += f" {index}. S -> L |"
if len(msg.encode()) > 4000:
await send_incompelete_task_message(cid, msg)
msg = ''
@@ -231,7 +232,18 @@ async def main():
bot.add_handler(MessageHandler(stats, filters=command(
BotCommands.StatsCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
LOGGER.info(f"WZML-X Bot [@{bot_name}] Started!")
+ if user:
+ LOGGER.info(f"WZ's User [@{user.me.first_name}] Ready!")
signal(SIGINT, exit_clean_up)
-bot.loop.run_until_complete(main())
-bot.loop.run_forever()
+async def stop_signals():
+ if user:
+ await gather(bot.stop(), user.stop())
+ else:
+ await bot.stop()
+
+
+bot_run = bot.loop.run_until_complete
+bot_run(main())
+bot_run(idle())
+bot_run(stop_signals())
diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py
index e23c4954c8..c55ded9f42 100644
--- a/bot/helper/ext_utils/bot_utils.py
+++ b/bot/helper/ext_utils/bot_utils.py
@@ -3,7 +3,7 @@
from base64 import b64encode
from datetime import datetime
from os import path as ospath
-from pkg_resources import get_distribution
+from pkg_resources import get_distribution, DistributionNotFound
from aiofiles import open as aiopen
from aiofiles.os import remove as aioremove, path as aiopath, mkdir
from re import match as re_match
@@ -55,7 +55,6 @@ class MirrorStatus:
STATUS_SPLITTING = "Split"
STATUS_CHECKING = "CheckUp"
STATUS_SEEDING = "Seed"
- STATUS_UPLOADDDL = "Upload DDL"
class setInterval:
@@ -125,8 +124,8 @@ async def get_telegraph_list(telegraph_content):
if len(path) > 1:
await telegraph.edit_telegraph(path, telegraph_content)
buttons = ButtonMaker()
- buttons.ubutton("π VIEW", f"https://telegra.ph/{path[0]}")
- buttons = extra_btns(buttons)
+ buttons.ubutton("π VIEW", f"https://te.legra.ph/{path[0]}")
+ buttons, _ = extra_btns(buttons)
return buttons.build_menu(1)
def handleIndex(index, dic):
@@ -165,13 +164,20 @@ def get_all_versions():
vr = result.stdout.split('\n')[0].split(' ')[1]
except FileNotFoundError:
vr = ''
+ try:
+ vpy = get_distribution('pyrogram').version
+ except DistributionNotFound:
+ try:
+ vpy = get_distribution('pyrofork').version
+ except DistributionNotFound:
+ vpy = "2.xx.xx"
bot_cache['eng_versions'] = {'p7zip':vp, 'ffmpeg': vf, 'rclone': vr,
'aria': aria2.client.get_version()['version'],
'aiohttp': get_distribution('aiohttp').version,
'gapi': get_distribution('google-api-python-client').version,
'mega': MegaApi('test').getVersion(),
'qbit': get_client().app.version,
- 'pyro': get_distribution('pyrogram').version,
+ 'pyro': vpy,
'ytdlp': get_distribution('yt-dlp').version}
@@ -185,7 +191,7 @@ def __init__(self):
self.STATUS_GD = f"Google-API v{version_cache['gapi']}"
self.STATUS_MEGA = f"MegaSDK v{version_cache['mega']}"
self.STATUS_QB = f"qBit {version_cache['qbit']}"
- self.STATUS_TG = f"Pyrogram v{version_cache['pyro']}"
+ self.STATUS_TG = f"PyroMulti v{version_cache['pyro']}"
self.STATUS_YT = f"yt-dlp v{version_cache['ytdlp']}"
self.STATUS_EXT = "pExtract v2"
self.STATUS_SPLIT_MERGE = f"ffmpeg v{version_cache['ffmpeg']}"
@@ -206,16 +212,15 @@ def get_readable_message():
for download in list(download_dict.values())[STATUS_START:STATUS_LIMIT+STATUS_START]:
msg_link = download.message.link if download.message.chat.type in [
ChatType.SUPERGROUP, ChatType.CHANNEL] and not config_dict['DELETE_LINKS'] else ''
- msg += BotTheme('STATUS_NAME', Name="Task is being Processed!" if config_dict['SAFE_MODE'] else escape(f'{download.name()}'))
+ elapsed = time() - download.message.date.timestamp()
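+ # with SAFE_MODE enabled, mask the task name once the first status refresh interval has passed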
+ msg += BotTheme('STATUS_NAME', Name="Task is being Processed!" if config_dict['SAFE_MODE'] and elapsed >= config_dict['STATUS_UPDATE_INTERVAL'] else escape(f'{download.name()}'))
if download.status() not in [MirrorStatus.STATUS_SPLITTING, MirrorStatus.STATUS_SEEDING]:
- if download.status() != MirrorStatus.STATUS_UPLOADDDL:
- msg += BotTheme('BAR', Bar=f"{get_progress_bar_string(download.progress())} {download.progress()}")
- msg += BotTheme('PROCESSED', Processed=f"{download.processed_bytes()} of {download.size()}")
+ msg += BotTheme('BAR', Bar=f"{get_progress_bar_string(download.progress())} {download.progress()}")
+ msg += BotTheme('PROCESSED', Processed=f"{download.processed_bytes()} of {download.size()}")
msg += BotTheme('STATUS', Status=download.status(), Url=msg_link)
- if download.status() != MirrorStatus.STATUS_UPLOADDDL:
- msg += BotTheme('ETA', Eta=download.eta())
- msg += BotTheme('SPEED', Speed=download.speed())
- msg += BotTheme('ELAPSED', Elapsed=get_readable_time(time() - download.message.date.timestamp()))
+ msg += BotTheme('ETA', Eta=download.eta())
+ msg += BotTheme('SPEED', Speed=download.speed())
+ msg += BotTheme('ELAPSED', Elapsed=get_readable_time(elapsed))
msg += BotTheme('ENGINE', Engine=download.eng())
msg += BotTheme('STA_MODE', Mode=download.upload_details['mode'])
if hasattr(download, 'seeders_num'):
@@ -254,6 +259,10 @@ def convert_speed_to_bytes_per_second(spd):
return float(spd.split('K')[0]) * 1024
elif 'M' in spd:
return float(spd.split('M')[0]) * 1048576
+ elif 'G' in spd:
+ return float(spd.split('G')[0]) * 1073741824
+ elif 'T' in spd:
+ return float(spd.split('T')[0]) * 1099511627776
else:
return 0
@@ -340,6 +349,10 @@ def is_share_link(url):
return bool(re_match(r'https?:\/\/.+\.gdtot\.\S+|https?:\/\/(filepress|filebee|appdrive|gdflix)\.\S+', url))
+def is_index_link(url):
+ return bool(re_match(r'https?:\/\/.+\/\d+\:\/', url))
+
+
def is_mega_link(url):
return "mega.nz" in url or "mega.co.nz" in url
@@ -471,8 +484,8 @@ def wrapper(*args, wait=False, **kwargs):
async def compare_versions(v1, v2):
- v1_parts = [int(part) for part in v1[1:-2].split('.')]
- v2_parts = [int(part) for part in v2[1:-2].split('.')]
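+ # assumption: version tags look like "v1.2.3" or "v1.2.3-beta"; drop any "-suffix" and the leading "v" before comparing major.minor.patch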
+ v1_parts = [int(part) for part in v1.split('-')[0][1:].split('.')]
+ v2_parts = [int(part) for part in v2.split('-')[0][1:].split('.')]
for i in range(3):
v1_part, v2_part = v1_parts[i], v2_parts[i]
if v1_part < v2_part:
@@ -497,6 +510,7 @@ async def get_stats(event, key="home"):
total, used, free, disk = disk_usage('/')
swap = swap_memory()
memory = virtual_memory()
+ disk_io = disk_io_counters()
msg = BotTheme('BOT_STATS',
bot_uptime=get_readable_time(time() - botStartTime),
ram_bar=get_progress_bar_string(memory.percent),
@@ -511,8 +525,8 @@ async def get_stats(event, key="home"):
swap_t=get_readable_file_size(swap.total),
disk=disk,
disk_bar=get_progress_bar_string(disk),
- disk_read=get_readable_file_size(disk_io_counters().read_bytes) + f" ({get_readable_time(disk_io_counters().read_time / 1000)})",
- disk_write=get_readable_file_size(disk_io_counters().write_bytes) + f" ({get_readable_time(disk_io_counters().write_time / 1000)})",
+ disk_read=get_readable_file_size(disk_io.read_bytes) + f" ({get_readable_time(disk_io.read_time / 1000)})" if disk_io else "Access Denied",
+ disk_write=get_readable_file_size(disk_io.write_bytes) + f" ({get_readable_time(disk_io.write_time / 1000)})" if disk_io else "Access Denied",
disk_t=get_readable_file_size(total),
disk_u=get_readable_file_size(used),
disk_f=get_readable_file_size(free),
@@ -542,7 +556,7 @@ async def get_stats(event, key="home"):
if await aiopath.exists('.git'):
last_commit = (await cmd_exec("git log -1 --pretty='%cd ( %cr )' --date=format-local:'%d/%m/%Y'", True))[0]
changelog = (await cmd_exec("git log -1 --pretty=format:'%s By %an'", True))[0]
- official_v = (await cmd_exec("curl -o latestversion.py https://raw.githubusercontent.com/weebzone/WZML-X/master/bot/version.py -s && python3 latestversion.py && rm latestversion.py", True))[0]
+ official_v = (await cmd_exec(f"curl -o latestversion.py https://raw.githubusercontent.com/weebzone/WZML-X/{config_dict['UPSTREAM_BRANCH']}/bot/version.py -s && python3 latestversion.py && rm latestversion.py", True))[0]
msg = BotTheme('REPO_STATS',
last_commit=last_commit,
bot_version=get_version(),
@@ -647,11 +661,11 @@ async def checking_access(user_id, button=None):
return None, button
-def extra_btns(buttons):
- if extra_buttons:
+def extra_btns(buttons, already=False):
+ if extra_buttons and not already:
for btn_name, btn_url in extra_buttons.items():
- buttons.ubutton(btn_name, btn_url)
- return buttons
+ buttons.ubutton(btn_name, btn_url, 'l_body')
+ return buttons, True
async def set_commands(client):
@@ -699,9 +713,3 @@ async def set_commands(client):
LOGGER.info('Bot Commands have been Set & Updated')
except Exception as err:
LOGGER.error(err)
-
-
-def is_valid_token(url, token):
- resp = rget(url=f"{url}getAccountDetails?token={token}&allDetails=true").json()
- if resp["status"] == "error-wrongToken":
- raise Exception("Invalid Gofile Token, Get your Gofile token from --> https://gofile.io/myProfile")
diff --git a/bot/helper/ext_utils/db_handler.py b/bot/helper/ext_utils/db_handler.py
index 5dc567c173..cb0c344f18 100644
--- a/bot/helper/ext_utils/db_handler.py
+++ b/bot/helper/ext_utils/db_handler.py
@@ -169,10 +169,10 @@ async def rss_delete(self, user_id):
await self.__db.rss[bot_id].delete_one({'_id': user_id})
self.__conn.close
- async def add_incomplete_task(self, cid, link, tag):
+ async def add_incomplete_task(self, cid, link, tag, msg_link):
if self.__err:
return
- await self.__db.tasks[bot_id].insert_one({'_id': link, 'cid': cid, 'tag': tag})
+ await self.__db.tasks[bot_id].insert_one({'_id': link, 'cid': cid, 'tag': tag, 'source': msg_link})
self.__conn.close
async def rm_complete_task(self, link):
@@ -186,20 +186,19 @@ async def get_incomplete_tasks(self):
if self.__err:
return notifier_dict
if await self.__db.tasks[bot_id].find_one():
- # return a dict ==> {_id, cid, tag}
+ # return a dict ==> {_id, cid, tag, source}
rows = self.__db.tasks[bot_id].find({})
async for row in rows:
if row['cid'] in list(notifier_dict.keys()):
if row['tag'] in list(notifier_dict[row['cid']]):
- notifier_dict[row['cid']][row['tag']].append(
- row['_id'])
+ notifier_dict[row['cid']][row['tag']].append({row['_id']: row['source']})
else:
- notifier_dict[row['cid']][row['tag']] = [row['_id']]
+ notifier_dict[row['cid']][row['tag']] = [{row['_id']: row['source']}]
else:
- notifier_dict[row['cid']] = {row['tag']: [row['_id']]}
+ notifier_dict[row['cid']] = {row['tag']: [{row['_id']: row['source']}]}
await self.__db.tasks[bot_id].drop()
self.__conn.close
- return notifier_dict # return a dict ==> {cid: {tag: [_id, _id, ...]}}
+ return notifier_dict # return a dict ==> {cid: {tag: [{_id: source}, {_id: source}, ...]}}
async def trunc_table(self, name):
if self.__err:
@@ -207,6 +206,5 @@ async def trunc_table(self, name):
await self.__db[name][bot_id].drop()
self.__conn.close
-
if DATABASE_URL:
bot_loop.run_until_complete(DbManger().db_load())
diff --git a/bot/helper/ext_utils/fs_utils.py b/bot/helper/ext_utils/fs_utils.py
index 10973a015b..40011e90d0 100644
--- a/bot/helper/ext_utils/fs_utils.py
+++ b/bot/helper/ext_utils/fs_utils.py
@@ -64,7 +64,7 @@ async def start_cleanup():
await aiormtree(DOWNLOAD_DIR)
except:
pass
- await makedirs(DOWNLOAD_DIR)
+ await makedirs(DOWNLOAD_DIR, exist_ok=True)
def clean_all():
@@ -167,7 +167,10 @@ async def join_files(path):
LOGGER.error(f'Failed to join {final_name}, stderr: {stderr}')
else:
results.append(final_name)
+ else:
+ LOGGER.warning('No Binary files to join!')
if results:
+ LOGGER.info('Join Completed!')
for res in results:
for file_ in files:
if re_search(fr"{res}\.0[0-9]+$", file_):
diff --git a/bot/helper/ext_utils/help_messages.py b/bot/helper/ext_utils/help_messages.py
index dc8a06da21..2632cee10b 100644
--- a/bot/helper/ext_utils/help_messages.py
+++ b/bot/helper/ext_utils/help_messages.py
@@ -119,6 +119,7 @@
17. -index: Index url for gdrive_arg
18. -c or -category : Gdrive category to Upload, Specific Name (case insensitive)
19. -ud or -dump : Dump category to Upload, Specific Name (case insensitive) or chat_id or chat_username
+20. -ss or -screenshots : Generate Screenshots for Leeched Files, Specify 1, 3, .. after this.
""", """
➲ By along the cmd:
/cmd link -n new name
@@ -133,6 +134,12 @@
➲ Direct Link Authorization: -u -p or -user -pass
/cmd link -u username -p password
+➲ Direct link custom headers: -h or -headers
+/cmd link -h key: value key1: value1
+
+➲ Screenshot Generation: -ss or -screenshots
+/cmd link -ss number, Screenshots for each Video File
+
➲ Extract / Zip: -uz -z or -zip -unzip or -e -extract
/cmd link -e password (extract password protected)
/cmd link -z password (zip password protected)
@@ -396,6 +403,15 @@
β /{BotCommands.MyDramaListCommand}: Search in MyDramaList.
''']
+
+PASSWORD_ERROR_MESSAGE = """
+This link requires a password!
+- Insert sign :: after the link and write the password after the sign.
+Example: {}::love you
+Note: No spaces between the signs ::
+For the password, you can use a space!
+"""
+
default_desp = {'AS_DOCUMENT': 'Default type of Telegram file upload. Default is False mean as media.',
'ANIME_TEMPLATE': 'Set template for AniList Template. HTML Tags supported',
'AUTHORIZED_CHATS': 'Fill user_id and chat_id of groups/users you want to authorize. Separate them by space.',
@@ -450,6 +466,8 @@
'LEECH_FILENAME_REMNAME': 'Remove custom word from the leeched file name. Str',
'LOGIN_PASS': 'Permanent pass for user to skip the token system',
'TOKEN_TIMEOUT': 'Token timeout for each group member in sec. Int',
+ 'DEBRID_LINK_API': 'Set debrid-link.com API for leeching support on its 172 supported hosters. Str',
+ 'REAL_DEBRID_API': 'Set real-debrid.com API for torrent caching & a few supported hosters (may require a VPN). Str',
'LEECH_SPLIT_SIZE': 'Size of split in bytes. Default is 2GB. Default is 4GB if your account is premium.',
'MEDIA_GROUP': 'View Uploaded splitted file parts in media group. Default is False.',
'MEGA_EMAIL': 'E-Mail used to sign-in on mega.nz for using premium account. Str',
@@ -479,6 +497,7 @@
'TORRENT_TIMEOUT': 'Timeout of dead torrents downloading with qBittorrent and Aria2c in seconds. Int',
'UPSTREAM_REPO': "Your github repository link, if your repo is private add https://username:{githubtoken}@github.com/{username}/{reponame} format. Get token from Github settings. So you can update your bot from filled repository on each restart.",
'UPSTREAM_BRANCH': 'Upstream branch for update. Default is master.',
+ 'UPGRADE_PACKAGES': 'Install the new requirements file on restart without worrying about crashes. Bool',
'SAVE_MSG': 'Add button of save message. Bool',
'SET_COMMANDS': 'Set bot command automatically. Bool',
'UPTOBOX_TOKEN': 'Uptobox token to mirror uptobox links. Get it from Uptobox Premium Account.',
diff --git a/bot/helper/ext_utils/leech_utils.py b/bot/helper/ext_utils/leech_utils.py
index 6c2a60ac74..1a2253dbf4 100644
--- a/bot/helper/ext_utils/leech_utils.py
+++ b/bot/helper/ext_utils/leech_utils.py
@@ -1,12 +1,17 @@
-import hashlib
+from hashlib import md5
+from time import strftime, gmtime
from re import sub as re_sub
from shlex import split as ssplit
+from natsort import natsorted
from os import path as ospath
-from aiofiles.os import remove as aioremove, path as aiopath, mkdir
+from aiofiles.os import remove as aioremove, path as aiopath, mkdir, makedirs, listdir
+from aioshutil import rmtree as aiormtree
from time import time
from re import search as re_search
-from asyncio import create_subprocess_exec
+from asyncio import create_subprocess_exec, create_task, gather
from asyncio.subprocess import PIPE
+from telegraph import upload_file
+from langcodes import Language
from bot import LOGGER, MAX_SPLIT_SIZE, config_dict, user_data
from bot.modules.mediainfo import parseinfo
@@ -37,23 +42,32 @@ async def is_multi_streams(path):
return videos > 1 or audios > 1
-async def get_media_info(path):
+async def get_media_info(path, metadata=False):
try:
result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format",
- "json", "-show_format", path])
+ "json", "-show_format", "-show_streams", path])
if res := result[1]:
LOGGER.warning(f'Get Media Info: {res}')
except Exception as e:
LOGGER.error(f'Get Media Info: {e}. Mostly File not found!')
return 0, None, None
- fields = eval(result[0]).get('format')
+ ffresult = eval(result[0])
+ fields = ffresult.get('format')
if fields is None:
- LOGGER.error(f"get_media_info: {result}")
+ LOGGER.error(f"Get Media Info: {result}")
return 0, None, None
duration = round(float(fields.get('duration', 0)))
tags = fields.get('tags', {})
- artist = tags.get('artist') or tags.get('ARTIST')
- title = tags.get('title') or tags.get('TITLE')
+ artist = tags.get('artist') or tags.get('ARTIST') or tags.get("Artist")
+ title = tags.get('title') or tags.get('TITLE') or tags.get("Title")
+ if metadata:
+ lang, qual = "", ""
+ if (streams := ffresult.get('streams')) and streams[0].get('codec_type') == 'video':
+ qual = f"{streams[0].get('height')}p"
+ for stream in streams:
+ if stream.get('codec_type') == 'audio' and (lc := stream.get('tags', {}).get('language')):
+ lang += Language.get(lc).display_name() + ", "
+ return duration, qual, lang[:-2]
return duration, artist, title
@@ -88,27 +102,48 @@ async def get_document_type(path):
return is_video, is_audio, is_image
-async def take_ss(video_file, duration):
+async def get_audio_thumb(audio_file):
des_dir = 'Thumbnails'
if not await aiopath.exists(des_dir):
await mkdir(des_dir)
des_dir = ospath.join(des_dir, f"{time()}.jpg")
- if duration is None:
- duration = (await get_media_info(video_file))[0]
- if duration == 0:
- duration = 3
- duration = duration // 2
- cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration),
- "-i", video_file, "-vf", "thumbnail", "-frames:v", "1", des_dir]
+ cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error",
+ "-i", audio_file, "-an", "-vcodec", "copy", des_dir]
status = await create_subprocess_exec(*cmd, stderr=PIPE)
if await status.wait() != 0 or not await aiopath.exists(des_dir):
err = (await status.stderr.read()).decode().strip()
LOGGER.error(
- f'Error while extracting thumbnail. Name: {video_file} stderr: {err}')
+ f'Error while extracting thumbnail from audio. Name: {audio_file} stderr: {err}')
return None
return des_dir
+async def take_ss(video_file, duration=None, total=1, gen_ss=False):
+ des_dir = ospath.join('Thumbnails', f"{time()}")
+ await makedirs(des_dir, exist_ok=True)
+ if duration is None:
+ duration = (await get_media_info(video_file))[0]
+ if duration == 0:
+ duration = 3
+ duration = duration - (duration * 2 / 100)
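+ # shave ~2% off the duration so the evenly spaced screenshots never land on the closing frames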
+ cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", "",
+ "-i", video_file, "-vf", "thumbnail", "-frames:v", "1", des_dir]
+ tasks = []
+ tstamps = {}
+ for eq_thumb in range(1, total+1):
+ cmd[5] = str((duration // total) * eq_thumb)
+ tstamps[f"wz_thumb_{eq_thumb}.jpg"] = strftime("%H:%M:%S", gmtime(float(cmd[5])))
+ cmd[-1] = ospath.join(des_dir, f"wz_thumb_{eq_thumb}.jpg")
+ tasks.append(create_task(create_subprocess_exec(*cmd, stderr=PIPE)))
+ status = await gather(*tasks)
+ for task, eq_thumb in zip(status, range(1, total+1)):
+ if await task.wait() != 0 or not await aiopath.exists(ospath.join(des_dir, f"wz_thumb_{eq_thumb}.jpg")):
+ err = (await task.stderr.read()).decode().strip()
+ LOGGER.error(f'Error while extracting thumbnail no. {eq_thumb} from video. Name: {video_file} stderr: {err}')
+ return None
+ return (des_dir, tstamps) if gen_ss else ospath.join(des_dir, "wz_thumb_1.jpg")
+
+
async def split_file(path, size, file_, dirpath, split_size, listener, start_time=0, i=1, inLoop=False, multi_streams=True):
if listener.suproc == 'cancelled' or listener.suproc is not None and listener.suproc.returncode == -9:
return False
@@ -121,7 +156,7 @@ async def split_file(path, size, file_, dirpath, split_size, listener, start_tim
leech_split_size = user_dict.get(
'split_size') or config_dict['LEECH_SPLIT_SIZE']
parts = -(-size // leech_split_size)
- if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS']) and not inLoop:
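+ # a user's explicit equal_splits choice now takes priority; the global EQUAL_SPLITS default only applies when the user has not set it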
+ if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS'] and 'equal_splits' not in user_dict) and not inLoop:
split_size = ((size + parts - 1) // parts) + 1000
if (await get_document_type(path))[0]:
if multi_streams:
@@ -199,9 +234,7 @@ async def format_filename(file_, user_id, dirpath=None, isMirror=False):
lcaption = config_dict['LEECH_FILENAME_CAPTION'] if (val:=user_dict.get('lcaption', '')) == '' else val
prefile_ = file_
- # SD-Style V2 ~ WZML-X
- if file_.startswith('www'): #Remove all www.xyz.xyz domains
- file_ = ' '.join(file_.split()[1:])
+ file_ = re_sub(r'www\S+', '', file_)
if remname:
if not remname.startswith('|'):
@@ -223,7 +256,7 @@ async def format_filename(file_, user_id, dirpath=None, isMirror=False):
nfile_ = file_
if prefix:
nfile_ = prefix.replace('\s', ' ') + file_
- prefix = re_sub('<.*?>', '', prefix).replace('\s', ' ')
+ prefix = re_sub(r'<.*?>', '', prefix).replace('\s', ' ')
if not file_.startswith(prefix):
file_ = f"{prefix}{file_}"
@@ -251,10 +284,13 @@ async def format_filename(file_, user_id, dirpath=None, isMirror=False):
lcaption = lcaption.replace('\|', '%%').replace('\s', ' ')
slit = lcaption.split("|")
up_path = ospath.join(dirpath, prefile_)
+ dur, qual, lang = await get_media_info(up_path, True)
cap_mono = slit[0].format(
filename = nfile_,
size = get_readable_file_size(await aiopath.getsize(up_path)),
- duration = get_readable_time((await get_media_info(up_path))[0]),
+ duration = get_readable_time(dur),
+ quality = qual,
+ languages = lang,
md5_hash = get_md5_hash(up_path)
)
if len(slit) > 1:
@@ -268,8 +304,17 @@ async def format_filename(file_, user_id, dirpath=None, isMirror=False):
cap_mono = cap_mono.replace(args[0], '')
cap_mono = cap_mono.replace('%%', '|')
return file_, cap_mono
-
-
+
+
+async def get_ss(up_path, ss_no):
+ thumbs_path, tstamps = await take_ss(up_path, total=ss_no, gen_ss=True)
+ th_html = f"π Screenshot at {tstamps[thumb]}' for thumb in natsorted(await listdir(thumbs_path))) + await aiormtree(thumbs_path) + link_id = (await telegraph.create_page(title="ScreenShots X", content=th_html))["path"] + return f"https://graph.org/{link_id}" + + async def get_mediainfo_link(up_path): stdout, __, _ = await cmd_exec(ssplit(f'mediainfo "{up_path}"')) tc = f"π
{url}_{version['name']}
"
+ raise DirectDownloadLinkException(f'ERROR: {error}')
\ No newline at end of file
diff --git a/bot/helper/mirror_utils/download_utils/gd_download.py b/bot/helper/mirror_utils/download_utils/gd_download.py
index 8c6d9f77a7..d3a5aa33a4 100644
--- a/bot/helper/mirror_utils/download_utils/gd_download.py
+++ b/bot/helper/mirror_utils/download_utils/gd_download.py
@@ -1,7 +1,6 @@
#!/usr/bin/env python3
from json import dumps as jdumps
-from random import SystemRandom
-from string import ascii_letters, digits
+from secrets import token_hex
from cloudscraper import create_scraper as cget
from bot import download_dict, download_dict_lock, LOGGER, non_queued_dl, queue_dict_lock
@@ -23,8 +22,7 @@ async def add_gd_download(link, path, listener, newname, org_link):
return
name = newname or name
- gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
-
+ gid = token_hex(5)
msg, button = await stop_duplicate_check(name, listener)
if msg:
await sendMessage(listener.message, msg, button)
diff --git a/bot/helper/mirror_utils/download_utils/mega_download.py b/bot/helper/mirror_utils/download_utils/mega_download.py
index 04bb05e706..19e0acb584 100644
--- a/bot/helper/mirror_utils/download_utils/mega_download.py
+++ b/bot/helper/mirror_utils/download_utils/mega_download.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-from random import SystemRandom
-from string import ascii_letters, digits
+from secrets import token_hex
from aiofiles.os import makedirs
from asyncio import Event
from mega import MegaApi, MegaListener, MegaRequest, MegaTransfer, MegaError
@@ -156,7 +155,7 @@ async def add_mega_download(mega_link, path, listener, name):
await executor.do(folder_api.logout, ())
return
- gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=8))
+ gid = token_hex(5)
size = api.getSize(node)
if limit_exceeded := await limit_checker(size, listener, isMega=True):
await sendMessage(listener.message, limit_exceeded)
diff --git a/bot/helper/mirror_utils/download_utils/rclone_download.py b/bot/helper/mirror_utils/download_utils/rclone_download.py
index 7cc1a1eccb..9ac4203c9d 100644
--- a/bot/helper/mirror_utils/download_utils/rclone_download.py
+++ b/bot/helper/mirror_utils/download_utils/rclone_download.py
@@ -1,8 +1,7 @@
#!/usr/bin/env python3
from asyncio import gather
from json import loads
-from random import SystemRandom
-from string import ascii_letters, digits
+from secrets import token_hex
from bot import download_dict, download_dict_lock, queue_dict_lock, non_queued_dl, LOGGER
from bot.helper.ext_utils.bot_utils import cmd_exec
@@ -41,8 +40,7 @@ async def add_rclone_download(rc_path, config_path, path, name, listener):
else:
name = rc_path.rsplit('/', 1)[-1]
size = rsize['bytes']
- gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
-
+ gid = token_hex(5)
msg, button = await stop_duplicate_check(name, listener)
if msg:
await sendMessage(listener.message, msg, button)
diff --git a/bot/helper/mirror_utils/download_utils/yt_dlp_download.py b/bot/helper/mirror_utils/download_utils/yt_dlp_download.py
index 6b80b1245b..022a71cb25 100644
--- a/bot/helper/mirror_utils/download_utils/yt_dlp_download.py
+++ b/bot/helper/mirror_utils/download_utils/yt_dlp_download.py
@@ -1,12 +1,11 @@
#!/usr/bin/env python3
from os import path as ospath, listdir
-from random import SystemRandom
-from string import ascii_letters, digits
+from secrets import token_hex
from logging import getLogger
from yt_dlp import YoutubeDL, DownloadError
from re import search as re_search
-from bot import download_dict_lock, download_dict, non_queued_dl, queue_dict_lock, config_dict
+from bot import download_dict_lock, download_dict, non_queued_dl, queue_dict_lock
from bot.helper.telegram_helper.message_utils import sendStatusMessage
from ..status_utils.yt_dlp_download_status import YtDlpDownloadStatus
from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus
@@ -67,10 +66,10 @@ def __init__(self, listener):
'overwrites': True,
'writethumbnail': True,
'trim_file_name': 220,
- 'retry_sleep_functions': {'http': lambda x: 2,
- 'fragment': lambda x: 2,
- 'file_access': lambda x: 2,
- 'extractor': lambda x: 2}}
+ 'retry_sleep_functions': {'http': lambda n: 3,
+ 'fragment': lambda n: 3,
+ 'file_access': lambda n: 3,
+ 'extractor': lambda n: 3}}
@property
def download_speed(self):
@@ -151,11 +150,10 @@ def extractMetaData(self, link, name):
self.__size += entry['filesize_approx']
elif 'filesize' in entry:
self.__size += entry['filesize']
- if not name:
+ if not self.name:
outtmpl_ = '%(series,playlist_title,channel)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d.%(ext)s'
- name, ext = ospath.splitext(
+ self.name, ext = ospath.splitext(
ydl.prepare_filename(entry, outtmpl=outtmpl_))
- self.name = name
if not self.__ext:
self.__ext = ext
else:
@@ -194,18 +192,19 @@ async def add_download(self, link, path, name, qual, playlist, options):
self.opts['ignoreerrors'] = True
self.is_playlist = True
- self.__gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=10))
-
+ self.__gid = token_hex(5)
await self.__onDownloadStart()
- self.opts['postprocessors'] = [{'add_chapters': True, 'add_infojson': 'if_exists', 'add_metadata': True, 'key': 'FFmpegMetadata'}]
+ self.opts['postprocessors'] = [
+ {'add_chapters': True, 'add_infojson': 'if_exists', 'add_metadata': True, 'key': 'FFmpegMetadata'}]
if qual.startswith('ba/b-'):
audio_info = qual.split('-')
qual = audio_info[0]
audio_format = audio_info[1]
rate = audio_info[2]
- self.opts['postprocessors'].append({'key': 'FFmpegExtractAudio', 'preferredcodec': audio_format, 'preferredquality': rate})
+ self.opts['postprocessors'].append(
+ {'key': 'FFmpegExtractAudio', 'preferredcodec': audio_format, 'preferredquality': rate})
if audio_format == 'vorbis':
self.__ext = '.ogg'
elif audio_format == 'alac':
@@ -225,7 +224,8 @@ async def add_download(self, link, path, name, qual, playlist, options):
base_name, ext = ospath.splitext(self.name)
trim_name = self.name if self.is_playlist else base_name
if len(trim_name.encode()) > 200:
- self.name = self.name[:200] if self.is_playlist else f'{base_name[:200]}{ext}'
+ self.name = self.name[:
+ 200] if self.is_playlist else f'{base_name[:200]}{ext}'
base_name = ospath.splitext(self.name)[0]
if self.is_playlist:
@@ -237,18 +237,20 @@ async def add_download(self, link, path, name, qual, playlist, options):
else:
self.opts['outtmpl'] = {'default': f"{path}/{self.name}",
'thumbnail': f"{path}/yt-dlp-thumb/{base_name}.%(ext)s"}
- self.name = base_name
+
+ if qual.startswith('ba/b'):
+ self.name = f'{base_name}{self.__ext}'
if self.__listener.isLeech:
self.opts['postprocessors'].append(
{'format': 'jpg', 'key': 'FFmpegThumbnailsConvertor', 'when': 'before_dl'})
- if self.__ext in ['.mp3', '.mkv', '.mka', '.ogg', '.opus', '.flac', '.m4a', '.mp4', '.mov']:
+ if self.__ext in ['.mp3', '.mkv', '.mka', '.ogg', '.opus', '.flac', '.m4a', '.mp4', '.mov', '.m4v']:
self.opts['postprocessors'].append(
{'already_have_thumbnail': self.__listener.isLeech, 'key': 'EmbedThumbnail'})
elif not self.__listener.isLeech:
self.opts['writethumbnail'] = False
- msg, button = await stop_duplicate_check(name, self.__listener)
+ msg, button = await stop_duplicate_check(self.name, self.__listener)
if msg:
await self.__listener.onDownloadError(msg, button)
return
@@ -285,11 +287,13 @@ def __set_options(self, options):
options = options.split('|')
for opt in options:
key, value = map(str.strip, opt.split(':', 1))
+ if key == 'format' and value.startswith('ba/b-'):
+ continue
if value.startswith('^'):
if '.' in value or value == '^inf':
- value = float(value.split('^')[1])
+ value = float(value.split('^', 1)[1])
else:
- value = int(value.split('^')[1])
+ value = int(value.split('^', 1)[1])
elif value.lower() == 'true':
value = True
elif value.lower() == 'false':
diff --git a/bot/helper/mirror_utils/rclone_utils/serve.py b/bot/helper/mirror_utils/rclone_utils/serve.py
index 2f1f4f9c8b..8b25c32a88 100644
--- a/bot/helper/mirror_utils/rclone_utils/serve.py
+++ b/bot/helper/mirror_utils/rclone_utils/serve.py
@@ -7,7 +7,6 @@
RcloneServe = []
-
async def rclone_serve_booter():
if not config_dict['RCLONE_SERVE_URL'] or not await aiopath.exists('rclone.conf'):
if RcloneServe:
diff --git a/bot/helper/mirror_utils/status_utils/ddl_status.py b/bot/helper/mirror_utils/status_utils/ddl_status.py
index eafa18ab5c..20cb76e2f8 100644
--- a/bot/helper/mirror_utils/status_utils/ddl_status.py
+++ b/bot/helper/mirror_utils/status_utils/ddl_status.py
@@ -16,7 +16,7 @@ def size(self):
return get_readable_file_size(self.__size)
def status(self):
- return MirrorStatus.STATUS_UPLOADDDL
+ return MirrorStatus.STATUS_UPLOADING
def name(self):
return self.__obj.name
diff --git a/bot/helper/mirror_utils/status_utils/direct_status.py b/bot/helper/mirror_utils/status_utils/direct_status.py
new file mode 100644
index 0000000000..bab1eea4d4
--- /dev/null
+++ b/bot/helper/mirror_utils/status_utils/direct_status.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+
+from bot.helper.ext_utils.bot_utils import (EngineStatus, MirrorStatus,
+ get_readable_file_size,
+ get_readable_time)
+
+
+
+class DirectStatus:
+ def __init__(self, obj, gid, listener, upload_details):
+ self.__gid = gid
+ self.__listener = listener
+ self.__obj = obj
+ self.__name = self.__obj.name
+ self.upload_details = upload_details
+ self.message = self.__listener.message
+
+ def gid(self):
+ return self.__gid
+
+ def speed_raw(self):
+ return self.__obj.speed
+
+ def progress_raw(self):
+ try:
+ return self.processed_raw() / self.__obj.total_size * 100
+ except:
+ return 0
+
+ def progress(self):
+ return f'{round(self.progress_raw(), 2)}%'
+
+ def speed(self):
+ return f'{get_readable_file_size(self.speed_raw())}/s'
+
+ def name(self):
+ return self.__name
+
+ def size(self):
+ return get_readable_file_size(self.__obj.total_size)
+
+ def eta(self):
+ try:
+ seconds = (self.__obj.total_size - self.processed_raw()) / self.speed_raw()
+ return get_readable_time(seconds)
+ except:
+ return '-'
+
+ def status(self):
+ return MirrorStatus.STATUS_DOWNLOADING
+
+ def processed_bytes(self):
+ return get_readable_file_size(self.processed_raw())
+
+ def processed_raw(self):
+ return self.__obj.processed_bytes
+
+ def download(self):
+ return self.__obj
+
+ def eng(self):
+ return EngineStatus().STATUS_ARIA
diff --git a/bot/helper/mirror_utils/upload_utils/ddlEngine.py b/bot/helper/mirror_utils/upload_utils/ddlEngine.py
index 3a7f3e2552..39595a1d88 100644
--- a/bot/helper/mirror_utils/upload_utils/ddlEngine.py
+++ b/bot/helper/mirror_utils/upload_utils/ddlEngine.py
@@ -1,98 +1,130 @@
#!/usr/bin/env python3
-import asyncio
+from pathlib import Path
+from traceback import format_exc
+from json import JSONDecodeError
+from io import BufferedReader
from re import findall as re_findall
-from os import path as ospath
+from aiofiles.os import path as aiopath
from time import time
+from tenacity import retry, wait_exponential, stop_after_attempt, retry_if_exception_type
+from aiohttp import ClientSession
+from aiohttp.client_exceptions import ContentTypeError
from bot import LOGGER, user_data
+from bot.helper.mirror_utils.upload_utils.ddlserver.gofile import Gofile
+from bot.helper.mirror_utils.upload_utils.ddlserver.streamtape import Streamtape
from bot.helper.ext_utils.fs_utils import get_mime_type
-from bot.helper.ext_utils.bot_utils import setInterval
-from bot.helper.mirror_utils.upload_utils.ddlserver.gofile import Async_Gofile
-class DDLUploader:
- def __init__(self, name=None, path=None, listener=None):
+class ProgressFileReader(BufferedReader):
+ def __init__(self, filename, read_callback=None):
+ super().__init__(open(filename, "rb"))
+ self.__read_callback = read_callback
+ self.length = Path(filename).stat().st_size
+
+ def read(self, size=None):
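+ # default to reading the remaining bytes and report the current file offset to the progress callback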
+ size = size or (self.length - self.tell())
+ if self.__read_callback:
+ self.__read_callback(self.tell())
+ return super().read(size)
+
+
+class DDLUploader:
+ def __init__(self, listener=None, name=None, path=None):
self.name = name
self.__processed_bytes = 0
+ self.last_uploaded = 0
self.__listener = listener
self.__path = path
- self.__updater = None
self.__start_time = time()
- self.__total_files = 0
- self.__total_folders = 0
- self.__is_cancelled = False
+ self.total_files = 0
+ self.total_folders = 0
+ self.is_cancelled = False
self.__is_errored = False
self.__ddl_servers = {}
- self.__engine = ''
- self.__total_time = 0
- self.__update_interval = 3
+ self.__engine = 'DDL v1'
+ self.__asyncSession = None
self.__user_id = self.__listener.message.from_user.id
async def __user_settings(self):
user_dict = user_data.get(self.__user_id, {})
self.__ddl_servers = user_dict.get('ddl_servers', {})
- async def __progress(self):
- if self.__updater is not None:
- self.__processed_bytes += self.__updater.interval
-
- async def __upload_to_gofile(self, file_path, token):
- gf = Async_Gofile(token=token)
- if ospath.isfile(file_path):
- cmd = await gf.upload(file=file_path)
- elif ospath.isdir(file_path):
- cmd = await gf.upload_folder(path=file_path)
- if cmd and 'parentFolder' in cmd:
- await gf.set_option(contentId=cmd['parentFolder'], option="public", value="true")
- if cmd and 'downloadPage' in cmd:
- return cmd['downloadPage']
- raise Exception("Failed to upload file/folder")
+ def __progress_callback(self, current):
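+ # ProgressFileReader reports an absolute offset; convert it into the incremental bytes uploaded since the last callback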
+ chunk_size = current - self.last_uploaded
+ self.last_uploaded = current
+ self.__processed_bytes += chunk_size
+
+ @retry(wait=wait_exponential(multiplier=2, min=4, max=8), stop=stop_after_attempt(3),
+ retry=retry_if_exception_type(Exception))
+ async def upload_aiohttp(self, url, file_path, req_file, data):
+ with ProgressFileReader(filename=file_path, read_callback=self.__progress_callback) as file:
+ data[req_file] = file
+ async with ClientSession() as self.__asyncSession:
+ async with self.__asyncSession.post(url, data=data) as resp:
+ if resp.status == 200:
+ try:
+ return await resp.json()
+ except ContentTypeError:
+ return "Uploaded"
+ except JSONDecodeError:
+ return None
async def __upload_to_ddl(self, file_path):
+ all_links = {}
for serv, (enabled, api_key) in self.__ddl_servers.items():
if enabled:
+ self.total_files = 0
+ self.total_folders = 0
if serv == 'gofile':
self.__engine = 'GoFile API'
- return await self.__upload_to_gofile(file_path, api_key)
- elif serv == 'streamsb':
- self.__engine = 'StreamSB API'
- # return await self.__upload_to_streamsb(file_path, api_key)
- raise Exception("No DDL Enabled to Upload.")
+ nlink = await Gofile(self, api_key).upload(file_path)
+ all_links['GoFile'] = nlink
+ if serv == 'streamtape':
+ self.__engine = 'StreamTape API'
+ try:
+ login, key = api_key.split(':')
+ except ValueError:
+ raise Exception("StreamTape Login & Key not Found, Kindly Recheck !")
+ nlink = await Streamtape(self, login, key).upload(file_path)
+ all_links['StreamTape'] = nlink
+ self.__processed_bytes = 0
+ if not all_links:
+ raise Exception("No DDL Enabled to Upload.")
+ return all_links
async def upload(self, file_name, size):
item_path = f"{self.__path}/{file_name}"
LOGGER.info(f"Uploading: {item_path} via DDL")
- self.__updater = setInterval(self.__update_interval, self.__progress)
await self.__user_settings()
try:
- if ospath.isfile(item_path):
+ if await aiopath.isfile(item_path):
mime_type = get_mime_type(item_path)
- link = await self.__upload_to_ddl(item_path)
- if self.__is_cancelled:
- return
- if link is None:
- raise Exception('Upload has been manually cancelled')
- LOGGER.info(f"Uploaded To DDL: {item_path}")
else:
mime_type = 'Folder'
- link = await self.__upload_to_ddl(item_path)
- if link is None:
- raise Exception('Upload has been manually cancelled!')
- if self.__is_cancelled:
- return
- LOGGER.info(f"Uploaded To DDL: {file_name}")
+ link = await self.__upload_to_ddl(item_path)
+ if link is None:
+ raise Exception('Upload has been manually cancelled!')
+ if self.is_cancelled:
+ return
+ LOGGER.info(f"Uploaded To DDL: {item_path}")
except Exception as err:
- LOGGER.info(f"DDL Upload has been Cancelled")
+ LOGGER.info("DDL Upload has been Cancelled")
+ if self.__asyncSession:
+ await self.__asyncSession.close()
+ err = str(err).replace('>', '').replace('<', '')
+ LOGGER.info(format_exc())
+ await self.__listener.onUploadError(err)
self.__is_errored = True
finally:
- if self.__is_cancelled or self.__is_errored:
+ if self.is_cancelled or self.__is_errored:
return
- await self.__listener.onUploadComplete(link, size, self.__total_files, self.__total_folders, mime_type, file_name)
+ await self.__listener.onUploadComplete(link, size, self.total_files, self.total_folders, mime_type, file_name)
@property
def speed(self):
try:
- return self.__processed_bytes / self.__total_time
+ return self.__processed_bytes / int(time() - self.__start_time)
except ZeroDivisionError:
return 0
@@ -105,6 +137,8 @@ def engine(self):
return self.__engine
async def cancel_download(self):
- self.__is_cancelled = True
+ self.is_cancelled = True
LOGGER.info(f"Cancelling Upload: {self.name}")
+ if self.__asyncSession:
+ await self.__asyncSession.close()
await self.__listener.onUploadError('Your upload has been stopped!')
diff --git a/bot/helper/mirror_utils/upload_utils/ddlserver/gofile.py b/bot/helper/mirror_utils/upload_utils/ddlserver/gofile.py
index 1001de6366..8938710552 100644
--- a/bot/helper/mirror_utils/upload_utils/ddlserver/gofile.py
+++ b/bot/helper/mirror_utils/upload_utils/ddlserver/gofile.py
@@ -1,209 +1,175 @@
-import os
-
+#!/usr/bin/env python3
+from os import path as ospath, walk
+from aiofiles.os import path as aiopath, rename as aiorename
from asyncio import sleep
from aiohttp import ClientSession
-from bot.helper.ext_utils.bot_utils import is_valid_token
-
+from bot import LOGGER
+from bot.helper.ext_utils.bot_utils import sync_to_async
-class Async_Gofile:
- def __init__(self, token=None):
+class Gofile:
+ def __init__(self, dluploader=None, token=None):
self.api_url = "https://api.gofile.io/"
+ self.dluploader = dluploader
self.token = token
- if self.token is not None:
- is_valid_token(url=self.api_url, token=self.token)
- async def _api_resp_handler(self, response):
- api_status = response["status"]
- if api_status == "ok":
+ @staticmethod
+ async def is_goapi(token):
+ if token is None:
+ return
+ async with ClientSession() as session:
+ async with session.get(f"https://api.gofile.io/getAccountDetails?token={token}&allDetails=true") as resp:
+ if (await resp.json())["status"] == "ok":
+ return True
+ return False
+
+ async def __resp_handler(self, response):
+ api_resp = response.get("status", "")
+ if api_resp == "ok":
return response["data"]
- else:
- if "error-" in response["status"]:
- error = response["status"].split("-")[1]
- else:
- error = "Response Status is not ok and reason is unknown"
- raise Exception(error)
+ raise Exception(api_resp.split("-")[1] if "error-" in api_resp else "Response Status is not ok and Reason is Unknown")
- async def get_Server(self, pre_session=None):
- if pre_session:
- server_resp = await pre_session.get(f"{self.api_url}getServer")
- server_resp = await server_resp.json()
- return await self._api_resp_handler(server_resp)
- else:
- async with ClientSession() as session:
- try:
- server_resp = await session.get(f"{self.api_url}getServer")
- server_resp = await server_resp.json()
- return await self._api_resp_handler(server_resp)
- except Exception as e:
- raise Exception(e)
+ async def __getServer(self):
+ async with ClientSession() as session:
+ async with session.get(f"{self.api_url}getServer") as resp:
+ return await self.__resp_handler(await resp.json())
- async def get_Account(self, check_account=False):
+ async def __getAccount(self, check_account=False):
if self.token is None:
raise Exception()
+
+ api_url = f"{self.api_url}getAccountDetails?token={self.token}&allDetails=true"
async with ClientSession() as session:
- try:
- get_account_resp = await session.get(url=f"{self.api_url}getAccountDetails?token={self.token}&allDetails=true")
- get_account_resp = await get_account_resp.json()
- if check_account is True:
- if get_account_resp["status"] == "ok":
- return True
- elif get_account_resp["status"] == "error-wrongToken":
- return False
- else:
- return await self._api_resp_handler(get_account_resp)
- else:
- return await self._api_resp_handler(get_account_resp)
- except Exception as e:
- raise Exception(e)
-
- async def upload_folder(self, path: str, folderId: str = "", delay: int = 2):
- if not os.path.isdir(path):
- raise Exception(f"{path} is not a valid directory")
-
- folder_name = os.path.basename(path)
- if not folderId:
- account_data = await self.get_Account()
- rtfid = account_data["rootFolder"]
- folder_data = await self.create_folder(rtfid, folder_name)
- folderId = folder_data["id"]
-
- uploaded = None
- folder_ids = {".": folderId} # Dictionary to store created folder IDs
- for root, dirs, files in os.walk(path):
- relative_path = os.path.relpath(root, path)
- if relative_path == ".":
- current_folder_id = folderId
+ resp = await (await session.get(url=api_url)).json()
+ if check_account:
+ return resp["status"] == "ok" if True else await self.__resp_handler(resp)
else:
- parent_folder_id = folder_ids.get(os.path.dirname(relative_path), folderId)
- folder_name = os.path.basename(relative_path)
- folder_data = await self.create_folder(parent_folder_id, folder_name)
- current_folder_id = folder_data["id"]
- folder_ids[relative_path] = current_folder_id
+ return await self.__resp_handler(resp)
+
+ async def upload_folder(self, path, folderId=None):
+ if not await aiopath.isdir(path):
+ raise Exception(f"Path: {path} is not a valid directory")
- for file in files:
- file_path = os.path.join(root, file)
- udt = await self.upload(file_path, current_folder_id)
- if uploaded is None:
- uploaded = udt
- await sleep(delay)
- return uploaded
-
- async def upload(self, file: str, folderId: str = "", description: str = "", password: str = "", tags: str = "", expire: str = ""):
- async with ClientSession() as session:
- # Check time
- if password and len(password) < 4:
- raise ValueError("Password Length must be greater than 4")
-
- server = await self.get_Server(pre_session=session)
- server = server["server"]
- token = self.token if self.token else ""
+ folder_data = await self.create_folder((await self.__getAccount())["rootFolder"], ospath.basename(path))
+ await self.__setOptions(contentId=folder_data["id"], option="public", value="true")
+
+ folderId = folderId or folder_data["id"]
+ folder_ids = {".": folderId}
+ for root, _, files in await sync_to_async(walk, path):
+ rel_path = ospath.relpath(root, path)
+ parentFolderId = folder_ids.get(ospath.dirname(rel_path), folderId)
+ folder_name = ospath.basename(rel_path)
+ currFolderId = (await self.create_folder(parentFolderId, folder_name))["id"]
+ await self.__setOptions(contentId=currFolderId, option="public", value="true")
+ folder_ids[rel_path] = currFolderId
- # Making dict
- req_dict = {}
- if token:
- req_dict["token"] = token
- if folderId:
- req_dict["folderId"] = folderId
- if description:
- req_dict["description"] = description
- if password:
- req_dict["password"] = password
- if tags:
- req_dict["tags"] = tags
- if expire:
- req_dict["expire"] = expire
-
- with open(file, "rb") as go_file_d:
- req_dict["file"] = go_file_d
- upload_file = await session.post(
- url=f"https://{server}.gofile.io/uploadFile",
- data=req_dict
- )
- upload_file = await upload_file.json()
- return await self._api_resp_handler(upload_file)
+ for file in files:
+ file_path = ospath.join(root, file)
+ up = await self.upload_file(file_path, currFolderId)
+
+ return folder_data["code"]
+
+ async def upload_file(self, path: str, folderId: str = "", description: str = "", password: str = "", tags: str = "", expire: str = ""):
+ if password and len(password) < 4:
+ raise ValueError("Password Length must be greater than 4")
+
+ server = (await self.__getServer())["server"]
+ token = self.token if self.token else ""
+ req_dict = {}
+ if token:
+ req_dict["token"] = token
+ if folderId:
+ req_dict["folderId"] = folderId
+ if description:
+ req_dict["description"] = description
+ if password:
+ req_dict["password"] = password
+ if tags:
+ req_dict["tags"] = tags
+ if expire:
+ req_dict["expire"] = expire
+
+ if self.dluploader.is_cancelled:
+ return
+ new_path = ospath.join(ospath.dirname(path), ospath.basename(path).replace(' ', '.'))
+ await aiorename(path, new_path)
+ self.dluploader.last_uploaded = 0
+ upload_file = await self.dluploader.upload_aiohttp(f"https://{server}.gofile.io/uploadFile", new_path, "file", req_dict)
+ return await self.__resp_handler(upload_file)
+
+ async def upload(self, file_path):
+ if not await self.is_goapi(self.token):
+ raise Exception("Invalid Gofile API Key, Recheck your account !!")
+ if await aiopath.isfile(file_path):
+ if (gCode := await self.upload_file(path=file_path)) and gCode.get("downloadPage", False):
+ return gCode['downloadPage']
+ elif await aiopath.isdir(file_path):
+ if (gCode := await self.upload_folder(path=file_path)):
+ return f"https://gofile.io/d/{gCode}"
+ if self.dluploader.is_cancelled:
+ return
+ raise Exception("Failed to upload file/folder to Gofile API, Retry or Try after sometimes...")
async def create_folder(self, parentFolderId, folderName):
if self.token is None:
raise Exception()
+
async with ClientSession() as session:
- try:
- folder_resp = await session.put(
- url=f"{self.api_url}createFolder",
- data={
+ async with session.put(url=f"{self.api_url}createFolder",
+ data={
"parentFolderId": parentFolderId,
"folderName": folderName,
"token": self.token
}
- )
- folder_resp = await folder_resp.json()
- return await self._api_resp_handler(folder_resp)
- except Exception as e:
- raise Exception(e)
+ ) as resp:
+ return await self.__resp_handler(await resp.json())
- async def set_option(self, contentId, option, value):
+ async def __setOptions(self, contentId, option, value):
if self.token is None:
raise Exception()
+
if not option in ["public", "password", "description", "expire", "tags"]:
- raise Exception(option)
+ raise Exception(f"Invalid GoFile Option Specified : {option}")
async with ClientSession() as session:
- try:
- set_resp = await session.put(
- url=f"{self.api_url}setOption",
- data={
+ async with session.put(url=f"{self.api_url}setOption",
+ data={
"token": self.token,
"contentId": contentId,
"option": option,
"value": value
}
- )
- set_resp = await set_resp.json()
- return await self._api_resp_handler(set_resp)
- except Exception as e:
- raise Exception(e)
+ ) as resp:
+ return await self.__resp_handler(await resp.json())
async def get_content(self, contentId):
if self.token is None:
raise Exception()
+
async with ClientSession() as session:
- try:
- get_content_resp = await session.get(url=f"{self.api_url}getContent?contentId={contentId}&token={self.token}")
- get_content_resp = await get_content_resp.json()
- return await self._api_resp_handler(get_content_resp)
- except Exception as e:
- raise Exception(e)
+ async with session.get(url=f"{self.api_url}getContent?contentId={contentId}&token={self.token}") as resp:
+ return await self.__resp_handler(await resp.json())
async def copy_content(self, contentsId, folderIdDest):
if self.token is None:
raise Exception()
async with ClientSession() as session:
- try:
- copy_content_resp = await session.put(
- url=f"{self.api_url}copyContent",
+ async with session.put(url=f"{self.api_url}copyContent",
data={
"token": self.token,
"contentsId": contentsId,
"folderIdDest": folderIdDest
}
- )
- copy_content_resp = await copy_content_resp.json()
- return await self._api_resp_handler(copy_content_resp)
- except Exception as e:
- raise Exception(e)
+ ) as resp:
+ return await self.__resp_handler(await resp.json())
async def delete_content(self, contentId):
if self.token is None:
raise Exception()
async with ClientSession() as session:
- try:
- del_content_resp = await session.delete(
- url=f"{self.api_url}deleteContent",
+ async with session.delete(url=f"{self.api_url}deleteContent",
data={
"contentId": contentId,
"token": self.token
}
- )
- del_content_resp = await del_content_resp.json()
- return await self._api_resp_handler(del_content_resp)
- except Exception as e:
- raise Exception(e)
+ ) as resp:
+ return await self.__resp_handler(await resp.json())
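
The Gofile helper above now issues every API call through aiohttp's async context managers instead of try/except wrappers, funnelling each JSON body into __resp_handler. A minimal standalone sketch of that request pattern (the URL and payload below are placeholders, not the bot's real endpoints):

```python
from aiohttp import ClientSession

async def put_json(url: str, data: dict) -> dict:
    # Session and request are both context-managed, so the connection is
    # released even if parsing the JSON response raises.
    async with ClientSession() as session:
        async with session.put(url=url, data=data) as resp:
            return await resp.json()
```
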
diff --git a/bot/helper/mirror_utils/upload_utils/ddlserver/streamtape.py b/bot/helper/mirror_utils/upload_utils/ddlserver/streamtape.py
new file mode 100644
index 0000000000..82989a388c
--- /dev/null
+++ b/bot/helper/mirror_utils/upload_utils/ddlserver/streamtape.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+from pathlib import Path
+
+from aiofiles.os import scandir, path as aiopath
+from aiofiles import open as aiopen
+from aiohttp import ClientSession
+
+from bot import config_dict, LOGGER
+from bot.helper.ext_utils.telegraph_helper import telegraph
+
+ALLOWED_EXTS = [
+ '.avi', '.mkv', '.mpg', '.mpeg', '.vob', '.wmv', '.flv', '.mp4', '.mov', '.m4v',
+ '.m2v', '.divx', '.3gp', '.webm', '.ogv', '.ogg', '.ts', '.ogm'
+]
+
+class Streamtape:
+ def __init__(self, dluploader, login, key):
+ self.__userLogin = login
+ self.__passKey = key
+ self.dluploader = dluploader
+ self.base_url = 'https://api.streamtape.com'
+
+ async def __getAccInfo(self):
+ async with ClientSession() as session, session.get(f"{self.base_url}/account/info?login={self.__userLogin}&key={self.__passKey}") as response:
+ if response.status == 200:
+ if (data := await response.json()) and data["status"] == 200:
+ return data["result"]
+ return None
+
+ async def __getUploadURL(self, folder=None, sha256=None, httponly=False):
+ _url = f"{self.base_url}/file/ul?login={self.__userLogin}&key={self.__passKey}"
+ if folder is not None:
+ _url += f"&folder={folder}"
+ if sha256 is not None:
+ _url += f"&sha256={sha256}"
+ if httponly:
+ _url += "&httponly=true"
+ async with ClientSession() as session:
+ async with session.get(_url) as response:
+ if response.status == 200:
+                    data = await response.json()
+                    if data and data["status"] == 200:
+ return data["result"]
+ return None
+
+ async def upload_file(self, file_path, folder_id=None, sha256=None, httponly=False):
+ if Path(file_path).suffix.lower() not in ALLOWED_EXTS:
+ return f"Skipping '{file_path}' due to disallowed extension."
+ file_name = Path(file_path).name
+ if not folder_id:
+ genfolder = await self.create_folder(file_name.rsplit(".", 1)[0])
+ if genfolder is None:
+ return None
+ folder_id = genfolder["folderid"]
+ upload_info = await self.__getUploadURL(folder=folder_id, sha256=sha256, httponly=httponly)
+ if upload_info is None:
+ return None
+ if self.dluploader.is_cancelled:
+ return
+ self.dluploader.last_uploaded = 0
+ uploaded = await self.dluploader.upload_aiohttp(upload_info["url"], file_path, file_name, {})
+ if uploaded:
+ file_id = (await self.list_folder(folder=folder_id))['files'][0]['linkid']
+ await self.rename(file_id, file_name)
+ return f"https://streamtape.to/v/{file_id}"
+ return None
+
+ async def create_folder(self, name, parent=None):
+ exfolders = [folder["name"] for folder in (await self.list_folder(folder=parent) or {"folders": []})["folders"]]
+ if name in exfolders:
+ i = 1
+ while f"{i} {name}" in exfolders:
+ i += 1
+ name = f"{i} {name}"
+
+ url = f"{self.base_url}/file/createfolder?login={self.__userLogin}&key={self.__passKey}&name={name}"
+ if parent is not None:
+ url += f"&pid={parent}"
+ async with ClientSession() as session, session.get(url) as response:
+ if response.status == 200:
+ data = await response.json()
+ if data.get("status") == 200:
+ return data.get("result")
+ return None
+
+ async def rename(self, file_id, name):
+ url = f"{self.base_url}/file/rename?login={self.__userLogin}&key={self.__passKey}&file={file_id}&name={name}"
+ async with ClientSession() as session, session.get(url) as response:
+ if response.status == 200:
+ data = await response.json()
+ if data.get("status") == 200:
+ return data.get("result")
+ return None
+
+ async def list_telegraph(self, folder_id, nested=False):
+ tg_html = ""
+ contents = await self.list_folder(folder_id)
+ for fid in contents['folders']:
+ tg_html += f"{finfo['name']}
{token}
+Validity: {validity}'''
# ---------------------
+ # async def token_callback(_, query): ---> __main__.py
+    ACTIVATED = '✅️ Activated ✅'
+ # ---------------------
+ # async def login(_, message): --> __main__.py
+ LOGGED_IN = 'Already Bot Login In!'
+ INVALID_PASS = 'Invalid Password!\n\nKindly put the correct Password .'
+ PASS_LOGGED = 'Bot Permanent Login Successfully!'
+    LOGIN_USED = 'Bot Login Usage :\n\n/cmd [password]'
+ # ---------------------
+ # async def log(_, message): ---> __main__.py
+ LOG_DISPLAY_BT = 'π Log Display'
+ WEB_PASTE_BT = 'π¨ Web Paste (SB)'
+ # ---------------------
+ # async def bot_help(client, message): ---> __main__.py
+ BASIC_BT = 'Basic'
+ USER_BT = 'Users'
+ MICS_BT = 'Mics'
+ O_S_BT = 'Owner & Sudos'
+ CLOSE_BT = 'Close'
+    HELP_HEADER = "γ Help Guide Menu!\n\nNOTE: Click on any CMD to see more details."
# async def stats(client, message):
BOT_STATS = '''β¬ BOT STATISTICS :
@@ -120,7 +147,8 @@ class WZMLStyle:
L_CC = 'β By: {Tag}\n\n'
PM_BOT_MSG = 'β² File(s) have been Sent above'
L_BOT_MSG = 'β² File(s) have been Sent to Bot PM (Private)'
- L_LL_MSG = 'β² File(s) have been Sent. Access via Links...'
+ L_LL_MSG = 'β² File(s) have been Sent. Access via Links...\n'
+ L_PM_WARN = 'β² BOT PM is Off turn it ON to get the Leech Index Link'
# ----- MIRROR -------
M_TYPE = 'β Type: {Mimetype}\n'
@@ -129,7 +157,7 @@ class WZMLStyle:
    RCPATH = 'β Path: {RCpath}\n'
M_CC = 'β By: {Tag}\n\n'
M_BOT_MSG = 'β² Link(s) have been Sent to Bot PM (Private)'
-
+ M_PM_WARN = 'β² BOT PM is Off turn it ON to get the Mirror Link'
# ----- BUTTONS -------
CLOUD_LINK = 'βοΈ Cloud Link'
SAVE_MSG = 'π¨ Save Message'
@@ -142,6 +170,7 @@ class WZMLStyle:
CHECK_PM = 'π₯ View in Bot PM'
CHECK_LL = 'π View in Links Log'
MEDIAINFO_LINK = 'π MediaInfo'
+ SCREENSHOTS = 'πΌ ScreenShots'
# ---------------------
# def get_readable_message(): ---> bot_utilis.py
diff --git a/bot/modules/anilist.py b/bot/modules/anilist.py
index cae1b37332..83c5e25e64 100644
--- a/bot/modules/anilist.py
+++ b/bot/modules/anilist.py
@@ -207,24 +207,31 @@ async def anilist(_, msg, aniid=None, u_id=None):
else:
user_id = int(u_id)
vars = {'id' : aniid}
- animeResp = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': vars}).json()['data'].get('Media', None)
- if animeResp:
+ if (
+ animeResp := rpost(
+ url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': vars}
+ )
+ .json()['data']
+ .get('Media', None)
+ ):
ro_title = animeResp['title']['romaji']
na_title = animeResp['title']['native']
en_title = animeResp['title']['english']
- format = animeResp['format']
- if format: format = format.capitalize()
- status = animeResp['status']
- if status: status = status.capitalize()
+ if format := animeResp['format']:
+ format = format.capitalize()
+ if status := animeResp['status']:
+ status = status.capitalize()
year = animeResp['seasonYear'] or 'N/A'
try:
sd = animeResp['startDate']
if sd['day'] and sd['year']: startdate = f"{month_name[sd['month']]} {sd['day']}, {sd['year']}"
- except: startdate = ""
+ except Exception:
+ startdate = ""
try:
ed = animeResp['endDate']
if ed['day'] and ed['year']: enddate = f"{month_name[ed['month']]} {ed['day']}, {ed['year']}"
- except: enddate = ""
+ except Exception:
+ enddate = ""
season = f"{animeResp['season'].capitalize()} {animeResp['seasonYear']}"
conname = (conn.get(alpha_2=animeResp['countryOfOrigin'])).name
try:
@@ -235,13 +242,14 @@ async def anilist(_, msg, aniid=None, u_id=None):
episodes = animeResp.get('episodes', 'N/A')
try:
duration = f"{get_readable_time(animeResp['duration']*60)}"
- except: duration = "N/A"
+ except Exception:
+ duration = "N/A"
avgscore = f"{animeResp['averageScore']}%" or ''
genres = ", ".join(f"{GENRES_EMOJI[x]} #{x.replace(' ', '_').replace('-', '_')}" for x in animeResp['genres'])
studios = ", ".join(f"""{x['name']}""" for x in animeResp['studios']['nodes'])
source = animeResp['source'] or '-'
hashtag = animeResp['hashtag'] or 'N/A'
- synonyms = ", ".join(x for x in animeResp['synonyms']) or ''
+ synonyms = ", ".join(animeResp['synonyms']) or ''
siteurl = animeResp.get('siteUrl')
trailer = animeResp.get('trailer', None)
if trailer and trailer.get('site') == "youtube":
@@ -278,11 +286,10 @@ async def anilist(_, msg, aniid=None, u_id=None):
LOGGER.error(f"AniList Error: {e}")
if aniid:
return template, btns.build_menu(3)
- else:
- try:
- await sendMessage(msg, template, btns.build_menu(3), photo=title_img)
- except:
- await sendMessage(msg, template, btns.build_menu(3), photo='https://te.legra.ph/file/8a5155c0fc61cc2b9728c.jpg')
+ try:
+ await sendMessage(msg, template, btns.build_menu(3), photo=title_img)
+ except Exception:
+ await sendMessage(msg, template, btns.build_menu(3), photo='https://te.legra.ph/file/8a5155c0fc61cc2b9728c.jpg')
async def setAnimeButtons(client, query):
@@ -299,30 +306,41 @@ async def setAnimeButtons(client, query):
await query.answer()
if data[2] == "tags":
aniTag = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None)
- msg = "Tags :\n\n"
- msg += "\n".join(f"""{x['name']} {x['rank']}%""" for x in aniTag['tags'])
+ msg = "Tags :\n\n" + "\n".join(
+ f"""{x['name']} {x['rank']}%"""
+ for x in aniTag['tags']
+ )
elif data[2] == "sts":
links = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None)
- msg = "External & Streaming Links :\n\n"
- msg += "\n".join(f"""{x['site']}""" for x in links['externalLinks'])
+ msg = "External & Streaming Links :\n\n" + "\n".join(
+ f"""{x['site']}"""
+ for x in links['externalLinks']
+ )
elif data[2] == "rev":
animeResp = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None)
- msg = "Reviews :\n\n"
reList = animeResp['reviews']['nodes']
-        msg += "\n\n".join(f"""{x['summary']}\nScore : {x['score']} / 100\nBy {x['user']['name']}""" for x in reList[:8])
+        msg = "Reviews :\n\n" + "\n\n".join(
+            f"""{x['summary']}\nScore : {x['score']} / 100\nBy {x['user']['name']}"""
+            for x in reList[:8]
+        )
elif data[2] == "rel":
animeResp = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None)
- msg = "Relations :\n\n"
-        msg += "\n\n".join(f"""{x['node']['title']['english']} ({x['node']['title']['romaji']})\nFormat: {x['node']['format'].capitalize()}\nStatus: {x['node']['status'].capitalize()}\nAverage Score: {x['node']['averageScore']}%\nSource: {x['node']['source'].capitalize()}\nRelation Type: {x.get('relationType', 'N/A').capitalize()}""" for x in animeResp['relations']['edges'])
+        msg = "Relations :\n\n" + "\n\n".join(
+            f"""{x['node']['title']['english']} ({x['node']['title']['romaji']})\nFormat: {x['node']['format'].capitalize()}\nStatus: {x['node']['status'].capitalize()}\nAverage Score: {x['node']['averageScore']}%\nSource: {x['node']['source'].capitalize()}\nRelation Type: {x.get('relationType', 'N/A').capitalize()}"""
+            for x in animeResp['relations']['edges']
+        )
elif data[2] == "cha":
animeResp = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None)
- msg = "List of Characters :\n\n"
- msg += "\n\n".join(f"""β’ {x['node']['name']['full']} ({x['node']['name']['native']})\nRole : {x['role'].capitalize()}""" for x in (animeResp['characters']['edges'])[:8])
+ msg = "List of Characters :\n\n" + "\n\n".join(
+ f"""β’ {x['node']['name']['full']} ({x['node']['name']['native']})\nRole : {x['role'].capitalize()}"""
+ for x in (animeResp['characters']['edges'])[:8]
+ )
elif data[2] == "home":
msg, btns = await anilist(client, message, siteid, data[1])
await editMessage(message, msg, btns)
return
await editMessage(message, msg, btns.build_menu(1))
+ return
async def character(_, message, aniid=None, u_id=None):
@@ -338,8 +356,11 @@ async def character(_, message, aniid=None, u_id=None):
else:
vars = {'id': aniid}
user_id = int(u_id)
- json = rpost(url, json={'query': character_query, 'variables': vars}).json()['data'].get('Character', None)
- if json:
+ if (
+ json := rpost(url, json={'query': character_query, 'variables': vars})
+ .json()['data']
+ .get('Character', None)
+ ):
        msg = f"{json.get('name').get('full')} ({json.get('name').get('native')})\n\n"
description = json['description']
site_url = json.get('siteUrl')
@@ -353,16 +374,14 @@ async def character(_, message, aniid=None, u_id=None):
if len(description) > 700:
description = f"{description[:700]}...."
        msg += markdown(description).replace('<p>', '').replace('</p>', '')
-        image = json.get('image', None)
-        if image:
+        if image := json.get('image', None):
             img = image.get('large')
         if aniid:
             return msg, rlp_mk
-        else:
-            if img:
-                await sendMessage(message, msg, rlp_mk, img)
-            else:
-                await sendMessage(message, msg)
+        if img:
+            await sendMessage(message, msg, rlp_mk, img)
+        else:
+            await sendMessage(message, msg)


 async def setCharacButtons(client, query):
@@ -418,7 +437,7 @@ async def manga(_, message):
         msg = msg.replace('
{get_readable_time(time() - start_time)}\nBroadcast ID: {bc_hash}")
+    await editMessage(
+        pls_wait,
+        f"{status.format(**locals())}\n\nElapsed Time: {get_readable_time(time() - start_time)}\nBroadcast ID: {bc_hash}",
+    )
bot.add_handler(MessageHandler(broadcast, filters=command(BotCommands.BroadcastCommand) & CustomFilters.sudo))
\ No newline at end of file
diff --git a/bot/modules/category_select.py b/bot/modules/category_select.py
index a4d54714bc..5b67ff88bd 100644
--- a/bot/modules/category_select.py
+++ b/bot/modules/category_select.py
@@ -131,7 +131,7 @@ async def confirm_dump(client, query):
user_dumps = await fetch_user_dumps(user_id)
cat_name = data[3].replace('_', ' ')
upall = cat_name == "All"
- bot_cache[msg_id][0] = user_dumps[cat_name] if not upall else list(user_dumps.values())
+ bot_cache[msg_id][0] = list(user_dumps.values()) if upall else user_dumps[cat_name]
buttons = ButtonMaker()
if user_dumps:
for _name in user_dumps.keys():
diff --git a/bot/modules/clone.py b/bot/modules/clone.py
index 391ca25655..1db2b975b1 100644
--- a/bot/modules/clone.py
+++ b/bot/modules/clone.py
@@ -1,8 +1,7 @@
#!/usr/bin/env python3
from pyrogram.handlers import MessageHandler
from pyrogram.filters import command
-from random import SystemRandom
-from string import ascii_letters, digits
+from secrets import token_hex
from asyncio import sleep, gather
from aiofiles.os import path as aiopath
from cloudscraper import create_scraper as cget
@@ -58,12 +57,13 @@ async def rcloneNode(client, message, link, dst_path, rcf, tag):
if config_path != f'rclone/{message.from_user.id}.conf':
await sendMessage(message, 'You should use same rclone.conf to clone between paths!')
return
+            dst_path = dst_path.replace('mrcc:', '', 1)
elif config_path != 'rclone.conf':
await sendMessage(message, 'You should use same rclone.conf to clone between paths!')
return
remote, src_path = link.split(':', 1)
- src_path = src_path .strip('/')
+ src_path = src_path.strip('/')
cmd = ['rclone', 'lsjson', '--fast-list', '--stat',
'--no-modtime', '--config', config_path, f'{remote}:{src_path}']
@@ -87,7 +87,7 @@ async def rcloneNode(client, message, link, dst_path, rcf, tag):
RCTransfer = RcloneTransferHelper(listener, name)
LOGGER.info(f'Clone Started: Name: {name} - Source: {link} - Destination: {dst_path}')
- gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
+ gid = token_hex(5)
async with download_dict_lock:
download_dict[message.id] = RcloneStatus(
RCTransfer, message, gid, 'cl', listener.upload_details)
@@ -149,7 +149,7 @@ async def gdcloneNode(message, link, listen_up):
button = await get_telegraph_list(telegraph_content)
await sendMessage(message, msg, button)
return
- listener = MirrorLeechListener(message, tag=listen_up[0], isClone=True, drive_id=listen_up[1], index_link=listen_up[2], source_url=org_link if org_link else link)
+ listener = MirrorLeechListener(message, tag=listen_up[0], isClone=True, drive_id=listen_up[1], index_link=listen_up[2], source_url=org_link or link)
if limit_exceeded := await limit_checker(size, listener):
await sendMessage(listener.message, limit_exceeded)
return
@@ -161,7 +161,7 @@ async def gdcloneNode(message, link, listen_up):
link, size, mime_type, files, folders = await sync_to_async(drive.clone, link, listener.drive_id)
await deleteMessage(msg)
else:
- gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12))
+ gid = token_hex(5)
async with download_dict_lock:
download_dict[message.id] = GdriveStatus(
drive, size, message, gid, 'cl', listener.upload_details)
@@ -195,7 +195,7 @@ async def clone(client, message):
try:
multi = int(args['-i'])
- except:
+ except Exception:
multi = 0
dst_path = args['-up'] or args['-upload']
@@ -204,7 +204,7 @@ async def clone(client, message):
drive_id = args['-id']
index_link = args['-index']
gd_cat = args['-c'] or args['-category']
-
+
if username := message.from_user.username:
tag = f"@{username}"
else:
diff --git a/bot/modules/eval.py b/bot/modules/eval.py
index f3e0f28a7f..83220dff16 100644
--- a/bot/modules/eval.py
+++ b/bot/modules/eval.py
@@ -2,12 +2,14 @@
from pyrogram.handlers import MessageHandler
from pyrogram.filters import command
from os import path as ospath, getcwd, chdir
+from aiofiles import open as aiopen
from traceback import format_exc
from textwrap import indent
from io import StringIO, BytesIO
-from contextlib import redirect_stdout
+from re import match
+from contextlib import redirect_stdout, suppress
-from bot import LOGGER, bot
+from bot import LOGGER, bot, user
from bot.helper.telegram_helper.filters import CustomFilters
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.message_utils import sendFile, sendMessage
@@ -21,15 +23,13 @@ def namespace_of(message):
'__builtins__': globals()['__builtins__'],
'bot': bot,
'message': message,
- 'user': message.from_user or message.sender_chat,
- 'chat': message.chat}
-
+ 'user': user,
+ }
return namespaces[message.chat.id]
def log_input(message):
- LOGGER.info(
- f"IN: {message.text} (user={message.from_user.id}, chat={message.chat.id})")
+ LOGGER.info(f"INPUT: {message.text} (User ID ={message.from_user.id} | Chat ID ={message.chat.id})")
async def send(msg, message):
@@ -38,8 +38,12 @@ async def send(msg, message):
out_file.name = "output.txt"
await sendFile(message, out_file)
else:
- LOGGER.info(f"OUT: '{msg}'")
- await sendMessage(message, f"{msg}")
+ LOGGER.info(f"OUTPUT: '{msg}'")
+ if not msg or msg == '\n':
+ msg = "MessageEmpty"
+ elif not bool(match(r'<(spoiler|b|i|code|s|u|/a)>', msg)):
+        msg = f"<code>{msg}</code>"
+ await sendMessage(message, msg)
@new_task
@@ -65,8 +69,8 @@ async def do(func, message):
env = namespace_of(message)
chdir(getcwd())
- with open(ospath.join(getcwd(), 'bot/modules/temp.txt'), 'w') as temp:
- temp.write(body)
+ async with aiopen(ospath.join(getcwd(), 'bot/modules/temp.txt'), 'w') as temp:
+ await temp.write(body)
stdout = StringIO()
@@ -92,10 +96,8 @@ async def do(func, message):
if value:
result = f'{value}'
else:
- try:
+ with suppress(Exception):
result = f'{repr(eval(body, env))}'
- except:
- pass
else:
result = f'{value}{func_return}'
if result:
@@ -107,7 +109,9 @@ async def clear(client, message):
global namespaces
if message.chat.id in namespaces:
del namespaces[message.chat.id]
- await send("Locals Cleared.", message)
+ await send("Cached Locals Cleared !", message)
+ else:
+ await send("No Cache Locals Found !", message)
bot.add_handler(MessageHandler(evaluate, filters=command(
diff --git a/bot/modules/gen_pyro_sess.py b/bot/modules/gen_pyro_sess.py
index 89e1ecfadd..f8eb3769ea 100644
--- a/bot/modules/gen_pyro_sess.py
+++ b/bot/modules/gen_pyro_sess.py
@@ -139,7 +139,8 @@ async def genPyroString(client, message):
try:
await aioremove(f'WZML-X-{message.from_user.id}.session')
await aioremove(f'WZML-X-{message.from_user.id}.session-journal')
- except: pass
+ except Exception:
+ pass
async def set_details(_, message, newkey):
diff --git a/bot/modules/images.py b/bot/modules/images.py
index 29bab52293..f74e4a00fb 100644
--- a/bot/modules/images.py
+++ b/bot/modules/images.py
@@ -20,13 +20,12 @@ async def picture_add(_, message):
editable = await sendMessage(message, "Fetching Input ...")
if len(message.command) > 1 or resm and resm.text:
msg_text = resm.text if resm else message.command[1]
- if msg_text.startswith("http"):
- pic_add = msg_text.strip()
-        await editMessage(editable, f"Adding your Link : {pic_add}")
- else:
+ if not msg_text.startswith("http"):
return await editMessage(editable, "Not a Valid Link, Must Start with 'http'")
+ pic_add = msg_text.strip()
+        await editMessage(editable, f"Adding your Link : {pic_add}")
elif resm and resm.photo:
- if not (resm.photo and resm.photo.file_size <= 5242880*2):
+ if resm.photo.file_size > 5242880 * 2:
return await editMessage(editable, "Media is Not Supported! Only Photos!!")
try:
photo_dir = await resm.download()
@@ -53,12 +52,12 @@ async def picture_add(_, message):
async def pictures(_, message):
- user_id = message.from_user.id
if not config_dict['IMAGES']:
await sendMessage(message, f"No Photo to Show ! Add by /{BotCommands.AddImageCommand}")
else:
to_edit = await sendMessage(message, "Generating Grid of your Images...")
buttons = ButtonMaker()
+ user_id = message.from_user.id
buttons.ibutton("<<", f"images {user_id} turn -1")
buttons.ibutton(">>", f"images {user_id} turn 1")
buttons.ibutton("Remove Image", f"images {user_id} remov 0")
diff --git a/bot/modules/imdb.py b/bot/modules/imdb.py
index 2cf008de69..fbfd5488a1 100644
--- a/bot/modules/imdb.py
+++ b/bot/modules/imdb.py
@@ -1,4 +1,5 @@
#!/usr/bin/env python3
+from contextlib import suppress
from re import findall, IGNORECASE
from imdb import Cinemagoer
from pycountry import countries as conn
@@ -28,10 +29,10 @@ async def imdb_search(_, message):
buttons = ButtonMaker()
if title.lower().startswith("https://www.imdb.com/title/tt"):
movieid = title.replace("https://www.imdb.com/title/tt", "")
- movie = imdb.get_movie(movieid)
- if not movie:
+ if movie := imdb.get_movie(movieid):
+ buttons.ibutton(f"π¬ {movie.get('title')} ({movie.get('year')})", f"imdb {user_id} movie {movieid}")
+ else:
return await editMessage(k, "No Results Found")
- buttons.ibutton(f"π¬ {movie.get('title')} ({movie.get('year')})", f"imdb {user_id} movie {movieid}")
else:
movies = get_poster(title, bulk=True)
if not movies:
@@ -62,14 +63,10 @@ def get_poster(query, bulk=False, id=False, file=None):
if not movieid:
return None
if year:
- filtered=list(filter(lambda k: str(k.get('year')) == str(year), movieid))
- if not filtered:
- filtered = movieid
+ filtered = list(filter(lambda k: str(k.get('year')) == str(year), movieid)) or movieid
else:
filtered = movieid
- movieid=list(filter(lambda k: k.get('kind') in ['movie', 'tv series'], filtered))
- if not movieid:
- movieid = filtered
+ movieid = list(filter(lambda k: k.get('kind') in ['movie', 'tv series'], filtered)) or filtered
if bulk:
return movieid
movieid = movieid[0].movieID
@@ -83,10 +80,7 @@ def get_poster(query, bulk=False, id=False, file=None):
else:
date = "N/A"
plot = movie.get('plot')
- if plot and len(plot) > 0:
- plot = plot[0]
- else:
- plot = movie.get('plot outline')
+ plot = plot[0] if plot and len(plot) > 0 else movie.get('plot outline')
if plot and len(plot) > 300:
plot = f"{plot[:300]}..."
return {
@@ -152,11 +146,9 @@ def list_to_hash(k, flagg=False, emoji=False):
for elem in k:
ele = elem.replace(" ", "_").replace("-", "_")
if flagg:
- try:
+ with suppress(AttributeError):
conflag = (conn.get(name=elem)).flag
listing += f'{conflag} '
- except AttributeError:
- pass
if emoji:
listing += f"{IMDB_GENRE_EMOJI.get(elem, '')} "
listing += f'#{ele}, '
diff --git a/bot/modules/mediainfo.py b/bot/modules/mediainfo.py
index 5f78900481..4b16305e1c 100644
--- a/bot/modules/mediainfo.py
+++ b/bot/modules/mediainfo.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-import aiohttp
+from aiohttp import ClientSession
from re import search as re_search
from shlex import split as ssplit
from aiofiles import open as aiopen
@@ -27,7 +27,7 @@ async def gen_mediainfo(message, link=None, media=None, mmsg=None):
filename = re_search(".+/(.+)", link).group(1)
des_path = ospath.join(path, filename)
headers = {"user-agent":"Mozilla/5.0 (Linux; Android 12; 2201116PI) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Mobile Safari/537.36"}
- async with aiohttp.ClientSession() as session:
+ async with ClientSession() as session:
async with session.get(link, headers=headers) as response:
async with aiopen(des_path, "wb") as f:
async for chunk in response.content.iter_chunked(10000000):
@@ -85,11 +85,24 @@ async def mediainfo(_, message):
link = rply.text if rply else message.command[1]
return await gen_mediainfo(message, link)
elif rply:
- file = next((i for i in [rply.document, rply.video, rply.audio, rply.voice,
- rply.animation, rply.video_note] if i is not None), None)
- if not file:
+ if file := next(
+ (
+ i
+ for i in [
+ rply.document,
+ rply.video,
+ rply.audio,
+ rply.voice,
+ rply.animation,
+ rply.video_note,
+ ]
+ if i is not None
+ ),
+ None,
+ ):
+ return await gen_mediainfo(message, None, file, rply)
+ else:
return await sendMessage(message, help_msg)
- return await gen_mediainfo(message, None, file, rply)
else:
return await sendMessage(message, help_msg)
diff --git a/bot/modules/mirror_leech.py b/bot/modules/mirror_leech.py
index 42ce65b952..9b8a733142 100644
--- a/bot/modules/mirror_leech.py
+++ b/bot/modules/mirror_leech.py
@@ -9,6 +9,7 @@
from cloudscraper import create_scraper
from bot import bot, DOWNLOAD_DIR, LOGGER, config_dict, bot_name, categories_dict, user_data
+from bot.helper.mirror_utils.download_utils.direct_downloader import add_direct_download
from bot.helper.ext_utils.bot_utils import is_url, is_magnet, is_mega_link, is_gdrive_link, get_content_type, new_task, sync_to_async, is_rclone_path, is_telegram_link, arg_parser, fetch_user_tds, fetch_user_dumps, get_stats
from bot.helper.ext_utils.exceptions import DirectDownloadLinkException
from bot.helper.ext_utils.task_manager import task_utils
@@ -36,7 +37,7 @@ async def _mirror_leech(client, message, isQbit=False, isLeech=False, sameDir=No
input_list = text[0].split(' ')
arg_base = {'link': '',
- '-i': 0,
+ '-i': '0',
'-m': '', '-sd': '', '-samedir': '',
'-d': False, '-seed': False,
'-j': False, '-join': False,
@@ -54,15 +55,15 @@ async def _mirror_leech(client, message, isQbit=False, isLeech=False, sameDir=No
'-index': '',
'-c': '', '-category': '',
'-ud': '', '-dump': '',
+ '-h': '', '-headers': '',
+ '-ss': '0', '-screenshots': '',
+ '-t': '', '-thumb': '',
}
args = arg_parser(input_list[1:], arg_base)
cmd = input_list[0].split('@')[0]
- try:
- multi = int(args['-i'])
- except:
- multi = 0
+ multi = int(args['-i']) if args['-i'].isdigit() else 0
link = args['link']
folder_name = args['-m'] or args['-sd'] or args['-samedir']
@@ -79,6 +80,11 @@ async def _mirror_leech(client, message, isQbit=False, isLeech=False, sameDir=No
index_link = args['-index']
gd_cat = args['-c'] or args['-category']
user_dump = args['-ud'] or args['-dump']
+ headers = args['-h'] or args['-headers']
+ ussr = args['-u'] or args['-user']
+ pssw = args['-p'] or args['-pass']
+ thumb = args['-t'] or args['-thumb']
+ sshots = int(ss) if (ss := (args['-ss'] or args['-screenshots'])).isdigit() else 0
bulk_start = 0
bulk_end = 0
ratio = None
@@ -228,17 +234,26 @@ async def __run_multi():
LOGGER.info(link)
org_link = link
- if not is_mega_link(link) and not isQbit and not is_magnet(link) and not is_rclone_path(link) \
- and not is_gdrive_link(link) and not link.endswith('.torrent') and file_ is None:
+ if (not is_mega_link(link) or (is_mega_link(link) and not config_dict['MEGA_EMAIL'] and config_dict['DEBRID_LINK_API'])) \
+ and (not is_magnet(link) or (config_dict['REAL_DEBRID_API'] and is_magnet(link))) \
+ and (not isQbit or (config_dict['REAL_DEBRID_API'] and is_magnet(link))) \
+ and not is_rclone_path(link) and not is_gdrive_link(link) and not link.endswith('.torrent') and file_ is None:
content_type = await get_content_type(link)
if content_type is None or re_match(r'text/html|text/plain', content_type):
            process_msg = await sendMessage(message, f"Processing: {link}")
try:
+ if not is_magnet(link) and (ussr or pssw):
+ link = (link, (ussr, pssw))
link = await sync_to_async(direct_link_generator, link)
- LOGGER.info(f"Generated link: {link}")
-            await editMessage(process_msg, f"Generated link: {link}")
+ if isinstance(link, tuple):
+ link, headers = link
+ if isinstance(link, str):
+ LOGGER.info(f"Generated link: {link}")
+                await editMessage(process_msg, f"Generated link: {link}")
except DirectDownloadLinkException as e:
- LOGGER.info(str(e))
+ e = str(e)
+ if 'This link requires a password!' not in e:
+ LOGGER.info(e)
if str(e).startswith('ERROR:'):
await editMessage(process_msg, str(e))
await delete_links(message)
@@ -318,11 +333,14 @@ async def __run_multi():
return
listener = MirrorLeechListener(message, compress, extract, isQbit, isLeech, tag, select, seed,
- sameDir, rcf, up, join, drive_id=drive_id, index_link=index_link, source_url=org_link if org_link else link)
+ sameDir, rcf, up, join, drive_id=drive_id, index_link=index_link,
+ source_url=org_link or link, leech_utils={'screenshots': sshots, 'thumb': thumb})
if file_ is not None:
await delete_links(message)
await TelegramDownloadHelper(listener).add_download(reply_to, f'{path}/', name, session)
+ elif isinstance(link, dict):
+ await add_direct_download(link, path, listener, name)
elif is_rclone_path(link):
if link.startswith('mrcc:'):
link = link.split('mrcc:', 1)[1]
@@ -340,16 +358,16 @@ async def __run_multi():
elif is_mega_link(link):
await delete_links(message)
await add_mega_download(link, f'{path}/', listener, name)
- elif isQbit:
+ elif isQbit and 'real-debrid' not in link:
await add_qb_torrent(link, path, listener, ratio, seed_time)
- else:
- ussr = args['-u'] or args['-user']
- pssw = args['-p'] or args['-pass']
+ elif not is_telegram_link(link):
if ussr or pssw:
auth = f"{ussr}:{pssw}"
- auth = "Basic " + b64encode(auth.encode()).decode('ascii')
+ auth = f"authorization: Basic {b64encode(auth.encode()).decode('ascii')}"
else:
auth = ''
+ if headers:
+                auth += f' {headers}'
await add_aria2c_download(link, path, listener, name, auth, ratio, seed_time)
await delete_links(message)
@@ -390,11 +408,13 @@ def parseline(line):
async with aiopen('log.txt', 'r') as f:
logFile = await f.read()
cget = create_scraper().request
- resp = cget('POST', 'http://stashbin.xyz/api/document', data={'content': logFile}).json()
- if resp['ok']:
+ resp = cget('POST', 'https://spaceb.in/api/v1/documents', data={'content': logFile, 'extension': 'None'}).json()
+ if resp['status'] == 201:
btn = ButtonMaker()
- btn.ubutton('π¨ Web Paste', f"http://stashbin.xyz/{resp['data']['key']}")
+ btn.ubutton('π¨ Web Paste (SB)', f"https://spaceb.in/{resp['payload']['id']}")
await editReplyMarkup(message, btn.build_menu(1))
+ else:
+        LOGGER.error(f"Web Paste Failed : {resp}")
elif data[2] == "botpm":
await query.answer(url=f"https://t.me/{bot_name}?start=wzmlx")
elif data[2] == "help":
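
The -u/-p handling in mirror_leech.py above now folds the credentials into an aria2c-style "authorization" request header; a small sketch of just that encoding step (the function name and demo values are illustrative, not part of the bot):

```python
from base64 import b64encode

def basic_auth_header(user: str, password: str) -> str:
    # aria2c accepts extra request headers as "header-name: value" strings.
    token = b64encode(f"{user}:{password}".encode()).decode("ascii")
    return f"authorization: Basic {token}"

print(basic_auth_header("demo_user", "demo_pass"))
```
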
diff --git a/bot/modules/mydramalist.py b/bot/modules/mydramalist.py
index 215c7daa93..70116f45c2 100644
--- a/bot/modules/mydramalist.py
+++ b/bot/modules/mydramalist.py
@@ -1,11 +1,12 @@
#!/usr/bin/env python3
+from contextlib import suppress
from aiohttp import ClientSession
from requests import get as rget
from urllib.parse import quote as q
from pycountry import countries as conn
from pyrogram.filters import command, regex
-from pyrogram.handlers import MessageHandler, CallbackQueryHandler
+from pyrogram.handlers import MessageHandler, CallbackQueryHandler
from pyrogram.errors import MediaEmpty, PhotoInvalidDimensions, WebpageMediaEmpty, ReplyMarkupInvalid
from bot import LOGGER, bot, config_dict, user_data
@@ -103,11 +104,9 @@ def list_to_hash(k, flagg=False, emoji=False):
for elem in k:
ele = elem.replace(" ", "_").replace("-", "_")
if flagg:
- try:
+ with suppress(AttributeError):
conflag = (conn.get(name=elem)).flag
listing += f'{conflag} '
- except AttributeError:
- pass
if emoji:
listing += f"{IMDB_GENRE_EMOJI.get(elem, '')} "
listing += f'#{ele}, '
diff --git a/bot/modules/rss.py b/bot/modules/rss.py
index 44d0d300df..23fb7a8aba 100644
--- a/bot/modules/rss.py
+++ b/bot/modules/rss.py
@@ -145,7 +145,12 @@ async def rssSub(client, message, pre_event):
if msg:
await sendMessage(message, msg)
await updateRssMenu(pre_event)
-
+ is_sudo = await CustomFilters.sudo(client, message)
+ if scheduler.state == 2:
+ scheduler.resume()
+ elif is_sudo and not scheduler.running:
+ addJob(config_dict['RSS_DELAY'])
+ scheduler.start()
async def getUserId(title):
async with rss_dict_lock:
@@ -554,8 +559,8 @@ async def rssListener(client, query):
async def rssMonitor():
- if not config_dict['RSS_CHAT_ID']:
- LOGGER.warning('RSS_CHAT_ID not added! Shutting down rss scheduler...')
+ if not config_dict['RSS_CHAT']:
+ LOGGER.warning('RSS_CHAT not added! Shutting down rss scheduler...')
scheduler.shutdown(wait=False)
return
if len(rss_dict) == 0:
@@ -564,7 +569,6 @@ async def rssMonitor():
all_paused = True
for user, items in list(rss_dict.items()):
for title, data in list(items.items()):
- await sleep(0)
try:
if data['paused']:
continue
@@ -576,11 +580,11 @@ async def rssMonitor():
last_link = rss_d.entries[0]['links'][1]['href']
except IndexError:
last_link = rss_d.entries[0]['link']
+ finally:
+ all_paused = False
last_title = rss_d.entries[0]['title']
if data['last_feed'] == last_link or data['last_title'] == last_title:
- all_paused = False
continue
- all_paused = False
feed_count = 0
while True:
try:
@@ -637,7 +641,7 @@ async def rssMonitor():
break
except Exception as e:
LOGGER.error(
- f"{e} Feed Name: {title} - Feed Link: {data['link']}")
+ f"{e} - Feed Name: {title} - Feed Link: {data['link']}")
continue
if all_paused:
scheduler.pause()
@@ -647,7 +651,6 @@ def addJob(delay):
scheduler.add_job(rssMonitor, trigger=IntervalTrigger(seconds=delay), id='0', name='RSS', misfire_grace_time=15,
max_instances=1, next_run_time=datetime.now()+timedelta(seconds=20), replace_existing=True)
-
addJob(config_dict['RSS_DELAY'])
scheduler.start()
bot.add_handler(MessageHandler(getRssMenu, filters=command(
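
rssSub above resumes the scheduler when it is paused (APScheduler state 2) or starts it fresh for sudo users; a compact sketch of that guard, assuming an AsyncIOScheduler plus a job-adding helper in the spirit of the module's addJob:

```python
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.schedulers.base import STATE_PAUSED  # == 2

scheduler = AsyncIOScheduler()

def ensure_rss_job(add_job, delay: int, is_sudo: bool) -> None:
    # Resume a paused scheduler, otherwise let a sudo user start it with the monitor job.
    if scheduler.state == STATE_PAUSED:
        scheduler.resume()
    elif is_sudo and not scheduler.running:
        add_job(delay)
        scheduler.start()
```
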
diff --git a/bot/modules/save_msg.py b/bot/modules/save_msg.py
index fdb65076e4..9bf1018d41 100644
--- a/bot/modules/save_msg.py
+++ b/bot/modules/save_msg.py
@@ -15,7 +15,7 @@ async def save_message(_, query):
try:
await query.message.copy(usr, reply_markup=InlineKeyboardMarkup(BTN) if (BTN := query.message.reply_markup.inline_keyboard[:-1]) else None)
await query.answer("Message/Media Successfully Saved !", show_alert=True)
- except:
+ except Exception:
if user_dict.get('save_mode'):
await query.answer('Make Bot as Admin and give Post Permissions and Try Again', show_alert=True)
else:
diff --git a/bot/modules/speedtest.py b/bot/modules/speedtest.py
index 11e9753eaf..6952e452e7 100644
--- a/bot/modules/speedtest.py
+++ b/bot/modules/speedtest.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-from speedtest import Speedtest
+from speedtest import Speedtest, ConfigRetrievalError
from pyrogram.handlers import MessageHandler
from pyrogram.filters import command
@@ -11,8 +11,12 @@
@new_task
async def speedtest(_, message):
- speed = await sendMessage(message, "Initializing Speedtest...")
- test = Speedtest()
+ speed = await sendMessage(message, "Initiating Speedtest...")
+ try:
+ test = Speedtest()
+ except ConfigRetrievalError:
+ await editMessage(speed, "ERROR: Can't connect to Server at the Moment, Try Again Later !")
+ return
test.get_best_server()
test.download()
test.upload()
@@ -49,7 +53,7 @@ async def speedtest(_, message):
await deleteMessage(speed)
except Exception as e:
LOGGER.error(str(e))
- pho = await editMessage(speed, string_speed)
+ await editMessage(speed, string_speed)
bot.add_handler(MessageHandler(speedtest, filters=command(
BotCommands.SpeedCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
diff --git a/bot/modules/torrent_search.py b/bot/modules/torrent_search.py
index 164c3072a5..8ad050c9fe 100644
--- a/bot/modules/torrent_search.py
+++ b/bot/modules/torrent_search.py
@@ -145,7 +145,7 @@ async def __getResult(search_results, key, message, method):
msg += f"Size: {result['size']}user_login:pass_key
\nTimeout: 60 sec"],
}
fname_dict = {'rcc': 'RClone',
'lprefix': 'Prefix',
@@ -55,7 +56,7 @@
'ddl_servers': 'DDL Servers',
'user_tds': 'User Custom TDs',
'gofile': 'GoFile',
- 'streamsb': 'StreamSB',
+ 'streamtape': 'StreamTape',
}
async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None):
@@ -89,7 +90,7 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None)
mediainfo = "Force Enabled"
save_mode = "Save As Dump" if user_dict.get('save_mode') else "Save As BotPM"
buttons.ibutton('Save As BotPM' if save_mode == 'Save As Dump' else 'Save As Dump', f"userset {user_id} save_mode")
- dailytl = config_dict['DAILY_TASK_LIMIT'] if config_dict['DAILY_TASK_LIMIT'] else "βΎοΈ"
+ dailytl = config_dict['DAILY_TASK_LIMIT'] or "βΎοΈ"
dailytas = user_dict.get('dly_tasks')[1] if user_dict and user_dict.get('dly_tasks') and user_id != OWNER_ID and config_dict['DAILY_TASK_LIMIT'] else config_dict.get('DAILY_TASK_LIMIT', "βΎοΈ") if user_id != OWNER_ID else "βΎοΈ"
if user_dict.get('dly_tasks', False):
t = str(datetime.now() - user_dict['dly_tasks'][0]).split(':')
@@ -117,7 +118,7 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None)
ddl_serv = len(val) if (val := user_dict.get('ddl_servers', False)) else 0
buttons.ibutton("DDL Servers", f"userset {user_id} ddl_servers")
- tds_mode = "Enabled" if user_dict.get('td_mode', config_dict['BOT_PM']) else "Disabled"
+ tds_mode = "Enabled" if user_dict.get('td_mode', False) else "Disabled"
if not config_dict['USER_TD_MODE']:
tds_mode = "Force Disabled"
@@ -141,25 +142,25 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None)
dailytlle = get_readable_file_size(config_dict['DAILY_LEECH_LIMIT'] * 1024**3) if config_dict['DAILY_LEECH_LIMIT'] else "οΈβ"
dailyll = get_readable_file_size(await getdailytasks(user_id, check_leech=True)) if config_dict['DAILY_LEECH_LIMIT'] and user_id != OWNER_ID else "β"
- buttons.ibutton("Thumbnail", f"userset {user_id} thumb")
thumbmsg = "Exists" if await aiopath.exists(thumbpath) else "Not Exists"
-
- buttons.ibutton("Leech Splits", f"userset {user_id} split_size")
+    buttons.ibutton(f"{'✅️' if thumbmsg == 'Exists' else ''} Thumbnail", f"userset {user_id} thumb")
+
split_size = get_readable_file_size(config_dict['LEECH_SPLIT_SIZE']) + ' (Default)' if user_dict.get('split_size', '') == '' else get_readable_file_size(user_dict['split_size'])
equal_splits = 'Enabled' if user_dict.get('equal_splits', config_dict.get('EQUAL_SPLITS')) else 'Disabled'
media_group = 'Enabled' if user_dict.get('media_group', config_dict.get('MEDIA_GROUP')) else 'Disabled'
+    buttons.ibutton(f"{'✅️' if user_dict.get('split_size', False) else ''} Leech Splits", f"userset {user_id} split_size")
- buttons.ibutton("Leech Caption", f"userset {user_id} lcaption")
lcaption = 'Not Exists' if (val:=user_dict.get('lcaption', config_dict.get('LEECH_FILENAME_CAPTION', ''))) == '' else val
+    buttons.ibutton(f"{'✅️' if lcaption != 'Not Exists' else ''} Leech Caption", f"userset {user_id} lcaption")
- buttons.ibutton("Leech Prefix", f"userset {user_id} lprefix")
lprefix = 'Not Exists' if (val:=user_dict.get('lprefix', config_dict.get('LEECH_FILENAME_PREFIX', ''))) == '' else val
+    buttons.ibutton(f"{'✅️' if lprefix != 'Not Exists' else ''} Leech Prefix", f"userset {user_id} lprefix")
- buttons.ibutton("Leech Suffix", f"userset {user_id} lsuffix")
lsuffix = 'Not Exists' if (val:=user_dict.get('lsuffix', config_dict.get('LEECH_FILENAME_SUFFIX', ''))) == '' else val
-
- buttons.ibutton("Leech Remname", f"userset {user_id} lremname")
+    buttons.ibutton(f"{'✅️' if lsuffix != 'Not Exists' else ''} Leech Suffix", f"userset {user_id} lsuffix")
+
lremname = 'Not Exists' if (val:=user_dict.get('lremname', config_dict.get('LEECH_FILENAME_REMNAME', ''))) == '' else val
+    buttons.ibutton(f"{'✅️' if lremname != 'Not Exists' else ''} Leech Remname", f"userset {user_id} lremname")
buttons.ibutton("Leech Dump", f"userset {user_id} ldump")
ldump = 'Not Exists' if (val:=user_dict.get('ldump', '')) == '' else len(val)
@@ -174,16 +175,17 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None)
buttons.ibutton("Close", f"userset {user_id} close", "footer")
button = buttons.build_menu(2)
elif key == "ddl_servers":
- ddl_serv = 0
+ ddl_serv, serv_list = 0, []
if (ddl_dict := user_dict.get('ddl_servers', False)):
- for _, (enabled, _) in ddl_dict.items():
+ for serv, (enabled, _) in ddl_dict.items():
if enabled:
+ serv_list.append(serv)
ddl_serv += 1
text = f"γ {fname_dict[key]} Settings :\n\n" \
f"β² Enabled DDL Server(s) : {ddl_serv}\n\n" \
f"β² Description : {desp_dict[key][0]}"
- for btn in ['gofile', 'streamsb']:
- buttons.ibutton(fname_dict[btn], f"userset {user_id} {btn}")
+ for btn in ['gofile', 'streamtape']:
+            buttons.ibutton(f"{'✅️' if btn in serv_list else ''} {fname_dict[btn]}", f"userset {user_id} {btn}")
buttons.ibutton("Back", f"userset {user_id} back mirror", "footer")
buttons.ibutton("Close", f"userset {user_id} close", "footer")
button = buttons.build_menu(2)
@@ -217,7 +219,7 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None)
elif key in ['mprefix', 'mremname', 'msuffix']:
set_exist = 'Not Exists' if (val:=user_dict.get(key, config_dict.get(f'MIRROR_FILENAME_{key[1:].upper()}', ''))) == '' else val
text += f"β² Mirror Filename {fname_dict[key]} : {set_exist}\n\n"
- elif key in ['gofile', 'streamsb']:
+ elif key in ['gofile', 'streamtape']:
set_exist = 'Exists' if key in (ddl_dict:=user_dict.get('ddl_servers', {})) and ddl_dict[key][1] and ddl_dict[key][1] != '' else 'Not Exists'
ddl_mode = 'Enabled' if key in (ddl_dict:=user_dict.get('ddl_servers', {})) and ddl_dict[key][0] else 'Disabled'
text = f"β² Upload {fname_dict[key]} : {ddl_mode}\n" \
@@ -225,7 +227,7 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None)
buttons.ibutton('Disable DDL' if ddl_mode == 'Enabled' else 'Enable DDL', f"userset {user_id} s{key}", "header")
elif key == 'user_tds':
set_exist = len(val) if (val:=user_dict.get(key, False)) else 'Not Exists'
- tds_mode = "Enabled" if user_dict.get('td_mode', config_dict['BOT_PM']) else "Disabled"
+ tds_mode = "Enabled" if user_dict.get('td_mode', False) else "Disabled"
buttons.ibutton('Disable UserTDs' if tds_mode == 'Enabled' else 'Enable UserTDs', f"userset {user_id} td_mode", "header")
if not config_dict['USER_TD_MODE']:
tds_mode = "Force Disabled"
@@ -283,7 +285,9 @@ async def user_settings(client, message):
β² Leech User Dump :
/cmd -s ldump''')
else:
- msg, button = await get_user_settings(message.from_user)
+ from_user = message.from_user
+ handler_dict[from_user.id] = False
+ msg, button = await get_user_settings(from_user)
await sendMessage(message, msg, button, 'IMAGES')
@@ -305,9 +309,11 @@ async def set_custom(client, message, pre_event, key, direct=False):
return_key = 'leech'
n_key = key
user_dict = user_data.get(user_id, {})
- if key in ['gofile', 'streamsb']:
+ if key in ['gofile', 'streamtape']:
ddl_dict = user_dict.get('ddl_servers', {})
mode, api = ddl_dict.get(key, [False, ""])
+ if key == "gofile" and not await Gofile.is_goapi(value):
+ value = ""
ddl_dict[key] = [mode, value]
value = ddl_dict
n_key = 'ddl_servers'
@@ -462,7 +468,7 @@ async def edit_user_settings(client, query):
try:
await sendCustomMsg(user_id, msg, debug=True)
await query.answer('User TDs Successfully Send in your PM', show_alert=True)
- except:
+ except Exception:
await query.answer('Start the Bot in PM (Private) and Try Again', show_alert=True)
await update_user_settings(query, 'user_tds', 'mirror')
elif data[2] == "dthumb":
@@ -546,7 +552,7 @@ async def edit_user_settings(client, query):
await update_user_settings(query, 'leech')
if DATABASE_URL:
await DbManger().update_user_data(user_id)
- elif data[2] in ['sgofile', 'sstreamsb', 'dgofile', 'dstreamsb']:
+ elif data[2] in ['sgofile', 'sstreamtape', 'dgofile', 'dstreamtape']:
handler_dict[user_id] = False
ddl_dict = user_dict.get('ddl_servers', {})
key = data[2][1:]
@@ -582,7 +588,7 @@ async def edit_user_settings(client, query):
else:
await query.answer("Old Settings", show_alert=True)
await update_user_settings(query)
- elif data[2] in ['ddl_servers', 'user_tds', 'gofile', 'streamsb']:
+ elif data[2] in ['ddl_servers', 'user_tds', 'gofile', 'streamtape']:
handler_dict[user_id] = False
await query.answer()
edit_mode = len(data) == 4
diff --git a/bot/modules/ytdlp.py b/bot/modules/ytdlp.py
index 402a65dc35..9ee3ca7055 100644
--- a/bot/modules/ytdlp.py
+++ b/bot/modules/ytdlp.py
@@ -81,7 +81,7 @@ async def __event_handler(self):
pfunc, filters=regex('^ytq') & user(self.__user_id)), group=-1)
try:
await wait_for(self.event.wait(), timeout=self.__timeout)
- except:
+ except Exception:
await editMessage(self.__reply_to, 'Timed Out. Task has been cancelled!')
self.qual = None
self.is_cancelled = True
@@ -344,8 +344,6 @@ async def __run_multi():
path = f'{DOWNLOAD_DIR}{message.id}{folder_name}'
- opt = opt or config_dict['YT_DLP_OPTIONS']
-
if len(text) > 1 and text[1].startswith('Tag: '):
tag, id_ = text[1].split('Tag: ')[1].split()
message.from_user = await client.get_users(id_)
@@ -355,6 +353,13 @@ async def __run_multi():
pass
elif sender_chat := message.sender_chat:
tag = sender_chat.title
+
+ user_id = message.from_user.id
+
+ user_dict = user_data.get(user_id, {})
+
+ opt = opt or user_dict.get('yt_opt') or config_dict['YT_DLP_OPTIONS']
+
if username := message.from_user.username:
tag = f'@{username}'
else:
@@ -465,6 +470,12 @@ async def __run_multi():
yt_opt = opt.split('|')
for ytopt in yt_opt:
key, value = map(str.strip, ytopt.split(':', 1))
+ if key == 'format':
+ if select:
+ qual = ''
+ elif value.startswith('ba/b-'):
+ qual = value
+ continue
if value.startswith('^'):
if '.' in value or value == '^inf':
value = float(value.split('^')[1])
@@ -491,13 +502,8 @@ async def __run_multi():
__run_multi()
- if not select:
- user_id = message.from_user.id
- user_dict = user_data.get(user_id, {})
- if 'format' in options:
- qual = options['format']
- elif user_dict.get('yt_opt'):
- qual = user_dict['yt_opt']
+ if not select and (not qual and 'format' in options):
+ qual = options['format']
if not qual:
qual = await YtSelection(client, message).get_quality(result)
diff --git a/bot/version.py b/bot/version.py
index 8454c89de9..d2565d2f73 100644
--- a/bot/version.py
+++ b/bot/version.py
@@ -8,7 +8,7 @@ def get_version() -> str:
'''
MAJOR = '1'
MINOR = '2'
- PATCH = '0'
+ PATCH = '1'
STATE = 'x'
return f"v{MAJOR}.{MINOR}.{PATCH}-{STATE}"
diff --git a/config_sample.env b/config_sample.env
index c1493b0820..3b24b559c5 100644
--- a/config_sample.env
+++ b/config_sample.env
@@ -13,28 +13,36 @@ DOWNLOAD_DIR = "/usr/src/app/downloads/" # Require restart after changing it
CMD_SUFFIX = "" # Require restart after changing it while bot running
AUTHORIZED_CHATS = "" # Require restart after changing it while bot running
SUDO_USERS = "" # Require restart after changing it while bot running
-STATUS_LIMIT = "6"
+BLACKLIST_USERS = "" # Require restart after changing it while bot running
+STATUS_LIMIT = "4"
DEFAULT_UPLOAD = "gd"
STATUS_UPDATE_INTERVAL = "10"
AUTO_DELETE_MESSAGE_DURATION = "60"
-UPTOBOX_TOKEN = ""
-EXTENSION_FILTER = ""
INCOMPLETE_TASK_NOTIFIER = "False"
-YT_DLP_OPTIONS = ""
-USE_SERVICE_ACCOUNTS = "False"
SET_COMMANDS = "False"
+EXTENSION_FILTER = ""
+YT_DLP_OPTIONS = ""
FSUB_IDS = ""
BOT_PM = ""
-BOT_MAX_TASKS = ""
# GDrive Tools
GDRIVE_ID = ""
+USER_TD_MODE = ""
+USER_TD_SA = ""
+INDEX_URL = ""
+USE_SERVICE_ACCOUNTS = "False"
IS_TEAM_DRIVE = "False"
STOP_DUPLICATE = "False"
DISABLE_DRIVE_LINK = "False"
-INDEX_URL = ""
GD_INFO = "Uploaded by WZML-X"
+# API's/Cookies
+REAL_DEBRID_API = ""
+DEBRID_LINK_API = ""
+FILELION_API = ""
+GDTOT_CRYPT = ""
+UPTOBOX_TOKEN = ""
+
# Rclone
RCLONE_PATH = ""
RCLONE_FLAGS = ""
@@ -46,6 +54,7 @@ RCLONE_SERVE_PASS = ""
# Update
UPSTREAM_REPO = ""
UPSTREAM_BRANCH = ""
+UPGRADE_PACKAGES = ""
# Leech & Mirror
LEECH_SPLIT_SIZE = ""
@@ -61,7 +70,7 @@ MIRROR_FILENAME_PREFIX = ""
MIRROR_FILENAME_SUFFIX = ""
MIRROR_FILENAME_REMNAME = ""
-# Log Channel (Single ID)
+# Log Channel/SuperGroup (Topics Support)
LEECH_LOG_ID = ""
MIRROR_LOG_ID = ""
LINKS_LOG_ID = ""
@@ -79,7 +88,7 @@ QUEUE_UPLOAD = ""
# RSS
RSS_DELAY = "600"
-RSS_CHAT_ID = ""
+RSS_CHAT = ""
# Mega
MEGA_EMAIL = ""
@@ -90,6 +99,7 @@ DAILY_TASK_LIMIT = ""
DAILY_MIRROR_LIMIT = ""
DAILY_LEECH_LIMIT = ""
USER_MAX_TASKS = ""
+BOT_MAX_TASKS = ""
TORRENT_LIMIT= ""
DIRECT_LIMIT = ""
GDRIVE_LIMIT = ""
@@ -99,6 +109,7 @@ PLAYLIST_LIMIT = ""
LEECH_LIMIT = ""
MEGA_LIMIT = ""
STORAGE_THRESHOLD = ""
+USER_TIME_INTERVAL = "0"
# Templates
ANIME_TEMPLATE = ""
@@ -109,23 +120,24 @@ MDL_TEMPLATE = ""
TITLE_NAME = "WZ-M/L-X"
AUTHOR_NAME = "WZML-X"
AUTHOR_URL = "https://t.me/WZML_X"
+COVER_IMAGE = ""
# Extra
SAFE_MODE = ""
DELETE_LINKS = ""
CLEAN_LOG_MSG = ""
SHOW_EXTRA_CMDS = ""
-SOURCE_LINK = ""
TIMEZONE = "Asia/Kolkata"
IMAGES = ""
IMG_SEARCH = ""
IMG_PAGE = ""
BOT_THEME = "minimal"
-USER_TIME_INTERVAL = "0"
+EXCEP_CHATS = ""
# M/L Buttons
SHOW_MEDIAINFO = "False"
SAVE_MSG = "False"
+SOURCE_LINK = "False"
# Token system
TOKEN_TIMEOUT = ""
diff --git a/requirements.txt b/requirements.txt
index 6f1330f05d..6dd5c94375 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -18,6 +18,7 @@ google-auth-oauthlib
gunicorn
git+https://github.com/zevtyardt/lk21.git
httpx
+langcodes[data]
lxml
motor
mutagen
@@ -39,4 +40,4 @@ tenacity
tgcrypto
uvloop
xattr
-yt-dlp==2023.3.4
+yt-dlp==2023.7.6
diff --git a/update.py b/update.py
index cced3098c2..e4a8b35f56 100644
--- a/update.py
+++ b/update.py
@@ -1,6 +1,7 @@
from logging import FileHandler, StreamHandler, INFO, basicConfig, error as log_error, info as log_info
from os import path as ospath, environ, remove
-from subprocess import run as srun
+from subprocess import run as srun, call as scall
+from pkg_resources import working_set
from requests import get as rget
from dotenv import load_dotenv, dotenv_values
from pymongo import MongoClient
@@ -9,7 +10,7 @@
with open('log.txt', 'r+') as f:
f.truncate(0)
-if ospath.exists('rlog.txt'): #RClone Logs
+if ospath.exists('rlog.txt'):
remove('rlog.txt')
basicConfig(format="[%(asctime)s] [%(levelname)s] - %(message)s",
@@ -48,8 +49,14 @@
and config_dict is not None:
environ['UPSTREAM_REPO'] = config_dict['UPSTREAM_REPO']
environ['UPSTREAM_BRANCH'] = config_dict['UPSTREAM_BRANCH']
+        environ['UPGRADE_PACKAGES'] = config_dict.get('UPGRADE_PACKAGES', 'False')
conn.close()
+UPGRADE_PACKAGES = environ.get('UPGRADE_PACKAGES', 'False')
+if UPGRADE_PACKAGES.lower() == 'true':
+ packages = [dist.project_name for dist in working_set]
+ scall("pip install " + ' '.join(packages), shell=True)
+
UPSTREAM_REPO = environ.get('UPSTREAM_REPO', '')
if len(UPSTREAM_REPO) == 0:
UPSTREAM_REPO = None
@@ -76,5 +83,5 @@
if update.returncode == 0:
log_info('Successfully updated with latest commits !!')
else:
- log_error('Something went Wrong !!')
- log_error(f'UPSTREAM_REPO: {UPSTREAM_REPO} | UPSTREAM_BRANCH: {UPSTREAM_BRANCH}')
+ log_error('Something went Wrong ! Retry or Ask Support !')
+ log_info(f'UPSTREAM_REPO: {UPSTREAM_REPO} | UPSTREAM_BRANCH: {UPSTREAM_BRANCH}')
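
The UPGRADE_PACKAGES branch above shells out to pip for every installed distribution. A self-contained sketch of the same idea, using importlib.metadata rather than the deprecated pkg_resources and an explicit --upgrade flag (both choices are illustrative, not what update.py ships):

```python
from importlib.metadata import distributions
from subprocess import call

def upgrade_all_packages() -> None:
    # Collect installed distribution names and hand them to pip in one shot.
    names = sorted({dist.metadata["Name"] for dist in distributions() if dist.metadata["Name"]})
    call(["pip", "install", "--upgrade", *names])
```
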
diff --git a/web/wserver.py b/web/wserver.py
index 29b6481934..f745238f61 100644
--- a/web/wserver.py
+++ b/web/wserver.py
@@ -620,13 +620,13 @@
src="https://graph.org/file/1a6ad157f55bc42b548df.png"
alt="logo"
/>
-
+