diff --git a/README.md b/README.md
index 99b8faafc0..6cd20413a9 100644
--- a/README.md
+++ b/README.md
@@ -62,6 +62,15 @@
 - Download using premium account if available
 - Download restricted messages (document or link) by tg private/public/super links
+
+
•
+ DDL Upload
+
+ - Upload files/folders to the supported sites
+   - Gofile.io
+   - Streamtape.com
+ - Upload to multiple sites at a time
+ - A user API key is needed for the upload (see the token-check sketch below)
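A quick way to sanity-check a user's Gofile key before starting a DDL upload, modeled on the repo's own `is_valid_token` helper (removed from `bot_utils.py` later in this diff). The base URL is an assumption; the endpoint, query parameters, and error string come from that helper and may lag Gofile's current API:

```python
from requests import get as rget

def is_valid_gofile_token(token: str, base_url: str = "https://api.gofile.io/") -> bool:
    # Mirrors the removed is_valid_token() helper: query the account details
    # and look for the "wrong token" status in the JSON reply.
    resp = rget(f"{base_url}getAccountDetails?token={token}&allDetails=true").json()
    if resp.get("status") == "error-wrongToken":
        raise Exception("Invalid Gofile Token, get it from https://gofile.io/myProfile")
    return resp.get("status") == "ok"
```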
•
 Google Drive
@@ -118,6 +127,7 @@
 - Torrent search support
 - Search on torrents with Torrent Search API
 - Search on torrents with variable plugins using qBittorrent search engine
+ - Cached magnet support via the real-debrid API (see the sketch below)
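A sketch of what "cached magnet" means in practice: pull the info-hash out of the magnet URI and ask Real-Debrid whether that torrent is already in its cache. The regex is standard; the `instantAvailability` endpoint is an assumption based on Real-Debrid's public REST API and may have been changed or retired:

```python
from re import search
from requests import get as rget

def magnet_hash(magnet: str) -> str:
    # The btih info-hash (hex or base32) uniquely identifies the torrent.
    m = search(r"xt=urn:btih:([a-zA-Z0-9]+)", magnet)
    if not m:
        raise ValueError("Not a valid magnet link")
    return m.group(1).lower()

def is_cached(magnet: str, api_token: str) -> bool:
    h = magnet_hash(magnet)
    r = rget(f"https://api.real-debrid.com/rest/1.0/torrents/instantAvailability/{h}",
             headers={"Authorization": f"Bearer {api_token}"}).json()
    return bool(r.get(h))  # a non-empty entry means at least one cached variant
```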
  • @@ -196,7 +206,7 @@ - Extract these filetypes > ZIP, RAR, TAR, 7z, ISO, WIM, CAB, GZIP, BZIP2, APM, ARJ, CHM, CPIO, CramFS, DEB, DMG, FAT, HFS, LZH, LZMA, LZMA2, MBR, MSI, MSLZ, NSIS, NTFS, RPM, SquashFS, UDF, VHD, XAR, Z, TAR.XZ - Direct links Supported: - > mediafire, letsupload.io, hxfile.co, antfiles, fembed.com, fembed.net, femax20.com, layarkacaxxi.icu, fcdn.stream, sbplay.org, naniplay.com, naniplay.nanime.in, naniplay.nanime.biz, sbembed.com, streamtape.com, streamsb.net, feurl.com, upload.ee, pixeldrain.com, racaty.net, 1fichier.com, 1drv.ms (Only works for file not folder or business account), uptobox.com and solidfiles.com, linkbox.to, shrdsk.me (sharedisk.io), akmfiles.com, wetransfer.com, mdisk.me (with ytdl), terabox.com (you need to add cookies txt with name) [terabox.txt](https://github.com/ytdl-org/youtube-dl#how-do-i-pass-cookies-to-youtube-dl) and almost every anonfiles based sites + > mediafire(folder), letsupload.io, hxfile.co, antfiles, fembed.com, fembed.net, femax20.com, layarkacaxxi.icu, fcdn.stream, sbplay.org, naniplay.com, naniplay.nanime.in, naniplay.nanime.biz, sbembed.com, streamtape.com, streamsb.net, feurl.com, upload.ee, pixeldrain.com, racaty.net, 1fichier.com, 1drv.ms (Only works for file not folder or business account), uptobox.com and solidfiles.com, linkbox.to, shrdsk.me (sharedisk.io), akmfiles.com, wetransfer.com, mdisk.me (with ytdl), gofile.io(folder), filelion, terabox.com (you need to add cookies txt with name) [terabox.txt](https://github.com/ytdl-org/youtube-dl#how-do-i-pass-cookies-to-youtube-dl) and almost every anonfiles based sites
@@ -432,34 +442,45 @@ help - All cmds with description
 - `CMD_SUFFIX`: commands index number. This number will be added at the end of all commands. `Str`|`Int`
 - `AUTHORIZED_CHATS`: Fill user_id and chat_id of groups/users you want to authorize. Separate them by space. `Int`
 - `SUDO_USERS`: Fill user_id of users whom you want to give sudo permission. Separate them by space. `Int`
+ - `BLACKLIST_USERS`: Fill user_id of users you wish to prohibit from using the bot. Separate them by space. `Int`
+ - `STATUS_LIMIT`: Limit the no. of tasks shown in status message with buttons. Default is `10`. **NOTE**: Recommended limit is `4` tasks. `Int`
 - `DEFAULT_UPLOAD`: Whether `rc` to upload to `RCLONE_PATH` or `gd` to upload to `GDRIVE_ID` or `ddl` to upload to `DDL`. Default is `gd`. Read More [HERE](https://github.com/weebzone/WZML-X/tree/master#upload).`Str`
 - `STATUS_UPDATE_INTERVAL`: Time in seconds after which the progress/status message will be updated. Recommended `10` seconds at least. `Int`
 - `AUTO_DELETE_MESSAGE_DURATION`: Interval of time (in seconds), after which the bot deletes its message and command message which is expected to be viewed instantly. **NOTE**: Set to `-1` to disable auto message deletion. `Int`
- - `STATUS_LIMIT`: Limit the no. of tasks shown in status message with buttons. Default is `10`. **NOTE**: Recommended limit is `4` tasks. `Int`
- - `EXTENSION_FILTER`: File extensions that won't upload/clone. Separate them by space. `Str`
 - `INCOMPLETE_TASK_NOTIFIER`: Get incomplete task messages after restart. Requires database and superGroup. Default is `False`. `Bool`
- - `UPTOBOX_TOKEN`: Uptobox token to mirror uptobox links. Get it from [Uptobox Premium Account](https://uptobox.com/my_account). `str`
+ - `SET_COMMANDS`: Automatically set the bot commands; no need to set them from `@botfather`. Default is `False`. `Bool`
+ - `EXTENSION_FILTER`: File extensions that won't upload/clone. Separate them by space. No need to add a leading `.`. `Str`
 - `YT_DLP_OPTIONS`: Default yt-dlp options. Check all possible options [HERE](https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/YoutubeDL.py#L184) or use this [script](https://t.me/mltb_official/177) to convert cli arguments to api options. Format: key:value|key:value|key:value. Add `^` before integer or float, some numbers must be numeric and some string. `str` (see the parsing sketch after this list)
 - Example: "format:bv*+mergeall[vcodec=none]|nocheckcertificate:True"
- - `USE_SERVICE_ACCOUNTS`: Whether to use Service Accounts or not, with google-api-python-client. For this to work see [Using Service Accounts](https://github.com/weebzone/WZML-X#generate-service-accounts-what-is-service-account) section below. Default is `False`. `Bool`
- - `SAVE_MSG`: Save Button in each file and link so that every user direcly save it without forwarding. Default is `False`. `Bool`
- - `SET_COMMANDS`: Automatically set the Bot Commands no need to set from `@botfather`. Default is `False`. `Bool`
 - `FSUB_IDS`: Fill chat_id(-100xxxxxx) of groups/channel you want to force subscribe. Separate them by space. Int
 - Note: Bot should be added in the filled chat_id as admin
 - `BOT_PM`: Files/links are sent to the BOT PM. Default is `False`. `Bool`
- - `BOT_MAX_TASKS`: Limit the Maximum task for bots of group at a time. `Int`
+
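A minimal sketch of how the `key:value|key:value` format for `YT_DLP_OPTIONS` can be turned into a yt-dlp options dict, following the rules stated above (`^` marks a numeric value; "True"/"False" become booleans). The function name and details are illustrative, not the bot's exact parser:

```python
def parse_ytdlp_options(opts: str) -> dict:
    result = {}
    for item in opts.split('|'):
        key, value = item.split(':', 1)
        if value.startswith('^'):
            # '^' prefix: force a numeric type instead of a string.
            value = float(value[1:]) if '.' in value else int(value[1:])
        elif value.lower() == 'true':
            value = True
        elif value.lower() == 'false':
            value = False
        result[key] = value
    return result

# e.g. parse_ytdlp_options("format:bv*+mergeall[vcodec=none]|nocheckcertificate:True")
# -> {'format': 'bv*+mergeall[vcodec=none]', 'nocheckcertificate': True}
```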
•
 GDrive Tools
 - `GDRIVE_ID`: This is the Folder/TeamDrive ID of the Google Drive OR `root` to which you want to upload all the mirrors using google-api-python-client. `Str`
- - `IS_TEAM_DRIVE`: Set `True` if uploading to TeamDrive using google-api-python-client. Default is `False`. `Bool`
+ - `USER_TD_MODE`: Enable the User-TD feature, i.e. users can upload files into their own drive. Default is `False`. `Bool`
+ - `USER_TD_SA`: Show your SA (service account) email in the user settings so that users can add it to their own TeamDrive to enable uploading via SA. The SA will be an email/group email like `wzml-x@googlegroups.com`
 - `INDEX_URL`: Refer to . `Str`
+ - `USE_SERVICE_ACCOUNTS`: Whether to use Service Accounts or not, with google-api-python-client. For this to work see [Using Service Accounts](https://github.com/weebzone/WZML-X#generate-service-accounts-what-is-service-account) section below. Default is `False`. `Bool`
+ - `IS_TEAM_DRIVE`: Set `True` if uploading to TeamDrive using google-api-python-client. Default is `False`. `Bool`
 - `STOP_DUPLICATE`: Bot will check file/folder name in Drive in case uploading to `GDRIVE_ID`. If it's present in Drive then downloading or cloning will be stopped. (**NOTE**: Item will be checked using name and not hash, so this feature is not perfect yet). Default is `False`. `Bool`
 - `DISABLE_DRIVE_LINK`: Disable drive link button. Default is `False`. `Bool`
 - `GD_INFO`: Description of file/folder uploaded to Google Drive.
-
+
  • +
•
+ API's/Cookies
+
+ - `REAL_DEBRID_API`: API key of `real-debrid.com`. Supports the premium `Bypass Download Limit` and `Cached Magnets` features.
+ - `DEBRID_LINK_API`: API key of `debrid-link.com`. Supports the premium `Bypass Download Limit` feature.
+ - `FILELION_API`: API key of `filelions.com`, used to download links from the FileLions site.
+ - `GDTOT_CRYPT`: Use the GDTOT crypt to bypass GDTOT links.
+ - `UPTOBOX_TOKEN`: Uptobox token to mirror uptobox links. Get it from [Uptobox Premium Account](https://uptobox.com/my_account). `str`
+
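These keys are read at startup like every other secret in this repo, with an empty string as the "unset" sentinel; the snippet below mirrors the pattern added to `bot/__init__.py` further down this diff:

```python
from os import environ

# An empty string means the corresponding feature stays disabled.
REAL_DEBRID_API = environ.get('REAL_DEBRID_API', '')
DEBRID_LINK_API = environ.get('DEBRID_LINK_API', '')
FILELION_API = environ.get('FILELION_API', '')
GDTOT_CRYPT = environ.get('GDTOT_CRYPT', '')
UPTOBOX_TOKEN = environ.get('UPTOBOX_TOKEN', '')
```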
  • +
•
 RClone
@@ -496,13 +517,15 @@ help - All cmds with description
•
- Log Channel
+ Log Channel/superGroup (supports topics)
- - `LEECH_LOG_ID`: Chat ID to where leeched files would be uploaded. `Int`. **NOTE**: Only available for superGroup/channel. Add `-100` before channel/superGroup id. In short don't add bot id or your id!
- - `MIRROR_LOG_ID`: Chat ID to where Mirror files would be Send. `Int`. **NOTE**: Only available for superGroup/channel. Add `-100` before channel/superGroup id. In short don't add bot id or your id!.
+ - `LEECH_LOG_ID`: Chat ID to where leeched files would be uploaded. `Int`. **NOTE**: Only available for superGroup/channel. Add `-100` before the channel/superGroup id. In short, don't add the bot id or your id! For topic support use the format `chat_id:topic_id` (see the parsing sketch below)
+ - `MIRROR_LOG_ID`: Chat ID to where mirrored files would be sent. `Int`. **NOTE**: Only available for superGroup/channel. Add `-100` before the channel/superGroup id. In short, don't add the bot id or your id! For topic support use the format `chat_id:topic_id`
 - `LINKS_LOG_ID`: Chat ID to where link logs would be sent. `Int`. **NOTE**: Only available for superGroup/channel. Add `-100` before the channel/superGroup id. In short, don't add the bot id or your id!
- - **Note**: LEECH_LOG_ID & MIRROR_LOG_ID it's multiple, For multiple id Separate them by space.
+ - **Note**: `LEECH_LOG_ID` & `MIRROR_LOG_ID` accept multiple IDs; separate them by space.
+ - **Warning**: The leech log accepts multiple IDs only for channels, i.e. it doesn't support multiple topics (for now).
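A sketch of how a `chat_id:topic_id` entry splits apart, matching the parsing added for `AUTHORIZED_CHATS` in `bot/__init__.py` further down this diff (the values here are hypothetical):

```python
LEECH_LOG_ID = "-100123456789:42 -100987654321"  # space-separated, topic optional

for entry in LEECH_LOG_ID.split():
    chat_id, *topic_ids = entry.split(':')
    chat_id = int(chat_id)                 # e.g. -100123456789
    topic_ids = list(map(int, topic_ids))  # e.g. [42], or [] when no topic given
    print(chat_id, topic_ids)
```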
  • +
•
 qBittorrent/Aria2c
@@ -543,6 +566,7 @@ help - All cmds with description
 - `DAILY_MIRROR_LIMIT`: Total size up to which a user can Mirror in one day. The default unit is `GB`. `Int`
 - `DAILY_LEECH_LIMIT`: Total size up to which a user can Leech in one day. The default unit is `GB`. `Int`
 - `USER_MAX_TASKS`: Limit the maximum number of tasks per user of a group at a time. `Int`
+ - `BOT_MAX_TASKS`: Limit the maximum number of tasks the bot runs in a group at a time. `Int`
 - `TORRENT_LIMIT`: To limit the size of torrent download. The default unit is `GB`. `Int`
 - `DIRECT_LIMIT`: To limit the size of direct link download. The default unit is `GB`. `Int`
 - `GDRIVE_LIMIT`: To limit the size of Google Drive folder/file link for leech, Zip, Unzip. The default unit is `GB`. `Int`
@@ -552,12 +576,15 @@ help - All cmds with description
 - `LEECH_LIMIT`: To limit the Torrent/Direct/ytdlp leech size. The default unit is `GB`. `Int`
 - `MEGA_LIMIT`: To limit the size of Mega download. The default unit is `GB`. `Int`
 - `STORAGE_THRESHOLD`: To leave specific storage free; any download that would leave less free storage than this value will be cancelled. The default unit is `GB`. `Int`
+ - `USER_TIME_INTERVAL`: Time gap between two consecutive link/file mirror/leech tasks from the same user. Default is `0`. Unit is seconds. `Int` (see the sketch below)
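A minimal sketch of how a `USER_TIME_INTERVAL` gate can work: remember each user's last task timestamp and refuse a new task inside the window. The names are illustrative, not the bot's actual internals:

```python
from time import time

USER_TIME_INTERVAL = 60  # seconds, hypothetical value
last_task_time = {}      # user_id -> timestamp of the last accepted task

def can_start_task(user_id: int) -> bool:
    now = time()
    if now - last_task_time.get(user_id, 0) < USER_TIME_INTERVAL:
        return False  # still inside the cool-down window
    last_task_time[user_id] = now
    return True
```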
  • +
•
 Templates
 - `ANIME_TEMPLATE`: Set template for anime...
 - `IMDB_TEMPLATE`: Set your imdb template...
+ - `MDL_TEMPLATE`: Set your MyDramaList template...
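These templates are plain Python format strings filled with the fetched metadata; the placeholder names below follow the default `IMDB_TEMPLATE` added to `bot/__init__.py` later in this diff, and the values are hypothetical:

```python
IMDB_TEMPLATE = "Title: {title} [{year}]"

# The bot substitutes the scraped fields into the template:
print(IMDB_TEMPLATE.format(title="Oppenheimer", year=2023))
# -> Title: Oppenheimer [2023]
```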
•
@@ -566,24 +593,36 @@ help - All cmds with description
 - `TITLE_NAME`: Title `name` for Telegraph pages (while using /list command)
 - `AUTHOR_NAME`: Author `name` for Telegraph pages
 - `AUTHOR_URL`: Author `URL` for Telegraph page
+ - `COVER_IMAGE`: Telegraph header image. Use a `graph.org` link of the image.
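How these settings can feed a Telegraph page, mirroring the `create_account`/`create_page` calls visible in the `telegraph_helper.py` hunk later in this diff. The import path for the async client and the cover-image placement are assumptions; treat this as a sketch, not the bot's exact code:

```python
from telegraph.aio import Telegraph  # assumed import path for the async client

async def make_list_page(title_name, author_name, author_url, cover_image, body_html):
    telegraph = Telegraph()
    await telegraph.create_account(short_name=author_name,
                                   author_name=author_name,
                                   author_url=author_url)
    # COVER_IMAGE is assumed to be an <img> placed at the top of the page.
    content = f'<img src="{cover_image}">' + body_html
    page = await telegraph.create_page(title=title_name, html_content=content)
    return f"https://graph.org/{page['path']}"
```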
  • -
•
 Extra
- - `SAFE_MODE`: Remove all file names. Not from BOT_PM & LINK_LOGS. Default is `False`. `Bool`
+ - `SAFE_MODE`: Includes:
+   - Remove the filename from the auth group
+   - Remove the leech file index link from the auth group (the leech index is sent directly to BOT_PM)
+   - Remove the mirror link from the auth group (the link is sent directly to BOT_PM)
+   - **NOTE**: `BOT_PM` should be `True` to get the links in PM while `SAFE_MODE` is `True`
 - `DELETE_LINKS`: Delete links after use. Default is `False`. `Bool`
- - `CLEAN_LOG_MSG`: Clean log msg. Default is `False`. `Bool`
+ - `CLEAN_LOG_MSG`: Clean log messages, i.e. remove the "leech started" msg from the leech log. Default is `False`. `Bool`
 - `SHOW_EXTRA_CMDS`: Add old cmds like zipleech... Default is `False`. `Bool`
- - `SOURCE_LINK`: Source button of files and links. Default is `False`. `Bool`
+ - `TIMEZONE`: Set timezone for the bot. Check out the [list](https://gist.github.com/heyalexej/8bf688fd67d7199be4a1682b3eec7568) to get the proper timezone. Default is `Asia/Kolkata`.
 - `IMAGES`: Add multiple telegraph (graph.org) image links, separated by spaces.
 - `IMG_SEARCH`: Put keywords to download images. Separate each name by `,` like `anime`, `iron man`, `god of war`
 - `IMG_PAGE`: Set the page value for downloading images. Each page has approx 70 images. Default is `1`. `Int`
 - `BOT_THEME`: Change the theme of the bot. For now the available theme is `minimal`.
 - You can make your own theme, check out this link https://t.ly/9rVXq
-
+ - `EXCEP_CHATS`: `CHAT_ID` of a `SuperGroup` where you want to disable the logging feature for that specific group. Logging features will remain active in the other authorized chats that have been filled. Add `-100` before the superGroup id. In short, don't add the bot id or your id! (see the sketch below)
+
+
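A small sketch of the `EXCEP_CHATS` check as described above: before forwarding anything to the log channels, skip chats listed there. The helper name is illustrative:

```python
def is_excepted(chat_id: int, excep_chats: str) -> bool:
    # excep_chats is the space-separated EXCEP_CHATS value, e.g. "-100123 -100456".
    return str(chat_id) in excep_chats.split()
```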
•
+ M/L Buttons
+
+ - `SHOW_MEDIAINFO`: MediaInfo button for the file. Default is `False`. `Bool`
+ - `SAVE_MSG`: Adds a Save button to each file and link so that any user can directly save it without forwarding. Default is `False`. `Bool`
+ - `SOURCE_LINK`: Source button of files and links. Default is `False`. `Bool`
+
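A sketch wiring these three toggles together with the repo's `ButtonMaker`, using the `ibutton`/`ubutton`/`build_menu` calls visible elsewhere in this diff. The import path and variable names are assumptions for illustration:

```python
from bot.helper.telegram_helper.button_build import ButtonMaker  # assumed path

def build_extra_buttons(config_dict, mediainfo_link=None, source_url=None):
    buttons = ButtonMaker()
    if config_dict.get('SHOW_MEDIAINFO') and mediainfo_link:
        buttons.ubutton('MediaInfo', mediainfo_link)          # URL button
    if config_dict.get('SAVE_MSG'):
        buttons.ibutton('Save Message', 'save', 'footer')     # callback button
    if config_dict.get('SOURCE_LINK') and source_url:
        buttons.ubutton('Source Link', source_url)
    return buttons.build_menu(2)  # two buttons per row
```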
•
 Token system
@@ -940,13 +979,14 @@ Where host is the name of extractor (eg. instagram, Twitch). Multiple accounts o
 -----
-### πŸ… ***Bot Authors***
+## πŸ… **Bot Authors**
 Click Here For Description
-|![](https://avatars.githubusercontent.com/u/105407900)|![](https://avatars.githubusercontent.com/u/113664541)|![](https://avatars.githubusercontent.com/u/84721324)|![](https://avatars.githubusercontent.com/u/77075674)|
-|:---:|:---:|:---:|:---:|
-|[`SilentDemonSD`](https://github.com/SilentDemonSD)|[`CodeWithWeeb`](https://github.com/weebzone)|[`Maverick`](https://github.com/MajnuRangeela)|[`Anasty17`](https://github.com/anasty17)|
-|Author and DDL, UI Design, More Customs..|Author and Wraps Up Features|Co-Author & Bug Tester|Base Repo: MLTB|
+||||
+|:---:|:---:|:---:|
+|[`SilentDemonSD`](https://github.com/SilentDemonSD)|[`CodeWithWeeb`](https://github.com/weebzone)|[`Maverick`](https://github.com/MajnuRangeela)|
+|Author and DDL, UI Design, More Customs..|Author and Wraps Up Features|Co-Author & Bug Tester|
    + diff --git a/bot/__init__.py b/bot/__init__.py index 1bb6edabae..076fd005c2 100644 --- a/bot/__init__.py +++ b/bot/__init__.py @@ -165,10 +165,14 @@ def changetz(*args): DOWNLOAD_DIR = f'{DOWNLOAD_DIR}/' AUTHORIZED_CHATS = environ.get('AUTHORIZED_CHATS', '') -if len(AUTHORIZED_CHATS) != 0: +if AUTHORIZED_CHATS: aid = AUTHORIZED_CHATS.split() for id_ in aid: - user_data[int(id_.strip())] = {'is_auth': True} + chat_id, *topic_ids = id_.split(':') + chat_id = int(chat_id) + user_data.setdefault(chat_id, {'is_auth': True}) + if topic_ids: + user_data[chat_id].setdefault('topic_ids', []).extend(map(int, topic_ids)) SUDO_USERS = environ.get('SUDO_USERS', '') if len(SUDO_USERS) != 0: @@ -195,7 +199,7 @@ def changetz(*args): log_info("Creating client from USER_SESSION_STRING") try: user = tgClient('user', TELEGRAM_API, TELEGRAM_HASH, session_string=USER_SESSION_STRING, - parse_mode=enums.ParseMode.HTML).start() + parse_mode=enums.ParseMode.HTML, no_updates=True).start() IS_PREMIUM_USER = user.me.is_premium except Exception as e: log_error(f"Failed making client from USER_SESSION_STRING : {e}") @@ -216,9 +220,13 @@ def changetz(*args): if len(GDTOT_CRYPT) == 0: GDTOT_CRYPT = '' -DEBRID_API_KEY = environ.get('DEBRID_API_KEY', '') -if len(DEBRID_API_KEY) == 0: - DEBRID_API_KEY = '' +REAL_DEBRID_API = environ.get('REAL_DEBRID_API', '') +if len(REAL_DEBRID_API) == 0: + REAL_DEBRID_API = '' + +DEBRID_LINK_API = environ.get('DEBRID_LINK_API', '') +if len(DEBRID_LINK_API) == 0: + DEBRID_LINK_API = '' INDEX_URL = environ.get('INDEX_URL', '').rstrip("/") if len(INDEX_URL) == 0: @@ -299,8 +307,10 @@ def changetz(*args): CMD_SUFFIX = environ.get('CMD_SUFFIX', '') -RSS_CHAT_ID = environ.get('RSS_CHAT_ID', '') -RSS_CHAT_ID = '' if len(RSS_CHAT_ID) == 0 else int(RSS_CHAT_ID) +RSS_CHAT = environ.get('RSS_CHAT', '') +RSS_CHAT = '' if len(RSS_CHAT) == 0 else RSS_CHAT +if RSS_CHAT.isdigit() or RSS_CHAT.startswith('-'): + RSS_CHAT = int(RSS_CHAT) RSS_DELAY = environ.get('RSS_DELAY', '') RSS_DELAY = 600 if len(RSS_DELAY) == 0 else int(RSS_DELAY) @@ -371,6 +381,9 @@ def changetz(*args): UPSTREAM_BRANCH = environ.get('UPSTREAM_BRANCH', '') if len(UPSTREAM_BRANCH) == 0: UPSTREAM_BRANCH = 'master' + +UPGRADE_PACKAGES = environ.get('UPGRADE_PACKAGES', '') +UPGRADE_PACKAGES = UPGRADE_PACKAGES.lower() == 'true' RCLONE_SERVE_URL = environ.get('RCLONE_SERVE_URL', '') if len(RCLONE_SERVE_URL) == 0: @@ -516,6 +529,10 @@ def changetz(*args): if len(LOGIN_PASS) == 0: LOGIN_PASS = None +FILELION_API = environ.get('FILELION_API', '') +if len(FILELION_API) == 0: + FILELION_API = '' + IMDB_TEMPLATE = environ.get('IMDB_TEMPLATE', '') if len(IMDB_TEMPLATE) == 0: IMDB_TEMPLATE = '''Title: {title} [{year}] @@ -576,7 +593,9 @@ def changetz(*args): 'CAP_FONT': CAP_FONT, 'CMD_SUFFIX': CMD_SUFFIX, 'DATABASE_URL': DATABASE_URL, - 'DEBRID_API_KEY': DEBRID_API_KEY, + 'REAL_DEBRID_API': REAL_DEBRID_API, + 'DEBRID_LINK_API': DEBRID_LINK_API, + 'FILELION_API': FILELION_API, 'DELETE_LINKS': DELETE_LINKS, 'DEFAULT_UPLOAD': DEFAULT_UPLOAD, 'DOWNLOAD_DIR': DOWNLOAD_DIR, @@ -643,7 +662,7 @@ def changetz(*args): 'RCLONE_SERVE_USER': RCLONE_SERVE_USER, 'RCLONE_SERVE_PASS': RCLONE_SERVE_PASS, 'RCLONE_SERVE_PORT': RCLONE_SERVE_PORT, - 'RSS_CHAT_ID': RSS_CHAT_ID, + 'RSS_CHAT': RSS_CHAT, 'RSS_DELAY': RSS_DELAY, 'SAVE_MSG': SAVE_MSG, 'SAFE_MODE': SAFE_MODE, @@ -664,6 +683,7 @@ def changetz(*args): 'TORRENT_TIMEOUT': TORRENT_TIMEOUT, 'UPSTREAM_REPO': UPSTREAM_REPO, 'UPSTREAM_BRANCH': UPSTREAM_BRANCH, + 'UPGRADE_PACKAGES': UPGRADE_PACKAGES, 
'UPTOBOX_TOKEN': UPTOBOX_TOKEN, 'USER_SESSION_STRING': USER_SESSION_STRING, 'USER_TD_MODE':USER_TD_MODE, @@ -787,6 +807,4 @@ def aria2c_init(): parse_mode=enums.ParseMode.HTML).start() bot_loop = bot.loop bot_name = bot.me.username -scheduler = AsyncIOScheduler(timezone=str( - get_localzone()), event_loop=bot_loop) - +scheduler = AsyncIOScheduler(timezone=str(get_localzone()), event_loop=bot_loop) diff --git a/bot/__main__.py b/bot/__main__.py index 6e40f4375c..296e13828a 100644 --- a/bot/__main__.py +++ b/bot/__main__.py @@ -2,9 +2,10 @@ from datetime import datetime from sys import executable from os import execl as osexecl -from asyncio import create_subprocess_exec, gather +from asyncio import create_subprocess_exec, gather, run as asyrun from uuid import uuid4 from base64 import b64decode +from importlib import import_module, reload from requests import get as rget from pytz import timezone @@ -12,11 +13,12 @@ from signal import signal, SIGINT from aiofiles.os import path as aiopath, remove as aioremove from aiofiles import open as aiopen +from pyrogram import idle from pyrogram.handlers import MessageHandler, CallbackQueryHandler from pyrogram.filters import command, private, regex from pyrogram.types import InlineKeyboardMarkup, InlineKeyboardButton -from bot import bot, bot_name, config_dict, user_data, botStartTime, LOGGER, Interval, DATABASE_URL, QbInterval, INCOMPLETE_TASK_NOTIFIER, scheduler +from bot import bot, user, bot_name, config_dict, user_data, botStartTime, LOGGER, Interval, DATABASE_URL, QbInterval, INCOMPLETE_TASK_NOTIFIER, scheduler from bot.version import get_version from .helper.ext_utils.fs_utils import start_cleanup, clean_all, exit_clean_up from .helper.ext_utils.bot_utils import get_readable_time, cmd_exec, sync_to_async, new_task, set_commands, update_user_ldata, get_stats @@ -48,17 +50,15 @@ async def start(client, message): encrypted_url = message.command[1] input_token, pre_uid = (b64decode(encrypted_url.encode()).decode()).split('&&') if int(pre_uid) != userid: - return await sendMessage(message, 'Temporary Token is not yours!\n\nKindly generate your own.') + return await sendMessage(message, BotTheme('OWN_TOKEN_GENERATE')) data = user_data.get(userid, {}) if 'token' not in data or data['token'] != input_token: - return await sendMessage(message, 'Temporary Token already used!\n\nKindly generate a new one.') + return await sendMessage(message, BotTheme('USED_TOKEN')) elif config_dict['LOGIN_PASS'] is not None and data['token'] == config_dict['LOGIN_PASS']: - return await sendMessage(message, 'Bot Already Logged In via Password\n\nNo Need to Accept Temp Tokens.') - buttons.ibutton('Activate Temporary Token', f'pass {input_token}', 'header') + return await sendMessage(message, BotTheme('LOGGED_PASSWORD')) + buttons.ibutton(BotTheme('ACTIVATE_BUTTON'), f'pass {input_token}', 'header') reply_markup = buttons.build_menu(2) - msg = 'Generated Temporary Login Token!\n\n' - msg += f'Temp Token: {input_token}\n\n' - msg += f'Validity: {get_readable_time(int(config_dict["TOKEN_TIMEOUT"]))}' + msg = BotTheme('TOKEN_MSG', token=input_token, validity=get_readable_time(int(config_dict["TOKEN_TIMEOUT"]))) return await sendMessage(message, msg, reply_markup) elif await CustomFilters.authorized(client, message): start_string = BotTheme('ST_MSG', help_command=f"/{BotCommands.HelpCommand}") @@ -80,7 +80,7 @@ async def token_callback(_, query): update_user_ldata(user_id, 'time', time()) await query.answer('Activated Temporary Token!', show_alert=True) kb = 
query.message.reply_markup.inline_keyboard[1:] - kb.insert(0, [InlineKeyboardButton('βœ…οΈ Activated βœ…', callback_data='pass activated')]) + kb.insert(0, [InlineKeyboardButton(BotTheme('ACTIVATED'), callback_data='pass activated')]) await editReplyMarkup(query.message, InlineKeyboardMarkup(kb)) @@ -91,14 +91,13 @@ async def login(_, message): user_id = message.from_user.id input_pass = message.command[1] if user_data.get(user_id, {}).get('token', '') == config_dict['LOGIN_PASS']: - return await sendMessage(message, 'Already Bot Login In!') - if input_pass == config_dict['LOGIN_PASS']: - update_user_ldata(user_id, 'token', config_dict['LOGIN_PASS']) - return await sendMessage(message, 'Bot Permanent Login Successfully!') - else: - return await sendMessage(message, 'Invalid Password!\n\nKindly put the correct Password .') + return await sendMessage(message, BotTheme('LOGGED_IN')) + if input_pass != config_dict['LOGIN_PASS']: + return await sendMessage(message, BotTheme('INVALID_PASS')) + update_user_ldata(user_id, 'token', config_dict['LOGIN_PASS']) + return await sendMessage(message, BotTheme('PASS_LOGGED')) else: - await sendMessage(message, 'Bot Login Usage :\n\n/cmd {password}') + await sendMessage(message, BotTheme('LOGIN_USED')) async def restart(client, message): @@ -127,46 +126,47 @@ async def ping(_, message): async def log(_, message): buttons = ButtonMaker() - buttons.ibutton('πŸ“‘ Log Display', f'wzmlx {message.from_user.id} logdisplay') - buttons.ibutton('πŸ“¨ Web Paste', f'wzmlx {message.from_user.id} webpaste') + buttons.ibutton(BotTheme('LOG_DISPLAY_BT'), f'wzmlx {message.from_user.id} logdisplay') + buttons.ibutton(BotTheme('WEB_PASTE_BT'), f'wzmlx {message.from_user.id} webpaste') await sendFile(message, 'log.txt', buttons=buttons.build_menu(1)) async def search_images(): - if query_list := config_dict['IMG_SEARCH']: - try: - total_pages = config_dict['IMG_PAGE'] - base_url = "https://www.wallpaperflare.com/search" - for query in query_list: - query = query.strip().replace(" ", "+") - for page in range(1, total_pages + 1): - url = f"{base_url}?wallpaper={query}&width=1280&height=720&page={page}" - r = rget(url) - soup = BeautifulSoup(r.text, "html.parser") - images = soup.select('img[data-src^="https://c4.wallpaperflare.com/wallpaper"]') - if len(images) == 0: - LOGGER.info("Maybe Site is Blocked on your Server, Add Images Manually !!") - for img in images: - img_url = img['data-src'] - if img_url not in config_dict['IMAGES']: - config_dict['IMAGES'].append(img_url) - if len(config_dict['IMAGES']) != 0: - config_dict['STATUS_LIMIT'] = 2 - if DATABASE_URL: - await DbManger().update_config({'IMAGES': config_dict['IMAGES'], 'STATUS_LIMIT': config_dict['STATUS_LIMIT']}) - except Exception as e: - LOGGER.error(f"An error occurred: {e}") + if not (query_list := config_dict['IMG_SEARCH']): + return + try: + total_pages = config_dict['IMG_PAGE'] + base_url = "https://www.wallpaperflare.com/search" + for query in query_list: + query = query.strip().replace(" ", "+") + for page in range(1, total_pages + 1): + url = f"{base_url}?wallpaper={query}&width=1280&height=720&page={page}" + r = rget(url) + soup = BeautifulSoup(r.text, "html.parser") + images = soup.select('img[data-src^="https://c4.wallpaperflare.com/wallpaper"]') + if len(images) == 0: + LOGGER.info("Maybe Site is Blocked on your Server, Add Images Manually !!") + for img in images: + img_url = img['data-src'] + if img_url not in config_dict['IMAGES']: + config_dict['IMAGES'].append(img_url) + if 
len(config_dict['IMAGES']) != 0: + config_dict['STATUS_LIMIT'] = 2 + if DATABASE_URL: + await DbManger().update_config({'IMAGES': config_dict['IMAGES'], 'STATUS_LIMIT': config_dict['STATUS_LIMIT']}) + except Exception as e: + LOGGER.error(f"An error occurred: {e}") async def bot_help(client, message): buttons = ButtonMaker() user_id = message.from_user.id - buttons.ibutton('Basic', f'wzmlx {user_id} guide basic') - buttons.ibutton('Users', f'wzmlx {user_id} guide users') - buttons.ibutton('Mics', f'wzmlx {user_id} guide miscs') - buttons.ibutton('Owner & Sudos', f'wzmlx {user_id} guide admin') - buttons.ibutton('Close', f'wzmlx {user_id} close') - await sendMessage(message, "γŠ‚ Help Guide Menu!\n\nNOTE: Click on any CMD to see more minor detalis.", buttons.build_menu(2)) + buttons.ibutton(BotTheme('BASIC_BT'), f'wzmlx {user_id} guide basic') + buttons.ibutton(BotTheme('USER_BT'), f'wzmlx {user_id} guide users') + buttons.ibutton(BotTheme('MICS_BT'), f'wzmlx {user_id} guide miscs') + buttons.ibutton(BotTheme('O_S_BT'), f'wzmlx {user_id} guide admin') + buttons.ibutton(BotTheme('CLOSE_BT'), f'wzmlx {user_id} close') + await sendMessage(message, BotTheme('HELP_HEADER'), buttons.build_menu(2)) async def restart_notification(): @@ -180,7 +180,7 @@ async def restart_notification(): async def send_incompelete_task_message(cid, msg): try: if msg.startswith("⌬ Restarted Successfully!"): - await bot.edit_message_text(chat_id=chat_id, message_id=msg_id, text=msg) + await bot.edit_message_text(chat_id=chat_id, message_id=msg_id, text=msg, disable_web_page_preview=True) await aioremove(".restartmsg") else: await bot.send_message(chat_id=cid, text=msg, disable_web_page_preview=True, disable_notification=True) @@ -193,9 +193,10 @@ async def send_incompelete_task_message(cid, msg): msg = BotTheme('RESTART_SUCCESS', time=now.strftime('%I:%M:%S %p'), date=now.strftime('%d/%m/%y'), timz=config_dict['TIMEZONE'], version=get_version()) if cid == chat_id else BotTheme('RESTARTED') msg += "\n\n⌬ Incomplete Tasks!" for tag, links in data.items(): - msg += f"\n➲ {tag}: " + msg += f"\n➲ User: {tag}\nβ”– Tasks:" for index, link in enumerate(links, start=1): - msg += f" {index} |" + msg_link, source = next(iter(link.items())) + msg += f" {index}. 
<a href='{msg_link}'>S</a> -> <a href='{source}'>L</a> |"
             if len(msg.encode()) > 4000:
                 await send_incompelete_task_message(cid, msg)
                 msg = ''
@@ -231,7 +232,18 @@ async def main():
     bot.add_handler(MessageHandler(stats, filters=command(
         BotCommands.StatsCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
     LOGGER.info(f"WZML-X Bot [@{bot_name}] Started!")
+    if user:
+        LOGGER.info(f"WZ's User [@{user.me.first_name}] Ready!")
     signal(SIGINT, exit_clean_up)
 
-bot.loop.run_until_complete(main())
-bot.loop.run_forever()
+async def stop_signals():
+    if user:
+        await gather(bot.stop(), user.stop())
+    else:
+        await bot.stop()
+
+
+bot_run = bot.loop.run_until_complete
+bot_run(main())
+bot_run(idle())
+bot_run(stop_signals())
diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py
index e23c4954c8..c55ded9f42 100644
--- a/bot/helper/ext_utils/bot_utils.py
+++ b/bot/helper/ext_utils/bot_utils.py
@@ -3,7 +3,7 @@
 from base64 import b64encode
 from datetime import datetime
 from os import path as ospath
-from pkg_resources import get_distribution
+from pkg_resources import get_distribution, DistributionNotFound
 from aiofiles import open as aiopen
 from aiofiles.os import remove as aioremove, path as aiopath, mkdir
 from re import match as re_match
@@ -55,7 +55,6 @@ class MirrorStatus:
     STATUS_SPLITTING = "Split"
     STATUS_CHECKING = "CheckUp"
     STATUS_SEEDING = "Seed"
-    STATUS_UPLOADDDL = "Upload DDL"
 
 
 class setInterval:
@@ -125,8 +124,8 @@ async def get_telegraph_list(telegraph_content):
     if len(path) > 1:
         await telegraph.edit_telegraph(path, telegraph_content)
     buttons = ButtonMaker()
-    buttons.ubutton("πŸ”Ž VIEW", f"https://telegra.ph/{path[0]}")
-    buttons = extra_btns(buttons)
+    buttons.ubutton("πŸ”Ž VIEW", f"https://te.legra.ph/{path[0]}")
+    buttons, _ = extra_btns(buttons)
     return buttons.build_menu(1)
 
 def handleIndex(index, dic):
@@ -165,13 +164,20 @@ def get_all_versions():
         vr = result.stdout.split('\n')[0].split(' ')[1]
     except FileNotFoundError:
         vr = ''
+    try:
+        vpy = get_distribution('pyrogram').version
+    except DistributionNotFound:
+        try:
+            vpy = get_distribution('pyrofork').version
+        except DistributionNotFound:
+            vpy = "2.xx.xx"
     bot_cache['eng_versions'] = {'p7zip':vp, 'ffmpeg': vf, 'rclone': vr,
                                  'aria': aria2.client.get_version()['version'],
                                  'aiohttp': get_distribution('aiohttp').version,
                                  'gapi': get_distribution('google-api-python-client').version,
                                  'mega': MegaApi('test').getVersion(),
                                  'qbit': get_client().app.version,
-                                 'pyro': get_distribution('pyrogram').version,
+                                 'pyro': vpy,
                                  'ytdlp': get_distribution('yt-dlp').version}
 
@@ -185,7 +191,7 @@ def __init__(self):
         self.STATUS_GD = f"Google-API v{version_cache['gapi']}"
         self.STATUS_MEGA = f"MegaSDK v{version_cache['mega']}"
         self.STATUS_QB = f"qBit {version_cache['qbit']}"
-        self.STATUS_TG = f"Pyrogram v{version_cache['pyro']}"
+        self.STATUS_TG = f"PyroMulti v{version_cache['pyro']}"
         self.STATUS_YT = f"yt-dlp v{version_cache['ytdlp']}"
         self.STATUS_EXT = "pExtract v2"
         self.STATUS_SPLIT_MERGE = f"ffmpeg v{version_cache['ffmpeg']}"
@@ -206,16 +212,15 @@ def get_readable_message():
     for download in list(download_dict.values())[STATUS_START:STATUS_LIMIT+STATUS_START]:
         msg_link = download.message.link if download.message.chat.type in [
             ChatType.SUPERGROUP, ChatType.CHANNEL] and not config_dict['DELETE_LINKS'] else ''
-        msg += BotTheme('STATUS_NAME', Name="Task is being Processed!" if config_dict['SAFE_MODE'] else escape(f'{download.name()}'))
+        elapsed = time() - download.message.date.timestamp()
+        msg += BotTheme('STATUS_NAME', Name="Task is being Processed!"
if config_dict['SAFE_MODE'] and elapsed >= config_dict['STATUS_UPDATE_INTERVAL'] else escape(f'{download.name()}')) if download.status() not in [MirrorStatus.STATUS_SPLITTING, MirrorStatus.STATUS_SEEDING]: - if download.status() != MirrorStatus.STATUS_UPLOADDDL: - msg += BotTheme('BAR', Bar=f"{get_progress_bar_string(download.progress())} {download.progress()}") - msg += BotTheme('PROCESSED', Processed=f"{download.processed_bytes()} of {download.size()}") + msg += BotTheme('BAR', Bar=f"{get_progress_bar_string(download.progress())} {download.progress()}") + msg += BotTheme('PROCESSED', Processed=f"{download.processed_bytes()} of {download.size()}") msg += BotTheme('STATUS', Status=download.status(), Url=msg_link) - if download.status() != MirrorStatus.STATUS_UPLOADDDL: - msg += BotTheme('ETA', Eta=download.eta()) - msg += BotTheme('SPEED', Speed=download.speed()) - msg += BotTheme('ELAPSED', Elapsed=get_readable_time(time() - download.message.date.timestamp())) + msg += BotTheme('ETA', Eta=download.eta()) + msg += BotTheme('SPEED', Speed=download.speed()) + msg += BotTheme('ELAPSED', Elapsed=get_readable_time(elapsed)) msg += BotTheme('ENGINE', Engine=download.eng()) msg += BotTheme('STA_MODE', Mode=download.upload_details['mode']) if hasattr(download, 'seeders_num'): @@ -254,6 +259,10 @@ def convert_speed_to_bytes_per_second(spd): return float(spd.split('K')[0]) * 1024 elif 'M' in spd: return float(spd.split('M')[0]) * 1048576 + elif 'G' in spd: + return float(spd.split('G')[0]) * 1073741824 + elif 'T' in spd: + return float(spd.split('T')[0]) * 1099511627776 else: return 0 @@ -340,6 +349,10 @@ def is_share_link(url): return bool(re_match(r'https?:\/\/.+\.gdtot\.\S+|https?:\/\/(filepress|filebee|appdrive|gdflix)\.\S+', url)) +def is_index_link(url): + return bool(re_match(r'https?:\/\/.+\/\d+\:\/', url)) + + def is_mega_link(url): return "mega.nz" in url or "mega.co.nz" in url @@ -471,8 +484,8 @@ def wrapper(*args, wait=False, **kwargs): async def compare_versions(v1, v2): - v1_parts = [int(part) for part in v1[1:-2].split('.')] - v2_parts = [int(part) for part in v2[1:-2].split('.')] + v1_parts = [int(part) for part in v1.split('-')[0][1:].split('.')] + v2_parts = [int(part) for part in v2.split('-')[0][1:].split('.')] for i in range(3): v1_part, v2_part = v1_parts[i], v2_parts[i] if v1_part < v2_part: @@ -497,6 +510,7 @@ async def get_stats(event, key="home"): total, used, free, disk = disk_usage('/') swap = swap_memory() memory = virtual_memory() + disk_io = disk_io_counters() msg = BotTheme('BOT_STATS', bot_uptime=get_readable_time(time() - botStartTime), ram_bar=get_progress_bar_string(memory.percent), @@ -511,8 +525,8 @@ async def get_stats(event, key="home"): swap_t=get_readable_file_size(swap.total), disk=disk, disk_bar=get_progress_bar_string(disk), - disk_read=get_readable_file_size(disk_io_counters().read_bytes) + f" ({get_readable_time(disk_io_counters().read_time / 1000)})", - disk_write=get_readable_file_size(disk_io_counters().write_bytes) + f" ({get_readable_time(disk_io_counters().write_time / 1000)})", + disk_read=get_readable_file_size(disk_io.read_bytes) + f" ({get_readable_time(disk_io.read_time / 1000)})" if disk_io else "Access Denied", + disk_write=get_readable_file_size(disk_io.write_bytes) + f" ({get_readable_time(disk_io.write_time / 1000)})" if disk_io else "Access Denied", disk_t=get_readable_file_size(total), disk_u=get_readable_file_size(used), disk_f=get_readable_file_size(free), @@ -542,7 +556,7 @@ async def get_stats(event, key="home"): if await 
aiopath.exists('.git'): last_commit = (await cmd_exec("git log -1 --pretty='%cd ( %cr )' --date=format-local:'%d/%m/%Y'", True))[0] changelog = (await cmd_exec("git log -1 --pretty=format:'%s By %an'", True))[0] - official_v = (await cmd_exec("curl -o latestversion.py https://raw.githubusercontent.com/weebzone/WZML-X/master/bot/version.py -s && python3 latestversion.py && rm latestversion.py", True))[0] + official_v = (await cmd_exec(f"curl -o latestversion.py https://raw.githubusercontent.com/weebzone/WZML-X/{config_dict['UPSTREAM_BRANCH']}/bot/version.py -s && python3 latestversion.py && rm latestversion.py", True))[0] msg = BotTheme('REPO_STATS', last_commit=last_commit, bot_version=get_version(), @@ -647,11 +661,11 @@ async def checking_access(user_id, button=None): return None, button -def extra_btns(buttons): - if extra_buttons: +def extra_btns(buttons, already=False): + if extra_buttons and not already: for btn_name, btn_url in extra_buttons.items(): - buttons.ubutton(btn_name, btn_url) - return buttons + buttons.ubutton(btn_name, btn_url, 'l_body') + return buttons, True async def set_commands(client): @@ -699,9 +713,3 @@ async def set_commands(client): LOGGER.info('Bot Commands have been Set & Updated') except Exception as err: LOGGER.error(err) - - -def is_valid_token(url, token): - resp = rget(url=f"{url}getAccountDetails?token={token}&allDetails=true").json() - if resp["status"] == "error-wrongToken": - raise Exception("Invalid Gofile Token, Get your Gofile token from --> https://gofile.io/myProfile") diff --git a/bot/helper/ext_utils/db_handler.py b/bot/helper/ext_utils/db_handler.py index 5dc567c173..cb0c344f18 100644 --- a/bot/helper/ext_utils/db_handler.py +++ b/bot/helper/ext_utils/db_handler.py @@ -169,10 +169,10 @@ async def rss_delete(self, user_id): await self.__db.rss[bot_id].delete_one({'_id': user_id}) self.__conn.close - async def add_incomplete_task(self, cid, link, tag): + async def add_incomplete_task(self, cid, link, tag, msg_link): if self.__err: return - await self.__db.tasks[bot_id].insert_one({'_id': link, 'cid': cid, 'tag': tag}) + await self.__db.tasks[bot_id].insert_one({'_id': link, 'cid': cid, 'tag': tag, 'source': msg_link}) self.__conn.close async def rm_complete_task(self, link): @@ -186,20 +186,19 @@ async def get_incomplete_tasks(self): if self.__err: return notifier_dict if await self.__db.tasks[bot_id].find_one(): - # return a dict ==> {_id, cid, tag} + # return a dict ==> {_id, cid, tag, source} rows = self.__db.tasks[bot_id].find({}) async for row in rows: if row['cid'] in list(notifier_dict.keys()): if row['tag'] in list(notifier_dict[row['cid']]): - notifier_dict[row['cid']][row['tag']].append( - row['_id']) + notifier_dict[row['cid']][row['tag']].append({row['_id']: row['source']}) else: - notifier_dict[row['cid']][row['tag']] = [row['_id']] + notifier_dict[row['cid']][row['tag']] = [{row['_id']: row['source']}] else: - notifier_dict[row['cid']] = {row['tag']: [row['_id']]} + notifier_dict[row['cid']] = {row['tag']: [{row['_id']: row['source']}]} await self.__db.tasks[bot_id].drop() self.__conn.close - return notifier_dict # return a dict ==> {cid: {tag: [_id, _id, ...]}} + return notifier_dict # return a dict ==> {cid: {tag: [{_id: source}, {_id, source}, ...]}} async def trunc_table(self, name): if self.__err: @@ -207,6 +206,5 @@ async def trunc_table(self, name): await self.__db[name][bot_id].drop() self.__conn.close - if DATABASE_URL: bot_loop.run_until_complete(DbManger().db_load()) diff --git a/bot/helper/ext_utils/fs_utils.py 
b/bot/helper/ext_utils/fs_utils.py index 10973a015b..40011e90d0 100644 --- a/bot/helper/ext_utils/fs_utils.py +++ b/bot/helper/ext_utils/fs_utils.py @@ -64,7 +64,7 @@ async def start_cleanup(): await aiormtree(DOWNLOAD_DIR) except: pass - await makedirs(DOWNLOAD_DIR) + await makedirs(DOWNLOAD_DIR, exist_ok=True) def clean_all(): @@ -167,7 +167,10 @@ async def join_files(path): LOGGER.error(f'Failed to join {final_name}, stderr: {stderr}') else: results.append(final_name) + else: + LOGGER.warning('No Binary files to join!') if results: + LOGGER.info('Join Completed!') for res in results: for file_ in files: if re_search(fr"{res}\.0[0-9]+$", file_): diff --git a/bot/helper/ext_utils/help_messages.py b/bot/helper/ext_utils/help_messages.py index dc8a06da21..2632cee10b 100644 --- a/bot/helper/ext_utils/help_messages.py +++ b/bot/helper/ext_utils/help_messages.py @@ -119,6 +119,7 @@ 17. -index: Index url for gdrive_arg 18. -c or -category : Gdrive category to Upload, Specific Name (case insensitive) 19. -ud or -dump : Dump category to Upload, Specific Name (case insensitive) or chat_id or chat_username +20. -ss or -screenshots : Generate Screenshots for Leeched Files, Specify 1, 3, .. after this. """, """ ➲ By along the cmd: /cmd link -n new name @@ -133,6 +134,12 @@ ➲ Direct Link Authorization: -u -p or -user -pass /cmd link -u username -p password +➲ Direct link custom headers: -h or -headers +/cmd link -h key: value key1: value1 + +➲ Screenshot Generation: -ss or -screenshots +/cmd link -ss number ,Screenshots for each Video File + ➲ Extract / Zip: -uz -z or -zip -unzip or -e -extract /cmd link -e password (extract password protected) /cmd link -z password (zip password protected) @@ -396,6 +403,15 @@ β”– /{BotCommands.MyDramaListCommand}: Search in MyDramaList. '''] + +PASSWORD_ERROR_MESSAGE = """ +This link requires a password! +- Insert sign :: after the link and write the password after the sign. +Example: {}::love you +Note: No spaces between the signs :: +For the password, you can use a space! +""" + default_desp = {'AS_DOCUMENT': 'Default type of Telegram file upload. Default is False mean as media.', 'ANIME_TEMPLATE': 'Set template for AniList Template. HTML Tags supported', 'AUTHORIZED_CHATS': 'Fill user_id and chat_id of groups/users you want to authorize. Separate them by space.', @@ -450,6 +466,8 @@ 'LEECH_FILENAME_REMNAME': 'Remove custom word from the leeched file name. Str', 'LOGIN_PASS': 'Permanent pass for user to skip the token system', 'TOKEN_TIMEOUT': 'Token timeout for each group member in sec. Int', + 'DEBRID_LINK_API': 'Set debrid-link.com API for 172 Supported Hosters Leeching Support. Str', + 'REAL_DEBRID_API': 'Set real-debrid.com API for Torrent Cache & Few Supported Hosters (VPN Maybe). Str', 'LEECH_SPLIT_SIZE': 'Size of split in bytes. Default is 2GB. Default is 4GB if your account is premium.', 'MEDIA_GROUP': 'View Uploaded splitted file parts in media group. Default is False.', 'MEGA_EMAIL': 'E-Mail used to sign-in on mega.nz for using premium account. Str', @@ -479,6 +497,7 @@ 'TORRENT_TIMEOUT': 'Timeout of dead torrents downloading with qBittorrent and Aria2c in seconds. Int', 'UPSTREAM_REPO': "Your github repository link, if your repo is private add https://username:{githubtoken}@github.com/{username}/{reponame} format. Get token from Github settings. So you can update your bot from filled repository on each restart.", 'UPSTREAM_BRANCH': 'Upstream branch for update. 
Default is master.', + 'UPGRADE_PACKAGES': 'Install New Requirements File without thinking of Crash. Bool', 'SAVE_MSG': 'Add button of save message. Bool', 'SET_COMMANDS': 'Set bot command automatically. Bool', 'UPTOBOX_TOKEN': 'Uptobox token to mirror uptobox links. Get it from Uptobox Premium Account.', diff --git a/bot/helper/ext_utils/leech_utils.py b/bot/helper/ext_utils/leech_utils.py index 6c2a60ac74..1a2253dbf4 100644 --- a/bot/helper/ext_utils/leech_utils.py +++ b/bot/helper/ext_utils/leech_utils.py @@ -1,12 +1,17 @@ -import hashlib +from hashlib import md5 +from time import strftime, gmtime from re import sub as re_sub from shlex import split as ssplit +from natsort import natsorted from os import path as ospath -from aiofiles.os import remove as aioremove, path as aiopath, mkdir +from aiofiles.os import remove as aioremove, path as aiopath, mkdir, makedirs, listdir +from aioshutil import rmtree as aiormtree from time import time from re import search as re_search -from asyncio import create_subprocess_exec +from asyncio import create_subprocess_exec, create_task, gather from asyncio.subprocess import PIPE +from telegraph import upload_file +from langcodes import Language from bot import LOGGER, MAX_SPLIT_SIZE, config_dict, user_data from bot.modules.mediainfo import parseinfo @@ -37,23 +42,32 @@ async def is_multi_streams(path): return videos > 1 or audios > 1 -async def get_media_info(path): +async def get_media_info(path, metadata=False): try: result = await cmd_exec(["ffprobe", "-hide_banner", "-loglevel", "error", "-print_format", - "json", "-show_format", path]) + "json", "-show_format", "-show_streams", path]) if res := result[1]: LOGGER.warning(f'Get Media Info: {res}') except Exception as e: LOGGER.error(f'Get Media Info: {e}. Mostly File not found!') return 0, None, None - fields = eval(result[0]).get('format') + ffresult = eval(result[0]) + fields = ffresult.get('format') if fields is None: - LOGGER.error(f"get_media_info: {result}") + LOGGER.error(f"Get Media Info: {result}") return 0, None, None duration = round(float(fields.get('duration', 0))) tags = fields.get('tags', {}) - artist = tags.get('artist') or tags.get('ARTIST') - title = tags.get('title') or tags.get('TITLE') + artist = tags.get('artist') or tags.get('ARTIST') or tags.get("Artist") + title = tags.get('title') or tags.get('TITLE') or tags.get("Title") + if metadata: + lang, qual = "", "" + if (streams := ffresult.get('streams')) and streams[0].get('codec_type') == 'video': + qual = f"{streams[0].get('height')}p" + for stream in streams: + if stream.get('codec_type') == 'audio' and (lc := stream.get('tags', {}).get('language')): + lang += Language.get(lc).display_name() + ", " + return duration, qual, lang[:-2] return duration, artist, title @@ -88,27 +102,48 @@ async def get_document_type(path): return is_video, is_audio, is_image -async def take_ss(video_file, duration): +async def get_audio_thumb(audio_file): des_dir = 'Thumbnails' if not await aiopath.exists(des_dir): await mkdir(des_dir) des_dir = ospath.join(des_dir, f"{time()}.jpg") - if duration is None: - duration = (await get_media_info(video_file))[0] - if duration == 0: - duration = 3 - duration = duration // 2 - cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", str(duration), - "-i", video_file, "-vf", "thumbnail", "-frames:v", "1", des_dir] + cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", + "-i", audio_file, "-an", "-vcodec", "copy", des_dir] status = await create_subprocess_exec(*cmd, stderr=PIPE) if await status.wait() 
!= 0 or not await aiopath.exists(des_dir): err = (await status.stderr.read()).decode().strip() LOGGER.error( - f'Error while extracting thumbnail. Name: {video_file} stderr: {err}') + f'Error while extracting thumbnail from audio. Name: {audio_file} stderr: {err}') return None return des_dir +async def take_ss(video_file, duration=None, total=1, gen_ss=False): + des_dir = ospath.join('Thumbnails', f"{time()}") + await makedirs(des_dir, exist_ok=True) + if duration is None: + duration = (await get_media_info(video_file))[0] + if duration == 0: + duration = 3 + duration = duration - (duration * 2 / 100) + cmd = ["ffmpeg", "-hide_banner", "-loglevel", "error", "-ss", "", + "-i", video_file, "-vf", "thumbnail", "-frames:v", "1", des_dir] + tasks = [] + tstamps = {} + for eq_thumb in range(1, total+1): + cmd[5] = str((duration // total) * eq_thumb) + tstamps[f"wz_thumb_{eq_thumb}.jpg"] = strftime("%H:%M:%S", gmtime(float(cmd[5]))) + cmd[-1] = ospath.join(des_dir, f"wz_thumb_{eq_thumb}.jpg") + tasks.append(create_task(create_subprocess_exec(*cmd, stderr=PIPE))) + status = await gather(*tasks) + for task, eq_thumb in zip(status, range(1, total+1)): + if await task.wait() != 0 or not await aiopath.exists(ospath.join(des_dir, f"wz_thumb_{eq_thumb}.jpg")): + err = (await task.stderr.read()).decode().strip() + LOGGER.error(f'Error while extracting thumbnail no. {eq_thumb} from video. Name: {video_file} stderr: {err}') + return None + return (des_dir, tstamps) if gen_ss else ospath.join(des_dir, "wz_thumb_1.jpg") + + async def split_file(path, size, file_, dirpath, split_size, listener, start_time=0, i=1, inLoop=False, multi_streams=True): if listener.suproc == 'cancelled' or listener.suproc is not None and listener.suproc.returncode == -9: return False @@ -121,7 +156,7 @@ async def split_file(path, size, file_, dirpath, split_size, listener, start_tim leech_split_size = user_dict.get( 'split_size') or config_dict['LEECH_SPLIT_SIZE'] parts = -(-size // leech_split_size) - if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS']) and not inLoop: + if (user_dict.get('equal_splits') or config_dict['EQUAL_SPLITS'] and 'equal_splits' not in user_dict) and not inLoop: split_size = ((size + parts - 1) // parts) + 1000 if (await get_document_type(path))[0]: if multi_streams: @@ -199,9 +234,7 @@ async def format_filename(file_, user_id, dirpath=None, isMirror=False): lcaption = config_dict['LEECH_FILENAME_CAPTION'] if (val:=user_dict.get('lcaption', '')) == '' else val prefile_ = file_ - # SD-Style V2 ~ WZML-X - if file_.startswith('www'): #Remove all www.xyz.xyz domains - file_ = ' '.join(file_.split()[1:]) + file_ = re_sub(r'www\S+', '', file_) if remname: if not remname.startswith('|'): @@ -223,7 +256,7 @@ async def format_filename(file_, user_id, dirpath=None, isMirror=False): nfile_ = file_ if prefix: nfile_ = prefix.replace('\s', ' ') + file_ - prefix = re_sub('<.*?>', '', prefix).replace('\s', ' ') + prefix = re_sub(r'<.*?>', '', prefix).replace('\s', ' ') if not file_.startswith(prefix): file_ = f"{prefix}{file_}" @@ -251,10 +284,13 @@ async def format_filename(file_, user_id, dirpath=None, isMirror=False): lcaption = lcaption.replace('\|', '%%').replace('\s', ' ') slit = lcaption.split("|") up_path = ospath.join(dirpath, prefile_) + dur, qual, lang = await get_media_info(up_path, True) cap_mono = slit[0].format( filename = nfile_, size = get_readable_file_size(await aiopath.getsize(up_path)), - duration = get_readable_time((await get_media_info(up_path))[0]), + duration = 
get_readable_time(dur),
+            quality = qual,
+            languages = lang,
             md5_hash = get_md5_hash(up_path)
         )
     if len(slit) > 1:
@@ -268,8 +304,17 @@ async def format_filename(file_, user_id, dirpath=None, isMirror=False):
                 cap_mono = cap_mono.replace(args[0], '')
         cap_mono = cap_mono.replace('%%', '|')
     return file_, cap_mono
-
-
+
+
+async def get_ss(up_path, ss_no):
+    thumbs_path, tstamps = await take_ss(up_path, total=ss_no, gen_ss=True)
+    th_html = f"πŸ“Œ <h4>{ospath.basename(up_path)}</h4><br><h4>πŸ“‡ Total Screenshots: {ss_no}</h4><br>"
+    th_html += ''.join(f'<img src="https://graph.org{upload_file(ospath.join(thumbs_path, thumb))[0]}"><br><pre>Screenshot at {tstamps[thumb]}</pre>' for thumb in natsorted(await listdir(thumbs_path)))
+    await aiormtree(thumbs_path)
+    link_id = (await telegraph.create_page(title="ScreenShots X", content=th_html))["path"]
+    return f"https://graph.org/{link_id}"
+
+
 async def get_mediainfo_link(up_path):
     stdout, __, _ = await cmd_exec(ssplit(f'mediainfo "{up_path}"'))
     tc = f"πŸ“Œ <h4>{ospath.basename(up_path)}</h4><br><br>
    " @@ -280,7 +325,7 @@ async def get_mediainfo_link(up_path): def get_md5_hash(up_path): - md5_hash = hashlib.md5() + md5_hash = md5() with open(up_path, "rb") as f: for byte_block in iter(lambda: f.read(4096), b""): md5_hash.update(byte_block) diff --git a/bot/helper/ext_utils/telegraph_helper.py b/bot/helper/ext_utils/telegraph_helper.py index 284bc00784..7972ff5abf 100644 --- a/bot/helper/ext_utils/telegraph_helper.py +++ b/bot/helper/ext_utils/telegraph_helper.py @@ -23,7 +23,7 @@ async def create_account(self): author_url=self.author_url ) self.access_token = self.telegraph.get_access_token() - LOGGER.info("Creating Telegraph Account") + LOGGER.info(f"Telegraph Account Generated : {self.short_name}") async def create_page(self, title, content): try: @@ -34,8 +34,7 @@ async def create_page(self, title, content): html_content=content ) except RetryAfterError as st: - LOGGER.warning( - f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.') + LOGGER.warning(f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.') await sleep(st.retry_after) return await self.create_page(title, content) @@ -49,8 +48,7 @@ async def edit_page(self, path, title, content): html_content=content ) except RetryAfterError as st: - LOGGER.warning( - f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.') + LOGGER.warning(f'Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds.') await sleep(st.retry_after) return await self.edit_page(path, title, content) diff --git a/bot/helper/listeners/aria2_listener.py b/bot/helper/listeners/aria2_listener.py index 46492eed32..c47008e5ca 100644 --- a/bot/helper/listeners/aria2_listener.py +++ b/bot/helper/listeners/aria2_listener.py @@ -16,6 +16,8 @@ @new_thread async def __onDownloadStarted(api, gid): download = await sync_to_async(api.get_download, gid) + if download.options.follow_torrent == 'false': + return if download.is_metadata: LOGGER.info(f'onDownloadStarted: {gid} METADATA') await sleep(1) @@ -101,6 +103,8 @@ async def __onDownloadComplete(api, gid): download = await sync_to_async(api.get_download, gid) except: return + if download.options.follow_torrent == 'false': + return if download.followed_by_ids: new_gid = download.followed_by_ids[0] LOGGER.info(f'Gid changed from {gid} to {new_gid}') @@ -133,6 +137,8 @@ async def __onBtDownloadComplete(api, gid): seed_start_time = time() await sleep(1) download = await sync_to_async(api.get_download, gid) + if download.options.follow_torrent == 'false': + return LOGGER.info(f"onBtDownloadComplete: {download.name} - Gid: {gid}") if dl := await getDownloadByGid(gid): listener = dl.listener() @@ -193,6 +199,8 @@ async def __onDownloadError(api, gid): error = "None" try: download = await sync_to_async(api.get_download, gid) + if download.options.follow_torrent == 'false': + return error = download.error_message LOGGER.info(f"Download Error: {error}") except: diff --git a/bot/helper/listeners/direct_listener.py b/bot/helper/listeners/direct_listener.py new file mode 100644 index 0000000000..7e0eddd141 --- /dev/null +++ b/bot/helper/listeners/direct_listener.py @@ -0,0 +1,77 @@ +from time import sleep + +from bot import LOGGER, aria2 +from bot.helper.ext_utils.bot_utils import async_to_sync, sync_to_async + + +class DirectListener: + def __init__(self, foldername, total_size, path, listener, a2c_opt): + self.name = foldername + self.total_size = total_size + self.__path = path + self.__listener = listener + self.__download = None + 
self.is_downloading = False + self.__a2c_opt = a2c_opt + self.proc_bytes = 0 + self.file_processed_bytes = 0 + self.failed = 0 + + @property + def processed_bytes(self): + if self.__download: + return self.file_processed_bytes + self.__download.completed_length + return self.file_processed_bytes + + @property + def speed(self): + return self.__download.download_speed if self.__download else 0 + + def download(self, contents): + self.is_downloading = True + for content in contents: + if not self.is_downloading: + break + if content['path']: + self.__a2c_opt['dir'] = f"{self.__path}/{content['path']}" + else: + self.__a2c_opt['dir'] = self.__path + filename = content['filename'] + self.__a2c_opt['out'] = filename + try: + self.__download = aria2.add_uris([content['url']], self.__a2c_opt) + except Exception as e: + self.failed += 1 + LOGGER.error(f'Unable to download {filename} due to: {e}') + continue + self.__download = self.__download.live + while True: + if not self.is_downloading: + if self.__download: + self.__download.remove(True, True) + break + self.__download = self.__download.live + if error_message:= self.__download.error_message: + self.failed += 1 + LOGGER.error(f'Unable to download {self.__download.name} due to: {error_message}') + self.__download.remove(True, True) + break + elif self.__download.is_complete: + self.file_processed_bytes += self.__download.completed_length + self.__download.remove(True) + break + sleep(1) + self.__download = None + if not self.is_downloading: + return + if self.failed == len(contents): + async_to_sync(self.__listener.onDownloadError, 'All files are failed to download!') + return + async_to_sync(self.__listener.onDownloadComplete) + + async def cancel_download(self): + self.is_downloading = False + LOGGER.info(f"Cancelling Download: {self.name}") + await self.__listener.onDownloadError("Download Cancelled by User!") + if self.__download: + await sync_to_async(self.__download.remove, force=True, files=True) diff --git a/bot/helper/listeners/tasks_listener.py b/bot/helper/listeners/tasks_listener.py index 9b25e25316..3d5613dfe9 100644 --- a/bot/helper/listeners/tasks_listener.py +++ b/bot/helper/listeners/tasks_listener.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 from random import choice from time import time +from copy import deepcopy from pytz import timezone from datetime import datetime from urllib.parse import unquote, quote @@ -40,7 +41,8 @@ class MirrorLeechListener: - def __init__(self, message, compress=False, extract=False, isQbit=False, isLeech=False, tag=None, select=False, seed=False, sameDir=None, rcFlags=None, upPath=None, isClone=False, join=False, drive_id=None, index_link=None, isYtdlp=False, source_url=None, ): + def __init__(self, message, compress=False, extract=False, isQbit=False, isLeech=False, tag=None, select=False, seed=False, sameDir=None, rcFlags=None, upPath=None, isClone=False, + join=False, drive_id=None, index_link=None, isYtdlp=False, source_url=None, logMessage=None, leech_utils={}): if sameDir is None: sameDir = {} self.message = message @@ -61,6 +63,9 @@ def __init__(self, message, compress=False, extract=False, isQbit=False, isLeech self.select = select self.isSuperGroup = message.chat.type in [ChatType.SUPERGROUP, ChatType.CHANNEL] self.isPrivate = message.chat.type == ChatType.BOT + self.user_id = self.message.from_user.id + self.user_dict = user_data.get(self.user_id, {}) + self.isPM = config_dict['BOT_PM'] or self.user_dict.get('bot_pm') self.suproc = None self.sameDir = sameDir self.rcFlags = rcFlags @@ 
-69,9 +74,11 @@ def __init__(self, message, compress=False, extract=False, isQbit=False, isLeech self.join = join self.drive_id = drive_id self.index_link = index_link + self.logMessage = logMessage self.linkslogmsg = None self.botpmmsg = None self.upload_details = {} + self.leech_utils = leech_utils self.source_url = source_url if source_url and source_url.startswith('http') else ("https://t.me/share/url?url=" + source_url) if source_url else message.link self.source_msg = '' self.__setModeEng() @@ -85,7 +92,7 @@ async def clean(self): Interval.clear() await sync_to_async(aria2.purge) await delete_all_messages() - except: + except Exception: pass def __setModeEng(self): @@ -132,11 +139,10 @@ async def onDownloadStart(self): if config_dict['LINKS_LOG_ID'] and not self.excep_chat: dispTime = datetime.now(timezone(config_dict['TIMEZONE'])).strftime('%d/%m/%y, %I:%M:%S %p') self.linkslogmsg = await sendCustomMsg(config_dict['LINKS_LOG_ID'], BotTheme('LINKS_START', Mode=self.upload_details['mode'], Tag=self.tag) + BotTheme('LINKS_SOURCE', On=dispTime, Source=self.source_msg)) - user_dict = user_data.get(self.message.from_user.id, {}) - if config_dict['BOT_PM'] or user_dict.get('bot_pm'): + if self.isPM and self.isSuperGroup: self.botpmmsg = await sendCustomMsg(self.message.from_user.id, BotTheme('PM_START', msg_link=self.source_url)) if self.isSuperGroup and config_dict['INCOMPLETE_TASK_NOTIFIER'] and DATABASE_URL: - await DbManger().add_incomplete_task(self.message.chat.id, self.source_url, self.tag) + await DbManger().add_incomplete_task(self.message.chat.id, self.message.link, self.tag, self.source_url) async def onDownloadComplete(self): multi_links = False @@ -190,9 +196,8 @@ async def onDownloadComplete(self): await start_from_queued() user_dict = user_data.get(self.message.from_user.id, {}) - if self.join: - if await aiopath.isdir(dl_path): - await join_files(dl_path) + if self.join and await aiopath.isdir(dl_path): + await join_files(dl_path) if self.extract: pswd = self.extract if isinstance(self.extract, str) else '' @@ -392,7 +397,7 @@ async def onDownloadComplete(self): elif self.upPath == 'ddl': size = await get_path_size(up_path) LOGGER.info(f"Upload Name: {up_name} via DDL") - ddl = DDLUploader(up_name, up_dir, self) + ddl = DDLUploader(self, up_name, up_dir) ddl_upload_status = DDLStatus(ddl, size, self.message, gid, self.upload_details) async with download_dict_lock: download_dict[self.uid] = ddl_upload_status @@ -408,74 +413,89 @@ async def onDownloadComplete(self): await update_all_messages() await RCTransfer.upload(up_path, size) - async def onUploadComplete(self, link, size, files, folders, mime_type, name, rclonePath=''): + async def onUploadComplete(self, link, size, files, folders, mime_type, name, rclonePath='', private=False): if self.isSuperGroup and config_dict['INCOMPLETE_TASK_NOTIFIER'] and DATABASE_URL: await DbManger().rm_complete_task(self.message.link) user_id = self.message.from_user.id name, _ = await format_filename(name, user_id, isMirror=not self.isLeech) user_dict = user_data.get(user_id, {}) - msg = BotTheme('NAME', Name="Task has been Completed!"if config_dict['SAFE_MODE'] else escape(name)) + msg = BotTheme('NAME', Name="Task has been Completed!"if config_dict['SAFE_MODE'] and self.isSuperGroup else escape(name)) msg += BotTheme('SIZE', Size=get_readable_file_size(size)) msg += BotTheme('ELAPSE', Time=get_readable_time(time() - self.message.date.timestamp())) msg += BotTheme('MODE', Mode=self.upload_details['mode']) LOGGER.info(f'Task Done: {name}') + 
buttons = ButtonMaker() if self.isLeech: msg += BotTheme('L_TOTAL_FILES', Files=folders) if mime_type != 0: msg += BotTheme('L_CORRUPTED_FILES', Corrupt=mime_type) msg += BotTheme('L_CC', Tag=self.tag) + btn_added = False + if not files: - if self.isPrivate: - msg += BotTheme('PM_BOT_MSG') await sendMessage(self.message, msg, photo=self.random_pic) else: - dispTime = datetime.now(timezone(config_dict['TIMEZONE'])).strftime('%d/%m/%y, %I:%M:%S %p') - attachmsg, saved = True, False - fmsg, totalmsg = '\n\n', '' + btn = ButtonMaker() + saved = False + if self.source_url and config_dict['SOURCE_LINK']: + btn.ubutton(BotTheme('SOURCE_URL'), self.source_url) + if self.isSuperGroup: + btn = extra_btns(btn)[0] + message = msg + btns = btn.build_menu(2) + buttons = btn + if self.isSuperGroup and not self.isPM: + message += BotTheme('L_LL_MSG') + elif self.isSuperGroup and self.isPM: + message += BotTheme('L_LL_MSG') + message += BotTheme('L_BOT_MSG') + buttons.ibutton(BotTheme('CHECK_PM'), f"wzmlx {user_id} botpm", 'header') + + fmsg = '\n' for index, (link, name) in enumerate(files.items(), start=1): fmsg += f"{index}. {name}\n" - totalmsg = (msg + BotTheme('LINKS_SOURCE', On=dispTime, Source=self.source_msg) + BotTheme('L_LL_MSG') + fmsg) if attachmsg else fmsg - if len(totalmsg.encode()) > 4000: - if config_dict['SAVE_MSG'] and not saved: - saved = True - buttons.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') - if self.linkslogmsg: - await editMessage(self.linkslogmsg, totalmsg, buttons.build_menu(1)) - self.linkslogmsg = await sendMessage(self.linkslogmsg, "Fetching Details...") - elif not (config_dict['BOT_PM'] or user_dict.get('bot_pm')): - await sendMessage(self.message, msg + BotTheme('L_LL_MSG') + fmsg, buttons.build_menu(1)) - attachmsg = False + if len(msg.encode() + fmsg.encode()) > (4000 if len(config_dict['IMAGES']) == 0 else 1000): + + if config_dict['SAFE_MODE']: + if self.isSuperGroup: + await sendMessage(self.botpmmsg, msg + BotTheme('L_LL_MSG') + fmsg, btns, photo=self.random_pic) + if config_dict['SAVE_MSG'] and not saved and self.isSuperGroup: + saved = True + buttons.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') + if self.isPM: + await sendMessage(self.message, message, buttons.build_menu(2), photo=self.random_pic) + else: + await sendMessage(self.message, message + BotTheme('L_PM_WARN'), buttons.build_menu(2), photo=self.random_pic) + else: + await sendMessage(self.message, message + fmsg, buttons.build_menu(2), photo=self.random_pic) + else: + if config_dict['SAVE_MSG'] and not saved and self.isSuperGroup: + saved = True + buttons.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') + await sendMessage(self.message, message + fmsg, buttons.build_menu(2), photo=self.random_pic) await sleep(1.5) fmsg = '' - if fmsg != '\n\n': - if config_dict['SAVE_MSG'] and not saved: - saved = True - buttons.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') - if self.linkslogmsg: - await editMessage(self.linkslogmsg, totalmsg, buttons.build_menu(1)) - elif not (config_dict['BOT_PM'] or user_dict.get('bot_pm')): - await sendMessage(self.message, msg + BotTheme('L_LL_MSG') + fmsg, buttons.build_menu(1)) - btn = ButtonMaker() - if config_dict['BOT_PM'] or user_dict.get('bot_pm'): - await sendMessage(self.botpmmsg, msg + BotTheme('PM_BOT_MSG'), photo=self.random_pic) - if self.isSuperGroup: - btn.ibutton(BotTheme('CHECK_PM'), f"wzmlx {user_id} botpm", 'header') - if self.linkslogmsg: - btn.ubutton(BotTheme('CHECK_LL'), self.linkslogmsg.link) - if self.source_url and config_dict['SOURCE_LINK']: - 
btn.ubutton(BotTheme('SOURCE_URL'), self.source_url) - btn = extra_btns(btn) - await sendMessage(self.message, msg + BotTheme('L_BOT_MSG'), btn.build_menu(2), self.random_pic) + + if fmsg != '\n': + if config_dict['SAFE_MODE']: + if self.isSuperGroup: + await sendMessage(self.botpmmsg, msg + BotTheme('L_LL_MSG') + fmsg, btns, photo=self.random_pic) + if config_dict['SAVE_MSG'] and not saved and self.isSuperGroup: + saved = True + buttons.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') + if self.isPM: + await sendMessage(self.message, message, buttons.build_menu(2), photo=self.random_pic) + else: + await sendMessage(self.message, message + BotTheme('L_PM_WARN'), buttons.build_menu(2), photo=self.random_pic) + else: + await sendMessage(self.message, message + fmsg, buttons.build_menu(2), photo=self.random_pic) else: - await deleteMessage(self.botpmmsg) - elif self.linkslogmsg: - btn.ubutton(BotTheme('CHECK_LL'), self.linkslogmsg.link) - if self.source_url and config_dict['SOURCE_LINK']: - btn.ubutton(BotTheme('SOURCE_URL'), self.source_url) - btn = extra_btns(btn) - await sendMessage(self.message, msg + BotTheme('L_LL_MSG'), btn.build_menu(2), self.random_pic) - + if config_dict['SAVE_MSG'] and not saved and self.isSuperGroup: + saved = True + buttons.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') + await sendMessage(self.message, message + fmsg, buttons.build_menu(2), photo=self.random_pic) + if self.seed: if self.newDir: await clean_target(self.newDir) @@ -485,18 +505,15 @@ async def onUploadComplete(self, link, size, files, folders, mime_type, name, rc await start_from_queued() return else: - is_DDL = 'gofile' in link or 'streamsb' in link msg += BotTheme('M_TYPE', Mimetype=mime_type) if mime_type == "Folder": - if not is_DDL: - msg += BotTheme('M_SUBFOLD', Folder=folders) - msg += BotTheme('TOTAL_FILES', Files=files) - if link or rclonePath and config_dict['RCLONE_SERVE_URL']: - - if is_DDL: - buttons.ubutton(BotTheme('DDL_LINK', Serv='GoFile'), link) - elif link: - if user_id == OWNER_ID or not config_dict['DISABLE_DRIVE_LINK']: + msg += BotTheme('M_SUBFOLD', Folder=folders) + msg += BotTheme('TOTAL_FILES', Files=files) + if link or rclonePath and config_dict['RCLONE_SERVE_URL'] and not private: + if (is_DDL := isinstance(link, dict)): + for dlup, dlink in link.items(): + buttons.ubutton(BotTheme('DDL_LINK', Serv=dlup), dlink) + elif link and (user_id == OWNER_ID or not config_dict['DISABLE_DRIVE_LINK']): buttons.ubutton(BotTheme('CLOUD_LINK'), link) else: msg += BotTheme('RCPATH', RCpath=rclonePath) @@ -520,45 +537,56 @@ async def onUploadComplete(self, link, size, files, folders, mime_type, name, rc if mime_type.startswith(('image', 'video', 'audio')): share_urls = f'{INDEX_URL}/{url_path}?a=view' buttons.ubutton(BotTheme('VIEW_LINK'), share_urls) - buttons = extra_btns(buttons) - button = buttons.build_menu(2) + else: msg += BotTheme('RCPATH', RCpath=rclonePath) - button = None msg += BotTheme('M_CC', Tag=self.tag) - + message = msg + + btns = ButtonMaker() + #
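+        # mirror log: push a copy of the completion message (with its buttons) to MIRROR_LOG_ID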
    if config_dict['MIRROR_LOG_ID']: - buttonss = button - if button is not None and config_dict['SAVE_MSG']: - buttons.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') - buttonss = buttons.build_menu(2) - log_msg = list((await sendMultiMessage(config_dict['MIRROR_LOG_ID'], msg, buttonss, self.random_pic)).values())[0] + m_btns = deepcopy(buttons) + if self.source_url and config_dict['SOURCE_LINK']: + m_btns.ubutton(BotTheme('SOURCE_URL'), self.source_url) + if config_dict['SAVE_MSG']: + m_btns.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') + log_msg = list((await sendMultiMessage(config_dict['MIRROR_LOG_ID'], message, m_btns.build_menu(2), self.random_pic)).values())[0] if self.linkslogmsg: dispTime = datetime.now(timezone(config_dict['TIMEZONE'])).strftime('%d/%m/%y, %I:%M:%S %p') - btns = ButtonMaker() + _btns = ButtonMaker() if config_dict['SAVE_MSG']: - btns.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') - await editMessage(self.linkslogmsg, (msg + BotTheme('LINKS_SOURCE', On=dispTime, Source=self.source_msg) + BotTheme('L_LL_MSG') + f"\n\n{escape(name)}\n"), btns.build_menu(1)) + _btns.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') + await editMessage(self.linkslogmsg, (msg + BotTheme('LINKS_SOURCE', On=dispTime, Source=self.source_msg) + BotTheme('L_LL_MSG') + f"\n\n{escape(name)}\n"), _btns.build_menu(1)) - buttons = ButtonMaker() - if config_dict['BOT_PM'] or user_dict.get('bot_pm'): - await sendMessage(self.botpmmsg, msg, button, self.random_pic) + #
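+        # BOT_PM delivery: send the result to the user's private chat and point the group reply at it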
    + if self.isPM and self.isSuperGroup: + message += BotTheme('M_BOT_MSG') + buttons = extra_btns(buttons)[0] + btns = extra_btns(btns)[0] + if self.isPM: if self.isSuperGroup: - buttons.ibutton(BotTheme('CHECK_PM'), f"wzmlx {user_id} botpm", 'header') - if self.linkslogmsg: - buttons.ubutton(BotTheme('CHECK_LL'), self.linkslogmsg.link) + s_btn = deepcopy(btns) if config_dict['MIRROR_LOG_ID'] else deepcopy(buttons) if self.source_url and config_dict['SOURCE_LINK']: buttons.ubutton(BotTheme('SOURCE_URL'), self.source_url) - buttons = extra_btns(buttons) - await sendMessage(self.message, msg + BotTheme('M_BOT_MSG'), buttons.build_menu(2), self.random_pic) + if not config_dict['SAFE_MODE']: + s_btn.ubutton(BotTheme('SOURCE_URL'), self.source_url) + if self.botpmmsg: + await sendMessage(self.botpmmsg, message, buttons.build_menu(2), photo=self.random_pic) + if config_dict['SAVE_MSG']: + s_btn.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') + s_btn.ibutton(BotTheme('CHECK_PM'), f"wzmlx {user_id} botpm", 'header') + await sendMessage(self.message, message, s_btn.build_menu(2), photo=self.random_pic) else: - await deleteMessage(self.botpmmsg) - elif self.linkslogmsg: - buttons.ubutton(BotTheme('CHECK_LL'), self.linkslogmsg.link) - if self.source_url and config_dict['SOURCE_LINK']: + if self.source_url and config_dict['SOURCE_LINK']: + buttons.ubutton(BotTheme('SOURCE_URL'), self.source_url) + await sendMessage(self.message, message, buttons.build_menu(2), photo=self.random_pic) + else: + if self.source_url and config_dict['SOURCE_LINK'] and (not self.isSuperGroup or not config_dict['SAFE_MODE']): buttons.ubutton(BotTheme('SOURCE_URL'), self.source_url) - buttons = extra_btns(buttons) - await sendMessage(self.message, msg, buttons.build_menu(2), self.random_pic) + if config_dict['SAVE_MSG'] and self.isSuperGroup: + buttons.ibutton(BotTheme('SAVE_MSG'), 'save', 'footer') + await sendMessage(self.message, message, buttons.build_menu(2), photo=self.random_pic) if self.seed: if self.newDir: @@ -570,7 +598,10 @@ async def onUploadComplete(self, link, size, files, folders, mime_type, name, rc non_queued_up.remove(self.uid) await start_from_queued() return - + + if self.botpmmsg and (not config_dict['DELETE_LINKS'] or config_dict['CLEAN_LOG_MSG']): + await deleteMessage(self.botpmmsg) + await clean_download(self.dir) async with download_dict_lock: if self.uid in download_dict.keys(): @@ -588,6 +619,7 @@ async def onUploadComplete(self, link, size, files, folders, mime_type, name, rc await start_from_queued() await delete_links(self.message) + async def onDownloadError(self, error, button=None): async with download_dict_lock: if self.uid in download_dict.keys(): diff --git a/bot/helper/mirror_utils/download_utils/aria2_download.py b/bot/helper/mirror_utils/download_utils/aria2_download.py index 18881db5fc..cd4c18e93d 100644 --- a/bot/helper/mirror_utils/download_utils/aria2_download.py +++ b/bot/helper/mirror_utils/download_utils/aria2_download.py @@ -15,7 +15,7 @@ async def add_aria2c_download(link, path, listener, filename, auth, ratio, seed_ if filename: a2c_opt['out'] = filename if auth: - a2c_opt['header'] = f"authorization: {auth}" + a2c_opt['header'] = auth if ratio: a2c_opt['seed-ratio'] = ratio if seed_time: diff --git a/bot/helper/mirror_utils/download_utils/direct_downloader.py b/bot/helper/mirror_utils/download_utils/direct_downloader.py new file mode 100644 index 0000000000..758858fdc5 --- /dev/null +++ b/bot/helper/mirror_utils/download_utils/direct_downloader.py @@ -0,0 +1,68 @@ 
+#!/usr/bin/env python3 +from secrets import token_hex + +from bot import (LOGGER, aria2_options, aria2c_global, download_dict, + download_dict_lock, non_queued_dl, queue_dict_lock) +from bot.helper.ext_utils.bot_utils import sync_to_async +from bot.helper.ext_utils.task_manager import is_queued, stop_duplicate_check +from bot.helper.listeners.direct_listener import DirectListener +from bot.helper.mirror_utils.status_utils.direct_status import DirectStatus +from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus +from bot.helper.telegram_helper.message_utils import (sendMessage, + sendStatusMessage) + + +async def add_direct_download(details, path, listener, foldername): + if not (contents:= details.get('contents')): + await sendMessage(listener.message, 'There is nothing to download!') + return + size = details['total_size'] + + if foldername: + path = f'{path}/{foldername}' + + if not foldername: + foldername = details['title'] + msg, button = await stop_duplicate_check(foldername, listener) + if msg: + await sendMessage(listener.message, msg, button) + return + + gid = token_hex(5) + added_to_queue, event = await is_queued(listener.uid) + if added_to_queue: + LOGGER.info(f"Added to Queue/Download: {foldername}") + async with download_dict_lock: + download_dict[listener.uid] = QueueStatus( + foldername, size, gid, listener, 'dl') + await listener.onDownloadStart() + await sendStatusMessage(listener.message) + await event.wait() + async with download_dict_lock: + if listener.uid not in download_dict: + return + from_queue = True + else: + from_queue = False + + a2c_opt = {**aria2_options} + [a2c_opt.pop(k) for k in aria2c_global if k in aria2_options] + if header:= details.get('header'): + a2c_opt['header'] = header + a2c_opt['follow-torrent'] = 'false' + a2c_opt['follow-metalink'] = 'false' + directListener = DirectListener(foldername, size, path, listener, a2c_opt) + async with download_dict_lock: + download_dict[listener.uid] = DirectStatus(directListener, gid, listener, listener.upload_details) + + async with queue_dict_lock: + non_queued_dl.add(listener.uid) + + if from_queue: + LOGGER.info(f'Start Queued Download from Direct Download: {foldername}') + else: + LOGGER.info(f"Download from Direct Download: {foldername}") + await listener.onDownloadStart() + await sendStatusMessage(listener.message) + + await sync_to_async(directListener.download, contents) diff --git a/bot/helper/mirror_utils/download_utils/direct_link_generator.py b/bot/helper/mirror_utils/download_utils/direct_link_generator.py index 4944cd1bc2..90bf237df7 100644 --- a/bot/helper/mirror_utils/download_utils/direct_link_generator.py +++ b/bot/helper/mirror_utils/download_utils/direct_link_generator.py @@ -1,23 +1,28 @@ #!/usr/bin/env python3 +from threading import Thread from base64 import b64decode -from http.cookiejar import MozillaCookieJar from json import loads from os import path -from re import findall, match, search, sub -from time import sleep -from urllib.parse import parse_qs, quote, unquote, urlparse from uuid import uuid4 +from hashlib import sha256 +from time import sleep +from re import findall, match, search -from bs4 import BeautifulSoup +from requests.adapters import HTTPAdapter +from urllib3.util.retry import Retry +from lxml.etree import HTML +from requests import Session, session as req_session, post +from urllib.parse import parse_qs, quote, unquote, urlparse, urljoin from cloudscraper import create_scraper from lk21 import Bypass -from lxml import etree -from requests import 
session -import requests +from http.cookiejar import MozillaCookieJar from bot import LOGGER, config_dict -from bot.helper.ext_utils.bot_utils import get_readable_time, is_share_link +from bot.helper.ext_utils.bot_utils import get_readable_time, is_share_link, is_index_link, is_magnet from bot.helper.ext_utils.exceptions import DirectDownloadLinkException +from bot.helper.ext_utils.help_messages import PASSWORD_ERROR_MESSAGE + +_caches = {} fmed_list = ['fembed.net', 'fembed.com', 'femax20.com', 'fcdn.stream', 'feurl.com', 'layarkacaxxi.icu', 'naniplay.nanime.in', 'naniplay.nanime.biz', 'naniplay.com', 'mm9842.com'] @@ -27,37 +32,96 @@ 'openload.cc', 'share-online.is', 'upvid.cc'] debrid_sites = ['1fichier.com', '2shared.com', '4shared.com', 'alfafile.net', 'anzfile.net', 'backin.net', - 'bayfiles.com', 'bdupload.in', 'brupload.net', 'btafile.com', 'catshare.net', 'clicknupload.me', - 'clipwatching.com', 'cosmobox.org', 'dailymotion.com', 'dailyuploads.net', 'daofile.com', - 'datafilehost.com', 'ddownload.com', 'depositfiles.com', 'dl.free.fr', 'douploads.net', - 'drop.download', 'earn4files.com', 'easybytez.com', 'ex-load.com', 'extmatrix.com', - 'down.fast-down.com', 'fastclick.to', 'faststore.org', 'file.al', 'file4safe.com', 'fboom.me', - 'filefactory.com', 'filefox.cc', 'filenext.com', 'filer.net', 'filerio.in', 'filesabc.com', 'filespace.com', - 'file-up.org', 'fileupload.pw', 'filezip.cc', 'fireget.com', 'flashbit.cc', 'flashx.tv', 'florenfile.com', - 'fshare.vn', 'gigapeta.com', 'goloady.com', 'docs.google.com', 'gounlimited.to', 'heroupload.com', - 'hexupload.net', 'hitfile.net', 'hotlink.cc', 'hulkshare.com', 'icerbox.com', 'inclouddrive.com', - 'isra.cloud', 'katfile.com', 'keep2share.cc', 'letsupload.cc', 'load.to', 'down.mdiaload.com', 'mediafire.com', - 'mega.co.nz', 'mixdrop.co', 'mixloads.com', 'mp4upload.com', 'nelion.me', 'ninjastream.to', 'nitroflare.com', - 'nowvideo.club', 'oboom.com', 'prefiles.com', 'sky.fm', 'rapidgator.net', 'rapidrar.com', 'rapidu.net', - 'rarefile.net', 'real-debrid.com', 'redbunker.net', 'redtube.com', 'rockfile.eu', 'rutube.ru', 'scribd.com', - 'sendit.cloud', 'sendspace.com', 'simfileshare.net', 'solidfiles.com', 'soundcloud.com', 'speed-down.org', - 'streamon.to', 'streamtape.com', 'takefile.link', 'tezfiles.com', 'thevideo.me', 'turbobit.net', 'tusfiles.com', - 'ubiqfile.com', 'uloz.to', 'unibytes.com', 'uploadbox.io', 'uploadboy.com', 'uploadc.com', 'uploaded.net', - 'uploadev.org', 'uploadgig.com', 'uploadrar.com', 'uppit.com', 'upstore.net', 'upstream.to', 'uptobox.com', - 'userscloud.com', 'usersdrive.com', 'vidcloud.ru', 'videobin.co', 'vidlox.tv', 'vidoza.net', 'vimeo.com', - 'vivo.sx', 'vk.com', 'voe.sx', 'wdupload.com', 'wipfiles.net', 'world-files.com', 'worldbytez.com', 'wupfile.com', + 'bayfiles.com', 'bdupload.in', 'brupload.net', 'btafile.com', 'catshare.net', 'clicknupload.me', + 'clipwatching.com', 'cosmobox.org', 'dailymotion.com', 'dailyuploads.net', 'daofile.com', + 'datafilehost.com', 'ddownload.com', 'depositfiles.com', 'dl.free.fr', 'douploads.net', + 'drop.download', 'earn4files.com', 'easybytez.com', 'ex-load.com', 'extmatrix.com', + 'down.fast-down.com', 'fastclick.to', 'faststore.org', 'file.al', 'file4safe.com', 'fboom.me', + 'filefactory.com', 'filefox.cc', 'filenext.com', 'filer.net', 'filerio.in', 'filesabc.com', 'filespace.com', + 'file-up.org', 'fileupload.pw', 'filezip.cc', 'fireget.com', 'flashbit.cc', 'flashx.tv', 'florenfile.com', + 'fshare.vn', 'gigapeta.com', 'goloady.com', 'docs.google.com', 
'gounlimited.to', 'heroupload.com', + 'hexupload.net', 'hitfile.net', 'hotlink.cc', 'hulkshare.com', 'icerbox.com', 'inclouddrive.com', + 'isra.cloud', 'katfile.com', 'keep2share.cc', 'letsupload.cc', 'load.to', 'down.mdiaload.com', 'mediafire.com', + 'mega.co.nz', 'mixdrop.co', 'mixloads.com', 'mp4upload.com', 'nelion.me', 'ninjastream.to', 'nitroflare.com', + 'nowvideo.club', 'oboom.com', 'prefiles.com', 'sky.fm', 'rapidgator.net', 'rapidrar.com', 'rapidu.net', + 'rarefile.net', 'real-debrid.com', 'redbunker.net', 'redtube.com', 'rockfile.eu', 'rutube.ru', 'scribd.com', + 'sendit.cloud', 'sendspace.com', 'simfileshare.net', 'solidfiles.com', 'soundcloud.com', 'speed-down.org', + 'streamon.to', 'streamtape.com', 'takefile.link', 'tezfiles.com', 'thevideo.me', 'turbobit.net', 'tusfiles.com', + 'ubiqfile.com', 'uloz.to', 'unibytes.com', 'uploadbox.io', 'uploadboy.com', 'uploadc.com', 'uploaded.net', + 'uploadev.org', 'uploadgig.com', 'uploadrar.com', 'uppit.com', 'upstore.net', 'upstream.to', 'uptobox.com', + 'userscloud.com', 'usersdrive.com', 'vidcloud.ru', 'videobin.co', 'vidlox.tv', 'vidoza.net', 'vimeo.com', + 'vivo.sx', 'vk.com', 'voe.sx', 'wdupload.com', 'wipfiles.net', 'world-files.com', 'worldbytez.com', 'wupfile.com', 'wushare.com', 'xubster.com', 'youporn.com', 'youtube.com'] -def direct_link_generator(link: str): +debrid_link_sites = ["1dl.net", "1fichier.com", "alterupload.com", "cjoint.net", "desfichiers.com", "dfichiers.com", "megadl.org", + "megadl.fr", "mesfichiers.fr", "mesfichiers.org", "piecejointe.net", "pjointe.com", "tenvoi.com", "dl4free.com", + "apkadmin.com", "bayfiles.com", "clicknupload.link", "clicknupload.org", "clicknupload.co", "clicknupload.cc", + "clicknupload.link", "clicknupload.download", "clicknupload.club", "clickndownload.org", "ddl.to", "ddownload.com", + "depositfiles.com", "dfile.eu", "dropapk.to", "drop.download", "dropbox.com", "easybytez.com", "easybytez.eu", + "easybytez.me", "elitefile.net", "elfile.net", "wdupload.com", "emload.com", "fastfile.cc", "fembed.com", + "feurl.com", "anime789.com", "24hd.club", "vcdn.io", "sharinglink.club", "votrefiles.club", "there.to", "femoload.xyz", + "dailyplanet.pw", "jplayer.net", "xstreamcdn.com", "gcloud.live", "vcdnplay.com", "vidohd.com", "vidsource.me", + "votrefile.xyz", "zidiplay.com", "fcdn.stream", "femax20.com", "sexhd.co", "mediashore.org", "viplayer.cc", "dutrag.com", + "mrdhan.com", "embedsito.com", "diasfem.com", "superplayxyz.club", "albavido.xyz", "ncdnstm.com", "fembed-hd.com", + "moviemaniac.org", "suzihaza.com", "fembed9hd.com", "vanfem.com", "fikper.com", "file.al", "fileaxa.com", "filecat.net", + "filedot.xyz", "filedot.to", "filefactory.com", "filenext.com", "filer.net", "filerice.com", "filesfly.cc", "filespace.com", + "filestore.me", "flashbit.cc", "dl.free.fr", "transfert.free.fr", "free.fr", "gigapeta.com", "gofile.io", "highload.to", + "hitfile.net", "hitf.cc", "hulkshare.com", "icerbox.com", "isra.cloud", "goloady.com", "jumploads.com", "katfile.com", + "k2s.cc", "keep2share.com", "keep2share.cc", "kshared.com", "load.to", "mediafile.cc", "mediafire.com", "mega.nz", + "mega.co.nz", "mexa.sh", "mexashare.com", "mx-sh.net", "mixdrop.co", "mixdrop.to", "mixdrop.club", "mixdrop.sx", + "modsbase.com", "nelion.me", "nitroflare.com", "nitro.download", "e.pcloud.link", "pixeldrain.com", "prefiles.com", "rg.to", + "rapidgator.net", "rapidgator.asia", "scribd.com", "sendspace.com", "sharemods.com", "soundcloud.com", "noregx.debrid.link", + "streamlare.com", "slmaxed.com", "sltube.org", 
"slwatch.co", "streamtape.com", "subyshare.com", "supervideo.tv", "terabox.com", + "tezfiles.com", "turbobit.net", "turbobit.cc", "turbobit.pw", "turbobit.online", "turbobit.ru", "turbobit.live", "turbo.to", + "turb.to", "turb.cc", "turbabit.com", "trubobit.com", "turb.pw", "turboblt.co", "turboget.net", "ubiqfile.com", "ulozto.net", + "uloz.to", "zachowajto.pl", "ulozto.cz", "ulozto.sk", "upload-4ever.com", "up-4ever.com", "up-4ever.net", "uptobox.com", + "uptostream.com", "uptobox.fr", "uptostream.fr", "uptobox.eu", "uptostream.eu", "uptobox.link", "uptostream.link", "upvid.pro", + "upvid.live", "upvid.host", "upvid.co", "upvid.biz", "upvid.cloud", "opvid.org", "opvid.online", "uqload.com", "uqload.co", + "uqload.io", "userload.co", "usersdrive.com", "vidoza.net", "voe.sx", "voe-unblock.com", "voeunblock1.com", "voeunblock2.com", + "voeunblock3.com", "voeunbl0ck.com", "voeunblck.com", "voeunblk.com", "voe-un-block.com", "voeun-block.net", + "reputationsheriffkennethsand.com", "449unceremoniousnasoseptal.com", "world-files.com", "worldbytez.com", "salefiles.com", + "wupfile.com", "youdbox.com", "yodbox.com", "youtube.com", "youtu.be", "4tube.com", "academicearth.org", "acast.com", + "add-anime.net", "air.mozilla.org", "allocine.fr", "alphaporno.com", "anysex.com", "aparat.com", "www.arte.tv", "video.arte.tv", + "sites.arte.tv", "creative.arte.tv", "info.arte.tv", "future.arte.tv", "ddc.arte.tv", "concert.arte.tv", "cinema.arte.tv", + "audi-mediacenter.com", "audioboom.com", "audiomack.com", "beeg.com", "camdemy.com", "chilloutzone.net", "clubic.com", "clyp.it", + "daclips.in", "dailymail.co.uk", "www.dailymail.co.uk", "dailymotion.com", "touch.dailymotion.com", "democracynow.org", + "discovery.com", "investigationdiscovery.com", "discoverylife.com", "animalplanet.com", "ahctv.com", "destinationamerica.com", + "sciencechannel.com", "tlc.com", "velocity.com", "dotsub.com", "ebaumsworld.com", "eitb.tv", "ellentv.com", "ellentube.com", + "flipagram.com", "footyroom.com", "formula1.com", "video.foxnews.com", "video.foxbusiness.com", "video.insider.foxnews.com", + "franceculture.fr", "gameinformer.com", "gamersyde.com", "gorillavid.in", "hbo.com", "hellporno.com", "hentai.animestigma.com", + "hornbunny.com", "imdb.com", "instagram.com", "itar-tass.com", "tass.ru", "jamendo.com", "jove.com", "keek.com", "k.to", + "keezmovies.com", "khanacademy.org", "kickstarter.com", "krasview.ru", "la7.it", "lci.fr", "play.lcp.fr", "libsyn.com", + "html5-player.libsyn.com", "liveleak.com", "livestream.com", "new.livestream.com", "m6.fr", "www.m6.fr", "metacritic.com", + "mgoon.com", "m.mgoon.com", "mixcloud.com", "mojvideo.com", "movieclips.com", "movpod.in", "musicplayon.com", "myspass.de", + "myvidster.com", "odatv.com", "onionstudios.com", "ora.tv", "unsafespeech.com", "play.fm", "plays.tv", "playvid.com", + "pornhd.com", "pornhub.com", "www.pornhub.com", "pyvideo.org", "redtube.com", "embed.redtube.com", "www.redtube.com", + "reverbnation.com", "revision3.com", "animalist.com", "seeker.com", "rts.ch", "rtve.es", "videos.sapo.pt", "videos.sapo.cv", + "videos.sapo.ao", "videos.sapo.mz", "videos.sapo.tl", "sbs.com.au", "www.sbs.com.au", "screencast.com", "skysports.com", + "slutload.com", "soundgasm.net", "store.steampowered.com", "steampowered.com", "steamcommunity.com", "stream.cz", "streamable.com", + "streamcloud.eu", "sunporno.com", "teachertube.com", "teamcoco.com", "ted.com", "tfo.org", "thescene.com", "thesixtyone.com", + "tnaflix.com", "trutv.com", "tu.tv", "turbo.fr", "tweakers.net", "ustream.tv", 
"vbox7.com", "veehd.com", "veoh.com", "vid.me", + "videodetective.com", "vimeo.com", "vimeopro.com", "player.vimeo.com", "player.vimeopro.com", "wat.tv", "wimp.com", "xtube.com", + "yahoo.com", "screen.yahoo.com", "news.yahoo.com", "sports.yahoo.com", "video.yahoo.com", "youporn.com"] + + +def direct_link_generator(link): + auth = None + if isinstance(link, tuple): + link, auth = link + if is_magnet(link): + return real_debrid(link, True) + domain = urlparse(link).hostname if not domain: raise DirectDownloadLinkException("ERROR: Invalid URL") if 'youtube.com' in domain or 'youtu.be' in domain: raise DirectDownloadLinkException("ERROR: Use ytdl cmds for Youtube links") - elif config_dict['DEBRID_API_KEY'] and any(x in domain for x in debrid_sites): - return debrid_extractor(link) - elif 'yadi.sk' in domain or 'disk.yandex.com' in domain: - return yandex_disk(link) + elif config_dict['DEBRID_LINK_API'] and not config_dict['UPTOBOX_TOKEN'] and any(x in domain for x in debrid_link_sites): + return debrid_link(link) + elif config_dict['REAL_DEBRID_API'] and not config_dict['UPTOBOX_TOKEN'] and any(x in domain for x in debrid_sites): + return real_debrid(link) + elif any(x in domain for x in ['filelions.com', 'filelions.live', 'filelions.to']): + return filelions(link) elif 'mediafire.com' in domain: return mediafire(link) elif 'uptobox.com' in domain: @@ -74,8 +138,6 @@ def direct_link_generator(link: str): return pixeldrain(link) elif 'antfiles.com' in domain: return antfiles(link) - elif 'streamtape.com' in domain: - return streamtape(link) elif 'racaty' in domain: return racaty(link) elif '1fichier.com' in domain: @@ -94,16 +156,26 @@ def direct_link_generator(link: str): return shrdsk(link) elif 'letsupload.io' in domain: return letsupload(link) + elif 'gofile.io' in domain: + return gofile(link, auth) + elif 'easyupload.io' in domain: + return easyupload(link) + elif any(x in domain for x in ['dood.watch', 'doodstream.com', 'dood.to', 'dood.so', 'dood.cx', 'dood.la', 'dood.ws', 'dood.sh', 'doodstream.co', 'dood.pm', 'dood.wf', 'dood.re', 'dood.video', 'dooood.com', 'dood.yt', 'dood.stream', 'doods.pro']): + return doods(link) + elif any(x in domain for x in ['streamtape.com', 'streamtape.co', 'streamtape.cc', 'streamtape.to', 'streamtape.net', 'streamta.pe', 'streamtape.xyz']): + return streamtape(link) elif any(x in domain for x in ['wetransfer.com', 'we.tl']): return wetransfer(link) elif any(x in domain for x in anonfilesBaseSites): - return anonfilesBased(link) - elif any(x in domain for x in ['terabox', 'nephobox', '4funbox', 'mirrobox', 'momerybox', 'teraboxapp', '1024tera']): + raise DirectDownloadLinkException('ERROR: R.I.P Anon Sites!') + elif any(x in domain for x in ['terabox.com', 'nephobox.com', '4funbox.com', 'mirrobox.com', 'momerybox.com', 'teraboxapp.com', '1024tera.com']): return terabox(link) elif any(x in domain for x in fmed_list): return fembed(link) elif any(x in domain for x in ['sbembed.com', 'watchsb.com', 'streamsb.net', 'sbplay.org']): return sbembed(link) + elif is_index_link(link) and link.endswith('/'): + return gd_index(link, auth) elif is_share_link(link): if 'gdtot' in domain: return gdtot(link) @@ -117,170 +189,227 @@ def direct_link_generator(link: str): raise DirectDownloadLinkException(f'No Direct link function found for {link}') -def debrid_extractor(url: str) -> str: - """ Debrid Link Extractor (VPN Must)""" - cget = create_scraper().request - try: - resp = cget('POST', 
f"https://api.real-debrid.com/rest/1.0/unrestrict/link?auth_token={config_dict['DEBRID_API_KEY']}", data={'link': url}) +def real_debrid(url: str, tor=False): + """ Real-Debrid Link Extractor (VPN Maybe Needed) + Based on Real-Debrid v1 API (Heroku/VPS) [Without VPN]""" + def __unrestrict(url, tor=False): + cget = create_scraper().request + resp = cget('POST', f"https://api.real-debrid.com/rest/1.0/unrestrict/link?auth_token={config_dict['REAL_DEBRID_API']}", data={'link': url}) if resp.status_code == 200: - return resp.json()['download'] + if tor: + _res = resp.json() + return (_res['filename'], _res['download']) + else: + return resp.json()['download'] else: - raise DirectDownloadLinkException(f"ERROR: {resp['error']}") - except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {resp.json()['error']}") + + def __addMagnet(magnet): + cget = create_scraper().request + hash_ = search(r'(?<=xt=urn:btih:)[a-zA-Z0-9]+', magnet).group(0) + resp = cget('GET', f"https://api.real-debrid.com/rest/1.0/torrents/instantAvailability/{hash_}?auth_token={config_dict['REAL_DEBRID_API']}") + if resp.status_code != 200 or len(resp.json()[hash_.lower()]['rd']) == 0: + return magnet + resp = cget('POST', f"https://api.real-debrid.com/rest/1.0/torrents/addMagnet?auth_token={config_dict['REAL_DEBRID_API']}", data={'magnet': magnet}) + if resp.status_code == 201: + _id = resp.json()['id'] + else: + raise DirectDownloadLinkException(f"ERROR: {resp.json()['error']}") + if _id: + _file = cget('POST', f"https://api.real-debrid.com/rest/1.0/torrents/selectFiles/{_id}?auth_token={config_dict['REAL_DEBRID_API']}", data={'files': 'all'}) + if _file.status_code != 204: + raise DirectDownloadLinkException(f"ERROR: {resp.json()['error']}") + + contents = {'links': []} + while len(contents['links']) == 0: + _res = cget('GET', f"https://api.real-debrid.com/rest/1.0/torrents/info/{_id}?auth_token={config_dict['REAL_DEBRID_API']}") + if _res.status_code == 200: + contents = _res.json() + else: + raise DirectDownloadLinkException(f"ERROR: {_res.json()['error']}") + sleep(0.5) + + details = {'contents': [], 'title': contents['original_filename'], 'total_size': contents['bytes']} + + for file_info, link in zip(contents['files'], contents['links']): + link_info = __unrestrict(link, tor=True) + item = { + "path": path.join(details['title'], path.dirname(file_info['path']).lstrip("/")), + "filename": unquote(link_info[0]), + "url": link_info[1], + } + details['contents'].append(item) + return details - -def yandex_disk(url: str) -> str: - """ Yandex.Disk direct link generator - Based on https://github.com/wldhx/yadisk-direct """ try: - link = findall(r'\b(https?://(yadi.sk|disk.yandex.com)\S+)', url)[0][0] - except IndexError: - return "No Yandex.Disk links found\n" - api = 'https://cloud-api.yandex.net/v1/disk/public/resources/download?public_key={}' + if tor: + details = __addMagnet(url) + else: + return __unrestrict(url) + except Exception as e: + raise DirectDownloadLinkException(e) + if isinstance(details, dict) and len(details['contents']) == 1: + return details['contents'][0]['url'] + return details + + +def debrid_link(url): cget = create_scraper().request - try: - return cget('get', api.format(link)).json()['href'] - except KeyError: - raise DirectDownloadLinkException( - "ERROR: File not found/Download limit reached") + resp = cget('POST', f"https://debrid-link.com/api/v2/downloader/add?access_token={config_dict['DEBRID_LINK_API']}", 
data={'url': url}).json() + if resp['success'] != True: + raise DirectDownloadLinkException(f"ERROR: {resp['error']} & ERROR ID: {resp['error_id']}") + if isinstance(resp['value'], dict): + return resp['value']['downloadUrl'] + elif isinstance(resp['value'], list): + details = {'contents': [], 'title': unquote(url.rstrip('/').split('/')[-1]), 'total_size': 0} + for dl in resp['value']: + if dl.get('expired', False): + continue + item = { + "path": path.join(details['title']), + "filename": dl['name'], + "url": dl['downloadUrl'] + } + if 'size' in dl: + details['total_size'] += dl['size'] + details['contents'].append(item) + return details + + +def get_captcha_token(session, params): + recaptcha_api = 'https://www.google.com/recaptcha/api2' + res = session.get(f'{recaptcha_api}/anchor', params=params) + anchor_html = HTML(res.text) + if not (anchor_token:= anchor_html.xpath('//input[@id="recaptcha-token"]/@value')): + return + params['c'] = anchor_token[0] + params['reason'] = 'q' + res = session.post(f'{recaptcha_api}/reload', params=params) + if token := findall(r'"rresp","(.*?)"', res.text): + return token[0] def uptobox(url: str) -> str: - """ Uptobox direct link generator - based on https://github.com/jovanzers/WinTenCermin and https://github.com/sinoobie/noobie-mirror """ try: link = findall(r'\bhttps?://.*uptobox\.com\S+', url)[0] - except IndexError: - raise DirectDownloadLinkException("No Uptobox links found") + except IndexError as e: + raise DirectDownloadLinkException("No Uptobox links found") from e if link := findall(r'\bhttps?://.*\.uptobox\.com/dl\S+', url): return link[0] - cget = create_scraper().request - try: - file_id = findall(r'\bhttps?://.*uptobox\.com/(\w+)', url)[0] - if UPTOBOX_TOKEN := config_dict['UPTOBOX_TOKEN']: - file_link = f'https://uptobox.com/api/link?token={UPTOBOX_TOKEN}&file_code={file_id}' + with create_scraper() as session: + try: + file_id = findall(r'\bhttps?://.*uptobox\.com/(\w+)', url)[0] + if UPTOBOX_TOKEN := config_dict['UPTOBOX_TOKEN']: + file_link = f'https://uptobox.com/api/link?token={UPTOBOX_TOKEN}&file_code={file_id}' + else: + file_link = f'https://uptobox.com/api/link?file_code={file_id}' + res = session.get(file_link).json() + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e + if res['statusCode'] == 0: + return res['data']['dlLink'] + elif res['statusCode'] == 16: + sleep(1) + waiting_token = res["data"]["waitingToken"] + sleep(res["data"]["waiting"]) + elif res['statusCode'] == 39: + raise DirectDownloadLinkException( + f"ERROR: Uptobox is being limited please wait {get_readable_time(res['data']['waiting'])}") else: - file_link = f'https://uptobox.com/api/link?file_code={file_id}' - res = cget('get', file_link).json() - except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") - if res['statusCode'] == 0: - return res['data']['dlLink'] - elif res['statusCode'] == 16: - sleep(1) - waiting_token = res["data"]["waitingToken"] - sleep(res["data"]["waiting"]) - elif res['statusCode'] == 39: - raise DirectDownloadLinkException( - f"ERROR: Uptobox is being limited please wait {get_readable_time(res['data']['waiting'])}") - else: - raise DirectDownloadLinkException(f"ERROR: {res['message']}") - try: - res = cget('get', f"{file_link}&waitingToken={waiting_token}").json() - return res['data']['dlLink'] - except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {res['message']}") 
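+        # statusCode 16 means a free-account cooldown; after sleeping out the
+        # wait period above, repeat the request with the issued waitingToken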
+        try:
+            res = session.get(f"{file_link}&waitingToken={waiting_token}").json()
+            return res['data']['dlLink']
+        except Exception as e:
+            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
 
-def mediafire(url: str) -> str:
+def mediafire(url, session=None):
+    if '/folder/' in url:
+        return mediafireFolder(url)
     if final_link := findall(r'https?:\/\/download\d+\.mediafire\.com\/\S+\/\S+\/\S+', url):
         return final_link[0]
-    cget = create_scraper().request
+    if session is None:
+        session = Session()
     try:
-        url = cget('get', url).url
-        page = cget('get', url).text
+        html = HTML(session.get(url).text)
     except Exception as e:
-        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
-    if not (final_link := findall(r"\'(https?:\/\/download\d+\.mediafire\.com\/\S+\/\S+\/\S+)\'", page)):
-        raise DirectDownloadLinkException("ERROR: No links found in this page")
+        session.close()
+        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
+    if error := html.xpath('//p[@class="notranslate"]/text()'):
+        session.close()
+        raise DirectDownloadLinkException(f"ERROR: {error[0]}")
+    if not (final_link := html.xpath("//a[@id='downloadButton']/@href")):
+        session.close()
+        raise DirectDownloadLinkException("ERROR: No links found in this page. Try again")
+    if final_link[0].startswith('//'):
+        return mediafire(f'https://{final_link[0][2:]}', session)
+    session.close()
    return final_link[0]
 
-def osdn(url: str) -> str:
-    """ OSDN direct link generator """
-    osdn_link = 'https://osdn.net'
-    try:
-        link = findall(r'\bhttps?://.*osdn\.net\S+', url)[0]
-    except IndexError:
-        raise DirectDownloadLinkException("No OSDN links found")
-    cget = create_scraper().request
-    try:
-        page = BeautifulSoup(
-            cget('get', link, allow_redirects=True).content, 'lxml')
-    except Exception as e:
-        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
-    info = page.find('a', {'class': 'mirror_link'})
-    link = unquote(osdn_link + info['href'])
-    mirrors = page.find('form', {'id': 'mirror-select-form'}).findAll('tr')
-    urls = []
-    for data in mirrors[1:]:
-        mirror = data.find('input')['value']
-        urls.append(sub(r'm=(.*)&f', f'm={mirror}&f', link))
-    return urls[0]
-
-
-def github(url: str) -> str:
-    """ GitHub direct links generator """
+def osdn(url):
+    with create_scraper() as session:
+        try:
+            html = HTML(session.get(url).text)
+        except Exception as e:
+            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
+        if not (direct_link := html.xpath('//a[@class="mirror_link"]/@href')):
+            raise DirectDownloadLinkException("ERROR: Direct link not found")
+        return f'https://osdn.net{direct_link[0]}'
+
+
+def github(url):
     try:
         findall(r'\bhttps?://.*github\.com.*releases\S+', url)[0]
-    except IndexError:
-        raise DirectDownloadLinkException("No GitHub Releases links found")
-    cget = create_scraper().request
-    download = cget('get', url, stream=True, allow_redirects=False)
-    try:
-        return download.headers["location"]
-    except KeyError:
+    except IndexError as e:
+        raise DirectDownloadLinkException("No GitHub Releases links found") from e
+    with create_scraper() as session:
+        _res = session.get(url, stream=True, allow_redirects=False)
+        if 'location' in _res.headers:
+            return _res.headers["location"]
         raise DirectDownloadLinkException("ERROR: Can't extract the link")
 
 
-def hxfile(url: str) -> str:
-    """ Hxfile direct link generator
-    Based on https://github.com/zevtyardt/lk21
-    """
+def hxfile(url):
     try:
         return Bypass().bypass_filesIm(url)
     except Exception as e:
-        raise 
DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") - + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e -def letsupload(url: str) -> str: - cget = create_scraper().request - try: - res = cget("POST", url) - except Exception as e: - raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') - if direct_link := findall(r"(https?://letsupload\.io\/.+?)\'", res.text): - return direct_link[0] - else: - raise DirectDownloadLinkException('ERROR: Direct Link not found') +def letsupload(url): + with create_scraper() as session: + try: + res = session.post(url) + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e + if direct_link := findall(r"(https?://letsupload\.io\/.+?)\'", res.text): + return direct_link[0] + else: + raise DirectDownloadLinkException('ERROR: Direct Link not found') -def anonfilesBased(url: str) -> str: - cget = create_scraper().request - try: - soup = BeautifulSoup(cget('get', url).content, 'lxml') - except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") - if sa := soup.find(id="download-url"): - return sa['href'] - raise DirectDownloadLinkException("ERROR: File not found!") - +def anonfilesBased(url): + with create_scraper() as session: + try: + html = HTML(session.get(url).text) + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e + if sa := html.xpath('//*[@id="download-url"]/@href'): + return sa[0] + raise DirectDownloadLinkException("ERROR: File not found!") -def fembed(link: str) -> str: - """ Fembed direct link generator - Based on https://github.com/zevtyardt/lk21 - """ +def fembed(link): try: dl_url = Bypass().bypass_fembed(link) count = len(dl_url) lst_link = [dl_url[i] for i in dl_url] return lst_link[count-1] except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e -def sbembed(link: str) -> str: +def sbembed(link): """ Sbembed direct link generator Based on https://github.com/zevtyardt/lk21 """ @@ -290,57 +419,53 @@ def sbembed(link: str) -> str: lst_link = [dl_url[i] for i in dl_url] return lst_link[count-1] except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e -def onedrive(link: str) -> str: - """ Onedrive direct link generator - By https://github.com/junedkh """ - cget = create_scraper().request - try: - link = cget('get', link).url - parsed_link = urlparse(link) - link_data = parse_qs(parsed_link.query) - except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") - if not link_data: - raise DirectDownloadLinkException("ERROR: Unable to find link_data") - folder_id = link_data.get('resid') - if not folder_id: - raise DirectDownloadLinkException('ERROR: folder id not found') - folder_id = folder_id[0] - authkey = link_data.get('authkey') - if not authkey: - raise DirectDownloadLinkException('ERROR: authkey not found') - authkey = authkey[0] - boundary = uuid4() - headers = {'content-type': f'multipart/form-data;boundary={boundary}'} - data = f'--{boundary}\r\nContent-Disposition: form-data;name=data\r\nPrefer: Migration=EnableRedirect;FailOnMigratedFiles\r\nX-HTTP-Method-Override: GET\r\nContent-Type: application/json\r\n\r\n--{boundary}--' - try: - resp = cget( - 'get', 
f'https://api.onedrive.com/v1.0/drives/{folder_id.split("!", 1)[0]}/items/{folder_id}?$select=id,@content.downloadUrl&ump=1&authKey={authkey}', headers=headers, data=data).json()
-    except Exception as e:
-        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
+def onedrive(link):
+    with create_scraper() as session:
+        try:
+            link = session.get(link).url
+            parsed_link = urlparse(link)
+            link_data = parse_qs(parsed_link.query)
+        except Exception as e:
+            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
+        if not link_data:
+            raise DirectDownloadLinkException("ERROR: Unable to find link_data")
+        folder_id = link_data.get('resid')
+        if not folder_id:
+            raise DirectDownloadLinkException('ERROR: folder id not found')
+        folder_id = folder_id[0]
+        authkey = link_data.get('authkey')
+        if not authkey:
+            raise DirectDownloadLinkException('ERROR: authkey not found')
+        authkey = authkey[0]
+        boundary = uuid4()
+        headers = {'content-type': f'multipart/form-data;boundary={boundary}'}
+        data = f'--{boundary}\r\nContent-Disposition: form-data;name=data\r\nPrefer: Migration=EnableRedirect;FailOnMigratedFiles\r\nX-HTTP-Method-Override: GET\r\nContent-Type: application/json\r\n\r\n--{boundary}--'
+        try:
+            resp = session.get(f'https://api.onedrive.com/v1.0/drives/{folder_id.split("!", 1)[0]}/items/{folder_id}?$select=id,@content.downloadUrl&ump=1&authKey={authkey}', headers=headers, data=data).json()
+        except Exception as e:
+            raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e
     if "@content.downloadUrl" not in resp:
         raise DirectDownloadLinkException('ERROR: Direct link not found')
     return resp['@content.downloadUrl']
 
 
-def pixeldrain(url: str) -> str:
-    """ Based on https://github.com/yash-dk/TorToolkit-Telegram """
+def pixeldrain(url):
     url = url.strip("/ ")
     file_id = url.split("/")[-1]
     if url.split("/")[-2] == "l":
         info_link = f"https://pixeldrain.com/api/list/{file_id}"
-        dl_link = f"https://pixeldrain.com/api/list/{file_id}/zip"
+        dl_link = f"https://pixeldrain.com/api/list/{file_id}/zip?download"
     else:
         info_link = f"https://pixeldrain.com/api/file/{file_id}/info"
-        dl_link = f"https://pixeldrain.com/api/file/{file_id}"
-    cget = create_scraper().request
-    try:
-        resp = cget('get', info_link).json()
-    except Exception as e:
-        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
+        dl_link = f"https://pixeldrain.com/api/file/{file_id}?download"
+    with create_scraper() as session:
+        try:
+            resp = session.get(info_link).json()
+        except Exception as e:
+            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
     if resp["success"]:
         return dl_link
     else:
@@ -348,55 +473,46 @@ def pixeldrain(url: str) -> str:
             f"ERROR: Can't download due to {resp['message']}.")
 
 
-def antfiles(url: str) -> str:
-    """ Antfiles direct link generator
-    Based on https://github.com/zevtyardt/lk21
-    """
+def antfiles(url):
     try:
-        link = Bypass().bypass_antfiles(url)
+        return Bypass().bypass_antfiles(url)
     except Exception as e:
-        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
-    if not link:
-        raise DirectDownloadLinkException("ERROR: Download link not found")
-    return link
+        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
 
 
-def streamtape(url: str) -> str:
-    """ Streamtape direct link generator
-    Based on https://github.com/zevtyardt/lk21
-    """
+def streamtape(url):
+    splitted_url = url.split("/")
+    _id = splitted_url[4] if len(splitted_url) >= 6 else splitted_url[-1]
     try:
-        link = Bypass().bypass_streamtape(url)
+        with Session() as session:
+            html = HTML(session.get(url).text)
     except Exception as e:
-        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
-    if not link:
+        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
+    if not (script := html.xpath("//script[contains(text(),'ideoooolink')]/text()")):
+        raise DirectDownloadLinkException("ERROR: required script not found")
+    if not (link := findall(r"(&expires\S+)'", script[0])):
         raise DirectDownloadLinkException("ERROR: Download link not found")
-    return link
+    return f"https://streamtape.com/get_video?id={_id}{link[-1]}"
 
 
-def racaty(url: str) -> str:
-    """ Racaty direct link generator
-    By https://github.com/junedkh """
-    cget = create_scraper().request
-    try:
-        url = cget('GET', url).url
-        json_data = {
-            'op': 'download2',
-            'id': url.split('/')[-1]
-        }
-        res = cget('POST', url, data=json_data)
-    except Exception as e:
-        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
-    if (direct_link := etree.HTML(res.text).xpath("//a[contains(@id,'uniqueExpirylink')]/@href")):
+def racaty(url):
+    with create_scraper() as session:
+        try:
+            url = session.get(url).url
+            json_data = {
+                'op': 'download2',
+                'id': url.split('/')[-1]
+            }
+            html = HTML(session.post(url, data=json_data).text)
+        except Exception as e:
+            raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e
    if (direct_link := html.xpath("//a[@id='uniqueExpirylink']/@href")):
        return direct_link[0]
    else:
        raise DirectDownloadLinkException('ERROR: Direct link not found')
 
 
-def fichier(link: str) -> str:
-    """ 1Fichier direct link generator
-    Based on https://github.com/Maujar
-    """
+def fichier(link):
     regex = r"^([http:\/\/|https:\/\/]+)?.*1fichier\.com\/\?.+"
     gan = match(regex, link)
     if not gan:
@@ -416,202 +532,354 @@ def fichier(link: str) -> str:
             pw = {"pass": pswd}
             req = cget('post', url, data=pw)
     except Exception as e:
-        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
+        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
     if req.status_code == 404:
-        raise DirectDownloadLinkException(
-            "ERROR: File not found/The link you entered is wrong!")
-    soup = BeautifulSoup(req.content, 'lxml')
-    if soup.find("a", {"class": "ok btn-general btn-orange"}):
-        if dl_url := soup.find("a", {"class": "ok btn-general btn-orange"})["href"]:
-            return dl_url
-        raise DirectDownloadLinkException(
-            "ERROR: Unable to generate Direct Link 1fichier!")
-    elif len(soup.find_all("div", {"class": "ct_warn"})) == 3:
-        str_2 = soup.find_all("div", {"class": "ct_warn"})[-1]
-        if "you must wait" in str(str_2).lower():
-            if numbers := [int(word) for word in str(str_2).split() if word.isdigit()]:
-                raise DirectDownloadLinkException(
-                    f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute.")
+        raise DirectDownloadLinkException("ERROR: File not found/The link you entered is wrong!")
+    html = HTML(req.text)
+    if dl_url := html.xpath('//a[@class="ok btn-general btn-orange"]/@href'):
+        return dl_url[0]
+    if not (ct_warn := html.xpath('//div[@class="ct_warn"]')):
+        raise DirectDownloadLinkException("ERROR: Error trying to generate Direct Link from 1fichier!")
+    if len(ct_warn) == 3:
+        str_2 = ct_warn[-1].text
+        if "you must wait" in str_2.lower():
+            if numbers := [int(word) for word in str_2.split() if word.isdigit()]:
+                raise DirectDownloadLinkException(f"ERROR: 1fichier is on a limit. 
Please wait {numbers[0]} minute.") else: - raise DirectDownloadLinkException( - "ERROR: 1fichier is on a limit. Please wait a few minutes/hour.") - elif "protect access" in str(str_2).lower(): - raise DirectDownloadLinkException( - "ERROR: This link requires a password!\n\nThis link requires a password!\n- Insert sign :: after the link and write the password after the sign.\n\nExample: https://1fichier.com/?smmtd8twfpm66awbqz04::love you\n\n* No spaces between the signs ::\n* For the password, you can use a space!") + raise DirectDownloadLinkException("ERROR: 1fichier is on a limit. Please wait a few minutes/hour.") + elif "protect access" in str_2.lower(): + raise DirectDownloadLinkException(f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(link)}") else: - raise DirectDownloadLinkException( - "ERROR: Failed to generate Direct Link from 1fichier!") - elif len(soup.find_all("div", {"class": "ct_warn"})) == 4: - str_1 = soup.find_all("div", {"class": "ct_warn"})[-2] - str_3 = soup.find_all("div", {"class": "ct_warn"})[-1] - if "you must wait" in str(str_1).lower(): - if numbers := [int(word) for word in str(str_1).split() if word.isdigit()]: - raise DirectDownloadLinkException( - f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute.") + raise DirectDownloadLinkException("ERROR: Failed to generate Direct Link from 1fichier!") + elif len(ct_warn) == 4: + str_1 = ct_warn[-2].text + str_3 = ct_warn[-1].text + if "you must wait" in str_1.lower(): + if numbers := [int(word) for word in str_1.split() if word.isdigit()]: + raise DirectDownloadLinkException(f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute.") else: - raise DirectDownloadLinkException( - "ERROR: 1fichier is on a limit. Please wait a few minutes/hour.") - elif "bad password" in str(str_3).lower(): - raise DirectDownloadLinkException( - "ERROR: The password you entered is wrong!") + raise DirectDownloadLinkException("ERROR: 1fichier is on a limit. 
Please wait a few minutes/hour.") + elif "bad password" in str_3.lower(): + raise DirectDownloadLinkException("ERROR: The password you entered is wrong!") else: - raise DirectDownloadLinkException( - "ERROR: Error trying to generate Direct Link from 1fichier!") - else: - raise DirectDownloadLinkException( - "ERROR: Error trying to generate Direct Link from 1fichier!") + raise DirectDownloadLinkException("ERROR: Error trying to generate Direct Link from 1fichier!") -def solidfiles(url: str) -> str: - """ Solidfiles direct link generator - Based on https://github.com/Xonshiz/SolidFiles-Downloader - By https://github.com/Jusidama18 """ - cget = create_scraper().request - try: - headers = { - 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36' - } - pageSource = cget('get', url, headers=headers).text - mainOptions = str( - search(r'viewerOptions\'\,\ (.*?)\)\;', pageSource).group(1)) - return loads(mainOptions)["downloadUrl"] - except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") +def solidfiles(url): + with create_scraper() as session: + try: + headers = { + 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36' + } + pageSource = session.get(url, headers=headers).text + mainOptions = str( + search(r'viewerOptions\'\,\ (.*?)\)\;', pageSource).group(1)) + return loads(mainOptions)["downloadUrl"] + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e -def krakenfiles(page_link: str) -> str: - """ krakenfiles direct link generator - Based on https://github.com/tha23rd/py-kraken - By https://github.com/junedkh """ - cget = create_scraper().request - try: - page_resp = cget('get', page_link) - except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") - soup = BeautifulSoup(page_resp.text, "lxml") - try: - token = soup.find("input", id="dl-token")["value"] - except: - raise DirectDownloadLinkException( - f"ERROR: Page link is wrong: {page_link}") - hashes = [ - item["data-file-hash"] - for item in soup.find_all("div", attrs={"data-file-hash": True}) - ] - if not hashes: - raise DirectDownloadLinkException( - f"ERROR: Hash not found for : {page_link}") - dl_hash = hashes[0] - payload = f'------WebKitFormBoundary7MA4YWxkTrZu0gW\r\nContent-Disposition: form-data; name="token"\r\n\r\n{token}\r\n------WebKitFormBoundary7MA4YWxkTrZu0gW--' - headers = { - "content-type": "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW", - "cache-control": "no-cache", - "hash": dl_hash, - } - dl_link_resp = cget( - 'post', f"https://krakenfiles.com/download/{hash}", data=payload, headers=headers) - dl_link_json = dl_link_resp.json() - if "url" in dl_link_json: - return dl_link_json["url"] - else: - raise DirectDownloadLinkException( - f"ERROR: Failed to acquire download URL from kraken for : {page_link}") +def krakenfiles(url): + with Session() as session: + try: + _res = session.get(url) + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e + html = HTML(_res.text) + if post_url:= html.xpath('//form[@id="dl-form"]/@action'): + post_url = f'https:{post_url[0]}' + else: + raise DirectDownloadLinkException('ERROR: Unable to find post link.') + if token:= html.xpath('//input[@id="dl-token"]/@value'): + data = {'token': token[0]} + else: + raise 
DirectDownloadLinkException('ERROR: Unable to find token for post.') + try: + _json = session.post(post_url, data=data).json() + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__} While send post request') from e + if _json['status'] != 'ok': + raise DirectDownloadLinkException("ERROR: Unable to find download after post request") + return _json['url'] -def uploadee(url: str) -> str: - """ uploadee direct link generator - By https://github.com/iron-heart-x""" - cget = create_scraper().request - try: - soup = BeautifulSoup(cget('get', url).content, 'lxml') - sa = soup.find('a', attrs={'id': 'd_l'}) - return sa['href'] - except: - raise DirectDownloadLinkException( - f"ERROR: Failed to acquire download URL from upload.ee for : {url}") +def uploadee(url): + with create_scraper() as session: + try: + html = HTML(session.get(url).text) + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e + if link := html.xpath("//a[@id='d_l']/@href"): + return link[0] + else: + raise DirectDownloadLinkException("ERROR: Direct Link not found") -def terabox(url) -> str: +def terabox(url): if not path.isfile('terabox.txt'): raise DirectDownloadLinkException("ERROR: terabox.txt not found") - session = create_scraper() try: jar = MozillaCookieJar('terabox.txt') jar.load() - cookie_string = '' - for cookie in jar: cookie_string += f'{cookie.name}={cookie.value}; ' - session.cookies.update(jar) - res = session.request('GET', url) - key = res.url.split('?surl=')[-1] - soup = BeautifulSoup(res.content, 'lxml') - jsToken = None - for fs in soup.find_all('script'): - fstring = fs.string - if fstring and fstring.startswith('try {eval(decodeURIComponent'): - jsToken = fstring.split('%22')[1] - headers = {"Cookie": cookie_string} - res = session.request( - 'GET', f'https://www.terabox.com/share/list?app_id=250528&jsToken={jsToken}&shorturl={key}&root=1', headers=headers) - result = res.json() except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") - if result['errno'] != 0: raise DirectDownloadLinkException(f"ERROR: '{result['errmsg']}' Check cookies") - result = result['list'] - if len(result) > 1: - raise DirectDownloadLinkException( - "ERROR: Can't download mutiple files") - result = result[0] - - if result['isdir'] != '0': - raise DirectDownloadLinkException("ERROR: Can't download folder") - + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e + cookies = {} + for cookie in jar: + cookies[cookie.name] = cookie.value + details = {'contents':[], 'title': '', 'total_size': 0} + details["header"] = ' '.join(f'{key}: {value}' for key, value in cookies.items()) + + def __fetch_links(session, dir_='', folderPath=''): + params = { + 'app_id': '250528', + 'jsToken': jsToken, + 'shorturl': shortUrl + } + if dir_: + params['dir'] = dir_ + else: + params['root'] = '1' + try: + _json = session.get("https://www.1024tera.com/share/list", params=params, cookies=cookies).json() + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') + if _json['errno'] not in [0, '0']: + if 'errmsg' in _json: + raise DirectDownloadLinkException(f"ERROR: {_json['errmsg']}") + else: + raise DirectDownloadLinkException('ERROR: Something went wrong!') + + if "list" not in _json: + return + contents = _json["list"] + for content in contents: + if content['isdir'] in ['1', 1]: + if not folderPath: + if not details['title']: + details['title'] = content['server_filename'] + 
newFolderPath = path.join(details['title']) + else: + newFolderPath = path.join(details['title'], content['server_filename']) + else: + newFolderPath = path.join(folderPath, content['server_filename']) + __fetch_links(session, content['path'], newFolderPath) + else: + if not folderPath: + if not details['title']: + details['title'] = content['server_filename'] + folderPath = details['title'] + item = { + 'url': content['dlink'], + 'filename': content['server_filename'], + 'path' : path.join(folderPath), + } + if 'size' in content: + size = content["size"] + if isinstance(size, str) and size.isdigit(): + size = float(size) + details['total_size'] += size + details['contents'].append(item) + + with Session() as session: + try: + _res = session.get(url, cookies=cookies) + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') + if jsToken := findall(r'window\.jsToken.*%22(.*)%22', _res.text): + jsToken = jsToken[0] + else: + raise DirectDownloadLinkException('ERROR: jsToken not found!.') + shortUrl = parse_qs(urlparse(_res.url).query).get('surl') + if not shortUrl: + raise DirectDownloadLinkException("ERROR: Could not find surl") + try: + __fetch_links(session) + except Exception as e: + raise DirectDownloadLinkException(e) + if len(details['contents']) == 1: + return details['contents'][0]['url'] + return details + + +def gofile(url, auth): try: - dlink = result['dlink'] + _password = sha256(auth[1].encode("utf-8")).hexdigest() if auth else '' + _id = url.split("/")[-1] except Exception as e: - raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}, Check cookies") + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") - return dlink + def __get_token(session): + if 'gofile_token' in _caches: + __url = f"https://api.gofile.io/getAccountDetails?token={_caches['gofile_token']}" + else: + __url = 'https://api.gofile.io/createAccount' + try: + __res = session.get(__url, verify=False).json() + if __res["status"] != 'ok': + if 'gofile_token' in _caches: + del _caches['gofile_token'] + return __get_token(session) + _caches['gofile_token'] = __res["data"]["token"] + return _caches['gofile_token'] + except Exception as e: + raise e + def __fetch_links(session, _id, folderPath=''): + _url = f"https://api.gofile.io/getContent?contentId={_id}&token={token}&websiteToken=7fd94ds12fds4&cache=true" + if _password: + _url += f"&password={_password}" + try: + _json = session.get(_url, verify=False).json() + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") + if _json['status'] in 'error-passwordRequired': + raise DirectDownloadLinkException(f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}") + if _json['status'] in 'error-passwordWrong': + raise DirectDownloadLinkException('ERROR: This password is wrong !') + if _json['status'] in 'error-notFound': + raise DirectDownloadLinkException("ERROR: File not found on gofile's server") + if _json['status'] in 'error-notPublic': + raise DirectDownloadLinkException("ERROR: This folder is not public") + + data = _json["data"] + + if not details['title']: + details['title'] = data['name'] if data['type'] == "folder" else _id + + contents = data["contents"] + for content in contents.values(): + if content["type"] == "folder": + if not content['public']: + continue + if not folderPath: + newFolderPath = path.join( + details['title'], content["name"]) + else: + newFolderPath = path.join(folderPath, content["name"]) + __fetch_links(session, content["id"], newFolderPath) + 
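+                # (editor's note) public sub-folders recurse with an extended relative
+                # path (private ones were skipped above); plain files fall through to
+                # the else-branch below and are collected into details['contents']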
else: + if not folderPath: + folderPath = details['title'] + item = { + "path": path.join(folderPath), + "filename": content["name"], + "url": content["link"], + } + if 'size' in content: + size = content["size"] + if isinstance(size, str) and size.isdigit(): + size = float(size) + details['total_size'] += size + details['contents'].append(item) + + details = {'contents':[], 'title': '', 'total_size': 0} + with Session() as session: + try: + token = __get_token(session) + except Exception as e: + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") + details["header"] = f'Cookie: accountToken={token}' + try: + __fetch_links(session, _id) + except Exception as e: + raise DirectDownloadLinkException(e) -def filepress(url): - cget = create_scraper().request + if len(details['contents']) == 1: + return (details['contents'][0]['url'], details['header']) + return details + + +def gd_index(url, auth): + if not auth: + auth = ("admin", "admin") try: - url = cget('GET', url).url - raw = urlparse(url) - json_data = { - 'id': raw.path.split('/')[-1], - 'method': 'publicDownlaod', - } - api = f'{raw.scheme}://{raw.hostname}/api/file/downlaod/' - res = cget('POST', api, headers={ - 'Referer': f'{raw.scheme}://{raw.hostname}'}, json=json_data).json() + _title = url.rstrip('/').split("/")[-1] except Exception as e: - raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') + raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") + + details = {'contents': [], 'title': unquote(_title), 'total_size': 0} + + def __fetch_links(url, folderPath, username, password): + with create_scraper() as session: + payload = { + "id": "", + "type": "folder", + "username": username, + "password": password, + "page_token": "", + "page_index": 0 + } + try: + data = (session.post(url, json=payload)).json() + except: + raise DirectDownloadLinkException("Use Latest Bhadoo Index Link") + + if "data" in data: + for file_info in data["data"]["files"]: + if file_info.get("mimeType", "") == "application/vnd.google-apps.folder": + if not folderPath: + newFolderPath = path.join(details['title'], file_info["name"]) + else: + newFolderPath = path.join(folderPath, file_info["name"]) + __fetch_links(f"{url}{file_info['name']}/", newFolderPath, username, password) + else: + if not folderPath: + folderPath = details['title'] + item = { + "path": path.join(folderPath), + "filename": unquote(file_info["name"]), + "url": urljoin(url, file_info.get("link", "") or ""), + } + if 'size' in file_info: + details['total_size'] += int(file_info["size"]) + details['contents'].append(item) + + try: + __fetch_links(url, "", auth[0], auth[1]) + except Exception as e: + raise DirectDownloadLinkException(e) + if len(details['contents']) == 1: + return details['contents'][0]['url'] + return details + + +def filepress(url): + with create_scraper() as session: + try: + url = session.get(url).url + raw = urlparse(url) + json_data = { + 'id': raw.path.split('/')[-1], + 'method': 'publicDownlaod', + } + api = f'{raw.scheme}://{raw.hostname}/api/file/downlaod/' + res = session.post(api, headers={'Referer': f'{raw.scheme}://{raw.hostname}'}, json=json_data).json() + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e if 'data' not in res: raise DirectDownloadLinkException(f'ERROR: {res["statusText"]}') return f'https://drive.google.com/uc?id={res["data"]}&export=download' - def gdtot(url): cget = create_scraper().request try: res = cget('GET', 
f'https://gdtot.pro/file/{url.split("/")[-1]}')
     except Exception as e:
         raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
-    token_url = etree.HTML(res.content).xpath("//a[contains(@class,'inline-flex items-center justify-center')]/@href")
+    token_url = HTML(res.text).xpath("//a[contains(@class,'inline-flex items-center justify-center')]/@href")
     if not token_url:
         try:
             url = cget('GET', url).url
             p_url = urlparse(url)
-            res = cget("GET", f"{p_url.scheme}://{p_url.hostname}/ddl/{url.split('/')[-1]}")
+            res = cget("POST", f"{p_url.scheme}://{p_url.hostname}/ddl", data={'dl': str(url.split('/')[-1])})
         except Exception as e:
-            raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
+            raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e
         if (drive_link := findall(r"myDl\('(.*?)'\)", res.text)) and "drive.google.com" in drive_link[0]:
             return drive_link[0]
         elif config_dict['GDTOT_CRYPT']:
             cget('GET', url, cookies={'crypt': config_dict['GDTOT_CRYPT']})
             p_url = urlparse(url)
-            js_script = cget('GET', f"{p_url.scheme}://{p_url.hostname}/dld?id={url.split('/')[-1]}")
+            js_script = cget('POST', f"{p_url.scheme}://{p_url.hostname}/dld", data={'dwnld': url.split('/')[-1]})
             g_id = findall('gd=(.*?)&', js_script.text)
             try:
                 decoded_id = b64decode(str(g_id[0])).decode('utf-8')
@@ -624,7 +892,9 @@ def gdtot(url):
     try:
         token_page = cget('GET', token_url)
     except Exception as e:
-        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__} with {token_url}')
+        raise DirectDownloadLinkException(
+            f'ERROR: {e.__class__.__name__} with {token_url}'
+        ) from e
     path = findall('\("(.*?)"\)', token_page.text)
     if not path:
         raise DirectDownloadLinkException('ERROR: Cannot bypass this')
@@ -639,18 +909,16 @@ def sharer_scraper(url):
     try:
         url = cget('GET', url).url
         raw = urlparse(url)
-        header = {
-            "useragent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.548.0 Safari/534.10"}
+        header = {"useragent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.548.0 Safari/534.10"}
         res = cget('GET', url, headers=header)
     except Exception as e:
-        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
+        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e
     key = findall('"key",\s+"(.*?)"', res.text)
     if not key:
         raise DirectDownloadLinkException("ERROR: Key not found!")
     key = key[0]
-    if not etree.HTML(res.content).xpath("//button[@id='drc']"):
-        raise DirectDownloadLinkException(
-            "ERROR: This link don't have direct download button")
+    if not HTML(res.text).xpath("//button[@id='drc']"):
+        raise DirectDownloadLinkException("ERROR: This link doesn't have a direct download button")
     boundary = uuid4()
     headers = {
         'Content-Type': f'multipart/form-data; boundary=----WebKitFormBoundary{boundary}',
@@ -668,32 +936,32 @@ def sharer_scraper(url):
     except Exception as e:
         raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
     if "url" not in res:
-        raise DirectDownloadLinkException(
-            'ERROR: Drive Link not found, Try in your broswer')
+        raise DirectDownloadLinkException('ERROR: Drive Link not found, Try in your browser')
     if "drive.google.com" in res["url"]:
         return res["url"]
     try:
         res = cget('GET', res["url"])
     except Exception as e:
-        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
-    if (drive_link := etree.HTML(res.content).xpath("//a[contains(@class,'btn')]/@href")) and "drive.google.com" in drive_link[0]:
+        raise
DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e
+    if (drive_link := HTML(res.text).xpath("//a[contains(@class,'btn')]/@href")) and "drive.google.com" in drive_link[0]:
         return drive_link[0]
     else:
-        raise DirectDownloadLinkException(
-            'ERROR: Drive Link not found, Try in your broswer')
+        raise DirectDownloadLinkException('ERROR: Drive Link not found, Try in your browser')
+
+
 def wetransfer(url):
-    cget = create_scraper().request
-    try:
-        url = cget('GET', url).url
-        json_data = {
-            'security_hash': url.split('/')[-1],
-            'intent': 'entire_transfer'
-        }
-        res = cget(
-            'POST', f'https://wetransfer.com/api/v4/transfers/{url.split("/")[-2]}/download', json=json_data).json()
-    except Exception as e:
-        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
+    with create_scraper() as session:
+        try:
+            url = session.get(url).url
+            splited_url = url.split('/')
+            json_data = {
+                'security_hash': splited_url[-1],
+                'intent': 'entire_transfer'
+            }
+            res = session.post(f'https://wetransfer.com/api/v4/transfers/{splited_url[-2]}/download', json=json_data).json()
+        except Exception as e:
+            raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e
     if "direct_link" in res:
         return res["direct_link"]
     elif "message" in res:
@@ -705,33 +973,30 @@ def wetransfer(url):


 def akmfiles(url):
-    cget = create_scraper().request
-    try:
-        url = cget('GET', url).url
-        json_data = {
-            'op': 'download2',
-            'id': url.split('/')[-1]
-        }
-        res = cget('POST', url, data=json_data)
-    except Exception as e:
-        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
-    if (direct_link := etree.HTML(res.content).xpath("//a[contains(@class,'btn btn-dow')]/@href")):
+    with create_scraper() as session:
+        try:
+            url = session.get(url).url
+            json_data = {
+                'op': 'download2',
+                'id': url.split('/')[-1]
+            }
+            res = session.post(url, data=json_data)
+        except Exception as e:
+            raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e
+    if (direct_link := HTML(res.text).xpath("//a[contains(@class,'btn btn-dow')]/@href")):
         return direct_link[0]
     else:
         raise DirectDownloadLinkException('ERROR: Direct link not found')

-
 def shrdsk(url):
-    cget = create_scraper().request
-    try:
-        url = cget('GET', url).url
-        res = cget(
-            'GET', f'https://us-central1-affiliate2apk.cloudfunctions.net/get_data?shortid={url.split("/")[-1]}')
-    except Exception as e:
-        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
+    with create_scraper() as session:
+        try:
+            url = session.get(url).url
+            res = session.get(f'https://us-central1-affiliate2apk.cloudfunctions.net/get_data?shortid={url.split("/")[-1]}')
+        except Exception as e:
+            raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e
     if res.status_code != 200:
-        raise DirectDownloadLinkException(
-            f'ERROR: Status Code {res.status_code}')
+        raise DirectDownloadLinkException(f'ERROR: Status Code {res.status_code}')
     res = res.json()
     if ("type" in res and res["type"].lower() == "upload" and "video_url" in res):
         return res["video_url"]
@@ -739,13 +1004,12 @@ def shrdsk(url):


 def linkbox(url):
-    cget = create_scraper().request
-    try:
-        url = cget('GET', url).url
-        res = cget(
-            'GET', f'https://www.linkbox.to/api/file/detail?itemId={url.split("/")[-1]}').json()
-    except Exception as e:
-        raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}')
+    with create_scraper() as session:
+        try:
+            url = session.get(url).url
+            res =
session.get(f'https://www.linkbox.to/api/file/detail?itemId={url.split("/")[-1]}').json() + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e if 'data' not in res: raise DirectDownloadLinkException('ERROR: Data not found!!') data = res['data'] @@ -757,8 +1021,7 @@ def linkbox(url): if 'url' not in itemInfo: raise DirectDownloadLinkException('ERROR: url not found in itemInfo!!') if "name" not in itemInfo: - raise DirectDownloadLinkException( - 'ERROR: Name not found in itemInfo!!') + raise DirectDownloadLinkException('ERROR: Name not found in itemInfo!!') name = quote(itemInfo["name"]) raw = itemInfo['url'].split("/", 3)[-1] return f'https://wdl.nuplink.net/{raw}&filename={name}' @@ -771,4 +1034,228 @@ def route_intercept(route, request): route.continue_() +def mediafireFolder(url): + try: + raw = url.split('/', 4)[-1] + folderkey = raw.split('/', 1)[0] + folderkey = folderkey.split(',') + except: + raise DirectDownloadLinkException('ERROR: Could not parse ') + if len(folderkey) == 1: + folderkey = folderkey[0] + details = {'contents': [], 'title': '', 'total_size': 0, 'header': ''} + + session = req_session() + adapter = HTTPAdapter(max_retries=Retry( + total=10, read=10, connect=10, backoff_factor=0.3)) + session.mount("http://", adapter) + session.mount("https://", adapter) + session = create_scraper( + browser={"browser": "firefox", "platform": "windows", "mobile": False}, + delay=10, + sess=session, + ) + folder_infos = [] + + def __get_info(folderkey): + try: + if isinstance(folderkey, list): + folderkey = ','.join(folderkey) + _json = session.post('https://www.mediafire.com/api/1.5/folder/get_info.php', data={ + 'recursive': 'yes', + 'folder_key': folderkey, + 'response_format': 'json' + }).json() + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While getting info") + _res = _json['response'] + if 'folder_infos' in _res: + folder_infos.extend(_res['folder_infos']) + elif 'folder_info' in _res: + folder_infos.append(_res['folder_info']) + elif 'message' in _res: + raise DirectDownloadLinkException(f"ERROR: {_res['message']}") + else: + raise DirectDownloadLinkException("ERROR: something went wrong!") + + try: + __get_info(folderkey) + except Exception as e: + raise DirectDownloadLinkException(e) + details['title'] = folder_infos[0]["name"] + + def __scraper(url): + try: + html = HTML(session.get(url).text) + except Exception: + return + if final_link := html.xpath("//a[@id='downloadButton']/@href"): + return final_link[0] + + def __get_content(folderKey, folderPath='', content_type='folders'): + try: + params = { + 'content_type': content_type, + 'folder_key': folderKey, + 'response_format': 'json', + } + _json = session.get( + 'https://www.mediafire.com/api/1.5/folder/get_content.php', params=params).json() + except Exception as e: + raise DirectDownloadLinkException( + f"ERROR: {e.__class__.__name__} While getting content") + _res = _json['response'] + if 'message' in _res: + raise DirectDownloadLinkException(f"ERROR: {_res['message']}") + _folder_content = _res['folder_content'] + if content_type == 'folders': + folders = _folder_content['folders'] + for folder in folders: + if folderPath: + newFolderPath = path.join(folderPath, folder["name"]) + else: + newFolderPath = path.join(folder["name"]) + __get_content(folder['folderkey'], newFolderPath) + __get_content(folderKey, folderPath, 'files') + else: + files = _folder_content['files'] + for file in files: + item = {} + if not (_url := 
__scraper(file['links']['normal_download'])): + continue + item['filename'] = file["filename"] + if not folderPath: + folderPath = details['title'] + item['path'] = path.join(folderPath) + item['url'] = _url + if 'size' in file: + size = file["size"] + if isinstance(size, str) and size.isdigit(): + size = float(size) + details['total_size'] += size + details['contents'].append(item) + try: + threads = [] + for folder in folder_infos: + thread = Thread(target=__get_content, args=( + folder['folderkey'], folder['name'])) + threads.append(thread) + for thread in threads: + thread.start() + for thread in threads: + thread.join() + except Exception as e: + session.close() + raise DirectDownloadLinkException(e) + session.close() + if len(details['contents']) == 1: + return (details['contents'][0]['url'], details['header']) + return details + + +def doods(url): + if "/e/" in url: + url = url.replace("/e/", "/d/") + parsed_url = urlparse(url) + with create_scraper() as session: + try: + _res = session.get(url) + html = HTML(_res.text) + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__} While fetching token link') from e + if not (link := html.xpath("//div[@class='download-content']//a/@href")): + raise DirectDownloadLinkException('ERROR: Token Link not found') + link = f'{parsed_url.scheme}://{parsed_url.hostname}/{link[0]}' + try: + _res = session.get(link) + except Exception as e: + raise DirectDownloadLinkException( + f'ERROR: {e.__class__.__name__} While fetching download link') from e + if not (link := search(r"window\.open\('(\S+)'", _res.text)): + raise DirectDownloadLinkException("ERROR: Download link not found try again") + return (link.group(1), f'Referer: {parsed_url.scheme}://{parsed_url.hostname}/') + +def easyupload(url): + if "::" in url: + _password = url.split("::")[-1] + url = url.split("::")[-2] + else: + _password = '' + file_id = url.split("/")[-1] + with create_scraper() as session: + try: + _res = session.get(url) + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e + first_page_html = HTML(_res.text) + if first_page_html.xpath("//h6[contains(text(),'Password Protected')]") and not _password: + raise DirectDownloadLinkException(f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}") + if not (match := search(r'https://eu(?:[1-9][0-9]?|100)\.easyupload\.io/action\.php', _res.text)): + raise DirectDownloadLinkException("ERROR: Failed to get server for EasyUpload Link") + action_url = match.group() + session.headers.update({'referer': 'https://easyupload.io/'}) + recaptcha_params = { + 'k': '6LfWajMdAAAAAGLXz_nxz2tHnuqa-abQqC97DIZ3', + 'ar': '1', + 'co': 'aHR0cHM6Ly9lYXN5dXBsb2FkLmlvOjQ0Mw..', + 'hl': 'en', + 'v': '0hCdE87LyjzAkFO5Ff-v7Hj1', + 'size': 'invisible', + 'cb': 'c3o1vbaxbmwe' + } + if not (captcha_token :=get_captcha_token(session, recaptcha_params)): + raise DirectDownloadLinkException('ERROR: Captcha token not found') + try: + data = {'type': 'download-token', + 'url': file_id, + 'value': _password, + 'captchatoken': captcha_token, + 'method': 'regular'} + json_resp = session.post(url=action_url, data=data).json() + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e + if 'download_link' in json_resp: + return json_resp['download_link'] + elif 'data' in json_resp: + raise DirectDownloadLinkException( + f"ERROR: Failed to generate direct link due to {json_resp['data']}") + raise DirectDownloadLinkException( + "ERROR: Failed to generate 
direct link from EasyUpload.") + + + +def filelions(url): + if not config_dict['FILELION_API']: + raise DirectDownloadLinkException('ERROR: FILELION_API is not provided get it from https://filelions.com/?op=my_account') + file_code = url.split('/')[-1] + quality = '' + if bool(file_code.endswith(('_o', '_h', '_n', '_l'))): + spited_file_code = file_code.rsplit('_', 1) + quality = spited_file_code[1] + file_code = spited_file_code[0] + with Session() as session: + try: + _res = session.get('https://api.filelions.com/api/file/direct_link', params={'key': config_dict['FILELION_API'], 'file_code': file_code, 'hls': '1'}).json() + except Exception as e: + raise DirectDownloadLinkException(f'ERROR: {e.__class__.__name__}') from e + if _res['status'] != 200: + raise DirectDownloadLinkException(f"ERROR: {_res['msg']}") + result = _res['result'] + if not result['versions']: + raise DirectDownloadLinkException("ERROR: No versions available") + error = '\nProvide a quality to download the video\nAvailable Quality:' + for version in result['versions']: + if quality == version['name']: + return version['url'] + elif version['name'] == 'l': + error += f"\nLow" + elif version['name'] == 'n': + error += f"\nNormal" + elif version['name'] == 'o': + error += f"\nOriginal" + elif version['name'] == "h": + error += f"\nHD" + error +=f" {url}_{version['name']}" + raise DirectDownloadLinkException(f'ERROR: {error}') \ No newline at end of file diff --git a/bot/helper/mirror_utils/download_utils/gd_download.py b/bot/helper/mirror_utils/download_utils/gd_download.py index 8c6d9f77a7..d3a5aa33a4 100644 --- a/bot/helper/mirror_utils/download_utils/gd_download.py +++ b/bot/helper/mirror_utils/download_utils/gd_download.py @@ -1,7 +1,6 @@ #!/usr/bin/env python3 from json import dumps as jdumps -from random import SystemRandom -from string import ascii_letters, digits +from secrets import token_hex from cloudscraper import create_scraper as cget from bot import download_dict, download_dict_lock, LOGGER, non_queued_dl, queue_dict_lock @@ -23,8 +22,7 @@ async def add_gd_download(link, path, listener, newname, org_link): return name = newname or name - gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12)) - + gid = token_hex(5) msg, button = await stop_duplicate_check(name, listener) if msg: await sendMessage(listener.message, msg, button) diff --git a/bot/helper/mirror_utils/download_utils/mega_download.py b/bot/helper/mirror_utils/download_utils/mega_download.py index 04bb05e706..19e0acb584 100644 --- a/bot/helper/mirror_utils/download_utils/mega_download.py +++ b/bot/helper/mirror_utils/download_utils/mega_download.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -from random import SystemRandom -from string import ascii_letters, digits +from secrets import token_hex from aiofiles.os import makedirs from asyncio import Event from mega import MegaApi, MegaListener, MegaRequest, MegaTransfer, MegaError @@ -156,7 +155,7 @@ async def add_mega_download(mega_link, path, listener, name): await executor.do(folder_api.logout, ()) return - gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=8)) + gid = token_hex(5) size = api.getSize(node) if limit_exceeded := await limit_checker(size, listener, isMega=True): await sendMessage(listener.message, limit_exceeded) diff --git a/bot/helper/mirror_utils/download_utils/rclone_download.py b/bot/helper/mirror_utils/download_utils/rclone_download.py index 7cc1a1eccb..9ac4203c9d 100644 --- a/bot/helper/mirror_utils/download_utils/rclone_download.py +++ 
b/bot/helper/mirror_utils/download_utils/rclone_download.py @@ -1,8 +1,7 @@ #!/usr/bin/env python3 from asyncio import gather from json import loads -from random import SystemRandom -from string import ascii_letters, digits +from secrets import token_hex from bot import download_dict, download_dict_lock, queue_dict_lock, non_queued_dl, LOGGER from bot.helper.ext_utils.bot_utils import cmd_exec @@ -41,8 +40,7 @@ async def add_rclone_download(rc_path, config_path, path, name, listener): else: name = rc_path.rsplit('/', 1)[-1] size = rsize['bytes'] - gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12)) - + gid = token_hex(5) msg, button = await stop_duplicate_check(name, listener) if msg: await sendMessage(listener.message, msg, button) diff --git a/bot/helper/mirror_utils/download_utils/yt_dlp_download.py b/bot/helper/mirror_utils/download_utils/yt_dlp_download.py index 6b80b1245b..022a71cb25 100644 --- a/bot/helper/mirror_utils/download_utils/yt_dlp_download.py +++ b/bot/helper/mirror_utils/download_utils/yt_dlp_download.py @@ -1,12 +1,11 @@ #!/usr/bin/env python3 from os import path as ospath, listdir -from random import SystemRandom -from string import ascii_letters, digits +from secrets import token_hex from logging import getLogger from yt_dlp import YoutubeDL, DownloadError from re import search as re_search -from bot import download_dict_lock, download_dict, non_queued_dl, queue_dict_lock, config_dict +from bot import download_dict_lock, download_dict, non_queued_dl, queue_dict_lock from bot.helper.telegram_helper.message_utils import sendStatusMessage from ..status_utils.yt_dlp_download_status import YtDlpDownloadStatus from bot.helper.mirror_utils.status_utils.queue_status import QueueStatus @@ -67,10 +66,10 @@ def __init__(self, listener): 'overwrites': True, 'writethumbnail': True, 'trim_file_name': 220, - 'retry_sleep_functions': {'http': lambda x: 2, - 'fragment': lambda x: 2, - 'file_access': lambda x: 2, - 'extractor': lambda x: 2}} + 'retry_sleep_functions': {'http': lambda n: 3, + 'fragment': lambda n: 3, + 'file_access': lambda n: 3, + 'extractor': lambda n: 3}} @property def download_speed(self): @@ -151,11 +150,10 @@ def extractMetaData(self, link, name): self.__size += entry['filesize_approx'] elif 'filesize' in entry: self.__size += entry['filesize'] - if not name: + if not self.name: outtmpl_ = '%(series,playlist_title,channel)s%(season_number& |)s%(season_number&S|)s%(season_number|)02d.%(ext)s' - name, ext = ospath.splitext( + self.name, ext = ospath.splitext( ydl.prepare_filename(entry, outtmpl=outtmpl_)) - self.name = name if not self.__ext: self.__ext = ext else: @@ -194,18 +192,19 @@ async def add_download(self, link, path, name, qual, playlist, options): self.opts['ignoreerrors'] = True self.is_playlist = True - self.__gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=10)) - + self.__gid = token_hex(5) await self.__onDownloadStart() - self.opts['postprocessors'] = [{'add_chapters': True, 'add_infojson': 'if_exists', 'add_metadata': True, 'key': 'FFmpegMetadata'}] + self.opts['postprocessors'] = [ + {'add_chapters': True, 'add_infojson': 'if_exists', 'add_metadata': True, 'key': 'FFmpegMetadata'}] if qual.startswith('ba/b-'): audio_info = qual.split('-') qual = audio_info[0] audio_format = audio_info[1] rate = audio_info[2] - self.opts['postprocessors'].append({'key': 'FFmpegExtractAudio', 'preferredcodec': audio_format, 'preferredquality': rate}) + self.opts['postprocessors'].append( + {'key': 'FFmpegExtractAudio', 'preferredcodec': 
audio_format, 'preferredquality': rate}) if audio_format == 'vorbis': self.__ext = '.ogg' elif audio_format == 'alac': @@ -225,7 +224,8 @@ async def add_download(self, link, path, name, qual, playlist, options): base_name, ext = ospath.splitext(self.name) trim_name = self.name if self.is_playlist else base_name if len(trim_name.encode()) > 200: - self.name = self.name[:200] if self.is_playlist else f'{base_name[:200]}{ext}' + self.name = self.name[: + 200] if self.is_playlist else f'{base_name[:200]}{ext}' base_name = ospath.splitext(self.name)[0] if self.is_playlist: @@ -237,18 +237,20 @@ async def add_download(self, link, path, name, qual, playlist, options): else: self.opts['outtmpl'] = {'default': f"{path}/{self.name}", 'thumbnail': f"{path}/yt-dlp-thumb/{base_name}.%(ext)s"} - self.name = base_name + + if qual.startswith('ba/b'): + self.name = f'{base_name}{self.__ext}' if self.__listener.isLeech: self.opts['postprocessors'].append( {'format': 'jpg', 'key': 'FFmpegThumbnailsConvertor', 'when': 'before_dl'}) - if self.__ext in ['.mp3', '.mkv', '.mka', '.ogg', '.opus', '.flac', '.m4a', '.mp4', '.mov']: + if self.__ext in ['.mp3', '.mkv', '.mka', '.ogg', '.opus', '.flac', '.m4a', '.mp4', '.mov', 'm4v']: self.opts['postprocessors'].append( {'already_have_thumbnail': self.__listener.isLeech, 'key': 'EmbedThumbnail'}) elif not self.__listener.isLeech: self.opts['writethumbnail'] = False - msg, button = await stop_duplicate_check(name, self.__listener) + msg, button = await stop_duplicate_check(self.name, self.__listener) if msg: await self.__listener.onDownloadError(msg, button) return @@ -285,11 +287,13 @@ def __set_options(self, options): options = options.split('|') for opt in options: key, value = map(str.strip, opt.split(':', 1)) + if key == 'format' and value.startswith('ba/b-'): + continue if value.startswith('^'): if '.' 
in value or value == '^inf': - value = float(value.split('^')[1]) + value = float(value.split('^', 1)[1]) else: - value = int(value.split('^')[1]) + value = int(value.split('^', 1)[1]) elif value.lower() == 'true': value = True elif value.lower() == 'false': diff --git a/bot/helper/mirror_utils/rclone_utils/serve.py b/bot/helper/mirror_utils/rclone_utils/serve.py index 2f1f4f9c8b..8b25c32a88 100644 --- a/bot/helper/mirror_utils/rclone_utils/serve.py +++ b/bot/helper/mirror_utils/rclone_utils/serve.py @@ -7,7 +7,6 @@ RcloneServe = [] - async def rclone_serve_booter(): if not config_dict['RCLONE_SERVE_URL'] or not await aiopath.exists('rclone.conf'): if RcloneServe: diff --git a/bot/helper/mirror_utils/status_utils/ddl_status.py b/bot/helper/mirror_utils/status_utils/ddl_status.py index eafa18ab5c..20cb76e2f8 100644 --- a/bot/helper/mirror_utils/status_utils/ddl_status.py +++ b/bot/helper/mirror_utils/status_utils/ddl_status.py @@ -16,7 +16,7 @@ def size(self): return get_readable_file_size(self.__size) def status(self): - return MirrorStatus.STATUS_UPLOADDDL + return MirrorStatus.STATUS_UPLOADING def name(self): return self.__obj.name diff --git a/bot/helper/mirror_utils/status_utils/direct_status.py b/bot/helper/mirror_utils/status_utils/direct_status.py new file mode 100644 index 0000000000..bab1eea4d4 --- /dev/null +++ b/bot/helper/mirror_utils/status_utils/direct_status.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +from bot.helper.ext_utils.bot_utils import (EngineStatus, MirrorStatus, + get_readable_file_size, + get_readable_time) + + + +class DirectStatus: + def __init__(self, obj, gid, listener, upload_details): + self.__gid = gid + self.__listener = listener + self.__obj = obj + self.__name = self.__obj.name + self.upload_details = upload_details + self.message = self.__listener.message + + def gid(self): + return self.__gid + + def speed_raw(self): + return self.__obj.speed + + def progress_raw(self): + try: + return self.processed_raw() / self.__obj.total_size * 100 + except: + return 0 + + def progress(self): + return f'{round(self.progress_raw(), 2)}%' + + def speed(self): + return f'{get_readable_file_size(self.speed_raw())}/s' + + def name(self): + return self.__name + + def size(self): + return get_readable_file_size(self.__obj.total_size) + + def eta(self): + try: + seconds = (self.__obj.total_size - self.processed_raw()) / self.speed_raw() + return get_readable_time(seconds) + except: + return '-' + + def status(self): + return MirrorStatus.STATUS_DOWNLOADING + + def processed_bytes(self): + return get_readable_file_size(self.processed_raw()) + + def processed_raw(self): + return self.__obj.processed_bytes + + def download(self): + return self.__obj + + def eng(self): + return EngineStatus().STATUS_ARIA diff --git a/bot/helper/mirror_utils/upload_utils/ddlEngine.py b/bot/helper/mirror_utils/upload_utils/ddlEngine.py index 3a7f3e2552..39595a1d88 100644 --- a/bot/helper/mirror_utils/upload_utils/ddlEngine.py +++ b/bot/helper/mirror_utils/upload_utils/ddlEngine.py @@ -1,98 +1,130 @@ #!/usr/bin/env python3 -import asyncio +from pathlib import Path +from traceback import format_exc +from json import JSONDecodeError +from io import BufferedReader from re import findall as re_findall -from os import path as ospath +from aiofiles.os import path as aiopath from time import time +from tenacity import retry, wait_exponential, stop_after_attempt, retry_if_exception_type +from aiohttp import ClientSession +from aiohttp.client_exceptions import ContentTypeError from bot import LOGGER, 
user_data +from bot.helper.mirror_utils.upload_utils.ddlserver.gofile import Gofile +from bot.helper.mirror_utils.upload_utils.ddlserver.streamtape import Streamtape from bot.helper.ext_utils.fs_utils import get_mime_type -from bot.helper.ext_utils.bot_utils import setInterval -from bot.helper.mirror_utils.upload_utils.ddlserver.gofile import Async_Gofile -class DDLUploader: - def __init__(self, name=None, path=None, listener=None): +class ProgressFileReader(BufferedReader): + def __init__(self, filename, read_callback=None): + super().__init__(open(filename, "rb")) + self.__read_callback = read_callback + self.length = Path(filename).stat().st_size + + def read(self, size=None): + size = size or (self.length - self.tell()) + if self.__read_callback: + self.__read_callback(self.tell()) + return super().read(size) + + +class DDLUploader: + def __init__(self, listener=None, name=None, path=None): self.name = name self.__processed_bytes = 0 + self.last_uploaded = 0 self.__listener = listener self.__path = path - self.__updater = None self.__start_time = time() - self.__total_files = 0 - self.__total_folders = 0 - self.__is_cancelled = False + self.total_files = 0 + self.total_folders = 0 + self.is_cancelled = False self.__is_errored = False self.__ddl_servers = {} - self.__engine = '' - self.__total_time = 0 - self.__update_interval = 3 + self.__engine = 'DDL v1' + self.__asyncSession = None self.__user_id = self.__listener.message.from_user.id async def __user_settings(self): user_dict = user_data.get(self.__user_id, {}) self.__ddl_servers = user_dict.get('ddl_servers', {}) - async def __progress(self): - if self.__updater is not None: - self.__processed_bytes += self.__updater.interval - - async def __upload_to_gofile(self, file_path, token): - gf = Async_Gofile(token=token) - if ospath.isfile(file_path): - cmd = await gf.upload(file=file_path) - elif ospath.isdir(file_path): - cmd = await gf.upload_folder(path=file_path) - if cmd and 'parentFolder' in cmd: - await gf.set_option(contentId=cmd['parentFolder'], option="public", value="true") - if cmd and 'downloadPage' in cmd: - return cmd['downloadPage'] - raise Exception("Failed to upload file/folder") + def __progress_callback(self, current): + chunk_size = current - self.last_uploaded + self.last_uploaded = current + self.__processed_bytes += chunk_size + + @retry(wait=wait_exponential(multiplier=2, min=4, max=8), stop=stop_after_attempt(3), + retry=retry_if_exception_type(Exception)) + async def upload_aiohttp(self, url, file_path, req_file, data): + with ProgressFileReader(filename=file_path, read_callback=self.__progress_callback) as file: + data[req_file] = file + async with ClientSession() as self.__asyncSession: + async with self.__asyncSession.post(url, data=data) as resp: + if resp.status == 200: + try: + return await resp.json() + except ContentTypeError: + return "Uploaded" + except JSONDecodeError: + return None async def __upload_to_ddl(self, file_path): + all_links = {} for serv, (enabled, api_key) in self.__ddl_servers.items(): if enabled: + self.total_files = 0 + self.total_folders = 0 if serv == 'gofile': self.__engine = 'GoFile API' - return await self.__upload_to_gofile(file_path, api_key) - elif serv == 'streamsb': - self.__engine = 'StreamSB API' - # return await self.__upload_to_streamsb(file_path, api_key) - raise Exception("No DDL Enabled to Upload.") + nlink = await Gofile(self, api_key).upload(file_path) + all_links['GoFile'] = nlink + if serv == 'streamtape': + self.__engine = 'StreamTape API' + try: + login, key 
= api_key.split(':')
+                    except ValueError:
+                        raise Exception("StreamTape Login & Key not Found, Kindly Recheck !")
+                    nlink = await Streamtape(self, login, key).upload(file_path)
+                    all_links['StreamTape'] = nlink
+                self.__processed_bytes = 0
+        if not all_links:
+            raise Exception("No DDL Enabled to Upload.")
+        return all_links

     async def upload(self, file_name, size):
         item_path = f"{self.__path}/{file_name}"
         LOGGER.info(f"Uploading: {item_path} via DDL")
-        self.__updater = setInterval(self.__update_interval, self.__progress)
         await self.__user_settings()
         try:
-            if ospath.isfile(item_path):
+            if await aiopath.isfile(item_path):
                 mime_type = get_mime_type(item_path)
-                link = await self.__upload_to_ddl(item_path)
-                if self.__is_cancelled:
-                    return
-                if link is None:
-                    raise Exception('Upload has been manually cancelled')
-                LOGGER.info(f"Uploaded To DDL: {item_path}")
             else:
                 mime_type = 'Folder'
-                link = await self.__upload_to_ddl(item_path)
-                if link is None:
-                    raise Exception('Upload has been manually cancelled!')
-                if self.__is_cancelled:
-                    return
-                LOGGER.info(f"Uploaded To DDL: {file_name}")
+            link = await self.__upload_to_ddl(item_path)
+            if link is None:
+                raise Exception('Upload has been manually cancelled!')
+            if self.is_cancelled:
+                return
+            LOGGER.info(f"Uploaded To DDL: {item_path}")
         except Exception as err:
-            LOGGER.info(f"DDL Upload has been Cancelled")
+            LOGGER.info("DDL Upload has been Cancelled")
+            if self.__asyncSession:
+                await self.__asyncSession.close()
+            err = str(err).replace('>', '').replace('<', '')
+            LOGGER.info(format_exc())
+            await self.__listener.onUploadError(err)
             self.__is_errored = True
         finally:
-            if self.__is_cancelled or self.__is_errored:
+            if self.is_cancelled or self.__is_errored:
                 return
-            await self.__listener.onUploadComplete(link, size, self.__total_files, self.__total_folders, mime_type, file_name)
+            await self.__listener.onUploadComplete(link, size, self.total_files, self.total_folders, mime_type, file_name)

     @property
     def speed(self):
         try:
-            return self.__processed_bytes / self.__total_time
+            return self.__processed_bytes / int(time() - self.__start_time)
         except ZeroDivisionError:
             return 0
@@ -105,6 +137,8 @@ def engine(self):
         return self.__engine

     async def cancel_download(self):
-        self.__is_cancelled = True
+        self.is_cancelled = True
         LOGGER.info(f"Cancelling Upload: {self.name}")
+        if self.__asyncSession:
+            await self.__asyncSession.close()
         await self.__listener.onUploadError('Your upload has been stopped!')
diff --git a/bot/helper/mirror_utils/upload_utils/ddlserver/gofile.py b/bot/helper/mirror_utils/upload_utils/ddlserver/gofile.py
index 1001de6366..8938710552 100644
--- a/bot/helper/mirror_utils/upload_utils/ddlserver/gofile.py
+++ b/bot/helper/mirror_utils/upload_utils/ddlserver/gofile.py
@@ -1,209 +1,175 @@
-import os
-
+#!/usr/bin/env python3
+from os import path as ospath, walk
+from aiofiles.os import path as aiopath, rename as aiorename
 from asyncio import sleep
 from aiohttp import ClientSession

-from bot.helper.ext_utils.bot_utils import is_valid_token
-
+from bot import LOGGER
+from bot.helper.ext_utils.bot_utils import sync_to_async

-class Async_Gofile:
-    def __init__(self, token=None):
+class Gofile:
+    def __init__(self, dluploader=None, token=None):
         self.api_url = "https://api.gofile.io/"
+        self.dluploader = dluploader
         self.token = token
-        if self.token is not None:
-            is_valid_token(url=self.api_url, token=self.token)

-    async def _api_resp_handler(self, response):
-        api_status = response["status"]
-        if api_status == "ok":
+    @staticmethod
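+    # (editor's note) lightweight token probe: getAccountDetails answers with
+    # status "ok" only for a usable token, so the check doubles as a boolean test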
async def is_goapi(token): + if token is None: + return + async with ClientSession() as session: + async with session.get(f"https://api.gofile.io/getAccountDetails?token={token}&allDetails=true") as resp: + if (await resp.json())["status"] == "ok": + return True + return False + + async def __resp_handler(self, response): + api_resp = response.get("status", "") + if api_resp == "ok": return response["data"] - else: - if "error-" in response["status"]: - error = response["status"].split("-")[1] - else: - error = "Response Status is not ok and reason is unknown" - raise Exception(error) + raise Exception(api_resp.split("-")[1] if "error-" in api_resp else "Response Status is not ok and Reason is Unknown") - async def get_Server(self, pre_session=None): - if pre_session: - server_resp = await pre_session.get(f"{self.api_url}getServer") - server_resp = await server_resp.json() - return await self._api_resp_handler(server_resp) - else: - async with ClientSession() as session: - try: - server_resp = await session.get(f"{self.api_url}getServer") - server_resp = await server_resp.json() - return await self._api_resp_handler(server_resp) - except Exception as e: - raise Exception(e) + async def __getServer(self): + async with ClientSession() as session: + async with session.get(f"{self.api_url}getServer") as resp: + return await self.__resp_handler(await resp.json()) - async def get_Account(self, check_account=False): + async def __getAccount(self, check_account=False): if self.token is None: raise Exception() + + api_url = f"{self.api_url}getAccountDetails?token={self.token}&allDetails=true" async with ClientSession() as session: - try: - get_account_resp = await session.get(url=f"{self.api_url}getAccountDetails?token={self.token}&allDetails=true") - get_account_resp = await get_account_resp.json() - if check_account is True: - if get_account_resp["status"] == "ok": - return True - elif get_account_resp["status"] == "error-wrongToken": - return False - else: - return await self._api_resp_handler(get_account_resp) - else: - return await self._api_resp_handler(get_account_resp) - except Exception as e: - raise Exception(e) - - async def upload_folder(self, path: str, folderId: str = "", delay: int = 2): - if not os.path.isdir(path): - raise Exception(f"{path} is not a valid directory") - - folder_name = os.path.basename(path) - if not folderId: - account_data = await self.get_Account() - rtfid = account_data["rootFolder"] - folder_data = await self.create_folder(rtfid, folder_name) - folderId = folder_data["id"] - - uploaded = None - folder_ids = {".": folderId} # Dictionary to store created folder IDs - for root, dirs, files in os.walk(path): - relative_path = os.path.relpath(root, path) - if relative_path == ".": - current_folder_id = folderId + resp = await (await session.get(url=api_url)).json() + if check_account: + return resp["status"] == "ok" if True else await self.__resp_handler(resp) else: - parent_folder_id = folder_ids.get(os.path.dirname(relative_path), folderId) - folder_name = os.path.basename(relative_path) - folder_data = await self.create_folder(parent_folder_id, folder_name) - current_folder_id = folder_data["id"] - folder_ids[relative_path] = current_folder_id + return await self.__resp_handler(resp) + + async def upload_folder(self, path, folderId=None): + if not await aiopath.isdir(path): + raise Exception(f"Path: {path} is not a valid directory") - for file in files: - file_path = os.path.join(root, file) - udt = await self.upload(file_path, current_folder_id) - if uploaded 
is None: - uploaded = udt - await sleep(delay) - return uploaded - - async def upload(self, file: str, folderId: str = "", description: str = "", password: str = "", tags: str = "", expire: str = ""): - async with ClientSession() as session: - # Check time - if password and len(password) < 4: - raise ValueError("Password Length must be greater than 4") - - server = await self.get_Server(pre_session=session) - server = server["server"] - token = self.token if self.token else "" + folder_data = await self.create_folder((await self.__getAccount())["rootFolder"], ospath.basename(path)) + await self.__setOptions(contentId=folder_data["id"], option="public", value="true") + + folderId = folderId or folder_data["id"] + folder_ids = {".": folderId} + for root, _, files in await sync_to_async(walk, path): + rel_path = ospath.relpath(root, path) + parentFolderId = folder_ids.get(ospath.dirname(rel_path), folderId) + folder_name = ospath.basename(rel_path) + currFolderId = (await self.create_folder(parentFolderId, folder_name))["id"] + await self.__setOptions(contentId=currFolderId, option="public", value="true") + folder_ids[rel_path] = currFolderId - # Making dict - req_dict = {} - if token: - req_dict["token"] = token - if folderId: - req_dict["folderId"] = folderId - if description: - req_dict["description"] = description - if password: - req_dict["password"] = password - if tags: - req_dict["tags"] = tags - if expire: - req_dict["expire"] = expire - - with open(file, "rb") as go_file_d: - req_dict["file"] = go_file_d - upload_file = await session.post( - url=f"https://{server}.gofile.io/uploadFile", - data=req_dict - ) - upload_file = await upload_file.json() - return await self._api_resp_handler(upload_file) + for file in files: + file_path = ospath.join(root, file) + up = await self.upload_file(file_path, currFolderId) + + return folder_data["code"] + + async def upload_file(self, path: str, folderId: str = "", description: str = "", password: str = "", tags: str = "", expire: str = ""): + if password and len(password) < 4: + raise ValueError("Password Length must be greater than 4") + + server = (await self.__getServer())["server"] + token = self.token if self.token else "" + req_dict = {} + if token: + req_dict["token"] = token + if folderId: + req_dict["folderId"] = folderId + if description: + req_dict["description"] = description + if password: + req_dict["password"] = password + if tags: + req_dict["tags"] = tags + if expire: + req_dict["expire"] = expire + + if self.dluploader.is_cancelled: + return + new_path = ospath.join(ospath.dirname(path), ospath.basename(path).replace(' ', '.')) + await aiorename(path, new_path) + self.dluploader.last_uploaded = 0 + upload_file = await self.dluploader.upload_aiohttp(f"https://{server}.gofile.io/uploadFile", new_path, "file", req_dict) + return await self.__resp_handler(upload_file) + + async def upload(self, file_path): + if not await self.is_goapi(self.token): + raise Exception("Invalid Gofile API Key, Recheck your account !!") + if await aiopath.isfile(file_path): + if (gCode := await self.upload_file(path=file_path)) and gCode.get("downloadPage", False): + return gCode['downloadPage'] + elif await aiopath.isdir(file_path): + if (gCode := await self.upload_folder(path=file_path)): + return f"https://gofile.io/d/{gCode}" + if self.dluploader.is_cancelled: + return + raise Exception("Failed to upload file/folder to Gofile API, Retry or Try after sometimes...") async def create_folder(self, parentFolderId, folderName): if self.token is None: raise 
Exception() + async with ClientSession() as session: - try: - folder_resp = await session.put( - url=f"{self.api_url}createFolder", - data={ + async with session.put(url=f"{self.api_url}createFolder", + data={ "parentFolderId": parentFolderId, "folderName": folderName, "token": self.token } - ) - folder_resp = await folder_resp.json() - return await self._api_resp_handler(folder_resp) - except Exception as e: - raise Exception(e) + ) as resp: + return await self.__resp_handler(await resp.json()) - async def set_option(self, contentId, option, value): + async def __setOptions(self, contentId, option, value): if self.token is None: raise Exception() + if not option in ["public", "password", "description", "expire", "tags"]: - raise Exception(option) + raise Exception(f"Invalid GoFile Option Specified : {option}") async with ClientSession() as session: - try: - set_resp = await session.put( - url=f"{self.api_url}setOption", - data={ + async with session.put(url=f"{self.api_url}setOption", + data={ "token": self.token, "contentId": contentId, "option": option, "value": value } - ) - set_resp = await set_resp.json() - return await self._api_resp_handler(set_resp) - except Exception as e: - raise Exception(e) + ) as resp: + return await self.__resp_handler(await resp.json()) async def get_content(self, contentId): if self.token is None: raise Exception() + async with ClientSession() as session: - try: - get_content_resp = await session.get(url=f"{self.api_url}getContent?contentId={contentId}&token={self.token}") - get_content_resp = await get_content_resp.json() - return await self._api_resp_handler(get_content_resp) - except Exception as e: - raise Exception(e) + async with session.get(url=f"{self.api_url}getContent?contentId={contentId}&token={self.token}") as resp: + return await self.__resp_handler(await resp.json()) async def copy_content(self, contentsId, folderIdDest): if self.token is None: raise Exception() async with ClientSession() as session: - try: - copy_content_resp = await session.put( - url=f"{self.api_url}copyContent", + async with session.put(url=f"{self.api_url}copyContent", data={ "token": self.token, "contentsId": contentsId, "folderIdDest": folderIdDest } - ) - copy_content_resp = await copy_content_resp.json() - return await self._api_resp_handler(copy_content_resp) - except Exception as e: - raise Exception(e) + ) as resp: + return await self.__resp_handler(await resp.json()) async def delete_content(self, contentId): if self.token is None: raise Exception() async with ClientSession() as session: - try: - del_content_resp = await session.delete( - url=f"{self.api_url}deleteContent", + async with session.delete(url=f"{self.api_url}deleteContent", data={ "contentId": contentId, "token": self.token } - ) - del_content_resp = await del_content_resp.json() - return await self._api_resp_handler(del_content_resp) - except Exception as e: - raise Exception(e) + ) as resp: + return await self.__resp_handler(await resp.json()) diff --git a/bot/helper/mirror_utils/upload_utils/ddlserver/streamtape.py b/bot/helper/mirror_utils/upload_utils/ddlserver/streamtape.py new file mode 100644 index 0000000000..82989a388c --- /dev/null +++ b/bot/helper/mirror_utils/upload_utils/ddlserver/streamtape.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 +from pathlib import Path + +from aiofiles.os import scandir, path as aiopath +from aiofiles import open as aiopen +from aiohttp import ClientSession + +from bot import config_dict, LOGGER +from bot.helper.ext_utils.telegraph_helper import telegraph + 
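+# (editor's note) Streamtape accepts only video containers, so upload_file()
+# skips anything outside ALLOWED_EXTS below with a short notice instead of
+# failing the task; upload() dispatches to upload_file()/upload_folder(), and
+# uploaded folders end in a Telegraph index page built by list_telegraph().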
+ALLOWED_EXTS = [ + '.avi', '.mkv', '.mpg', '.mpeg', '.vob', '.wmv', '.flv', '.mp4', '.mov', '.m4v', + '.m2v', '.divx', '.3gp', '.webm', '.ogv', '.ogg', '.ts', '.ogm' +] + +class Streamtape: + def __init__(self, dluploader, login, key): + self.__userLogin = login + self.__passKey = key + self.dluploader = dluploader + self.base_url = 'https://api.streamtape.com' + + async def __getAccInfo(self): + async with ClientSession() as session, session.get(f"{self.base_url}/account/info?login={self.__userLogin}&key={self.__passKey}") as response: + if response.status == 200: + if (data := await response.json()) and data["status"] == 200: + return data["result"] + return None + + async def __getUploadURL(self, folder=None, sha256=None, httponly=False): + _url = f"{self.base_url}/file/ul?login={self.__userLogin}&key={self.__passKey}" + if folder is not None: + _url += f"&folder={folder}" + if sha256 is not None: + _url += f"&sha256={sha256}" + if httponly: + _url += "&httponly=true" + async with ClientSession() as session: + async with session.get(_url) as response: + if response.status == 200: + data = await response.json() + if (data := await response.json()) and data["status"] == 200: + return data["result"] + return None + + async def upload_file(self, file_path, folder_id=None, sha256=None, httponly=False): + if Path(file_path).suffix.lower() not in ALLOWED_EXTS: + return f"Skipping '{file_path}' due to disallowed extension." + file_name = Path(file_path).name + if not folder_id: + genfolder = await self.create_folder(file_name.rsplit(".", 1)[0]) + if genfolder is None: + return None + folder_id = genfolder["folderid"] + upload_info = await self.__getUploadURL(folder=folder_id, sha256=sha256, httponly=httponly) + if upload_info is None: + return None + if self.dluploader.is_cancelled: + return + self.dluploader.last_uploaded = 0 + uploaded = await self.dluploader.upload_aiohttp(upload_info["url"], file_path, file_name, {}) + if uploaded: + file_id = (await self.list_folder(folder=folder_id))['files'][0]['linkid'] + await self.rename(file_id, file_name) + return f"https://streamtape.to/v/{file_id}" + return None + + async def create_folder(self, name, parent=None): + exfolders = [folder["name"] for folder in (await self.list_folder(folder=parent) or {"folders": []})["folders"]] + if name in exfolders: + i = 1 + while f"{i} {name}" in exfolders: + i += 1 + name = f"{i} {name}" + + url = f"{self.base_url}/file/createfolder?login={self.__userLogin}&key={self.__passKey}&name={name}" + if parent is not None: + url += f"&pid={parent}" + async with ClientSession() as session, session.get(url) as response: + if response.status == 200: + data = await response.json() + if data.get("status") == 200: + return data.get("result") + return None + + async def rename(self, file_id, name): + url = f"{self.base_url}/file/rename?login={self.__userLogin}&key={self.__passKey}&file={file_id}&name={name}" + async with ClientSession() as session, session.get(url) as response: + if response.status == 200: + data = await response.json() + if data.get("status") == 200: + return data.get("result") + return None + + async def list_telegraph(self, folder_id, nested=False): + tg_html = "" + contents = await self.list_folder(folder_id) + for fid in contents['folders']: + tg_html += f"
<b>πŸ—‚ {fid['name']}</b><br>"
+            tg_html += await self.list_telegraph(fid['id'], True)
+        tg_html += "<ol>"
+        for finfo in contents['files']:
+            tg_html += f"""<li><code>{finfo['name']}</code><br>πŸ”— <a href="https://streamtape.to/v/{finfo['linkid']}">StreamTape URL</a></li>"""
+        tg_html += "</ol>"
+        if nested:
+            return tg_html
+        tg_html = f"""
    """ + tg_html + path = (await telegraph.create_page(title=f"StreamTape X", content=tg_html))["path"] + return f"https://te.legra.ph/{path}" + + async def list_folder(self, folder=None): + url = f"{self.base_url}/file/listfolder?login={self.__userLogin}&key={self.__passKey}" + if folder is not None: + url += f"&folder={folder}" + async with ClientSession() as session, session.get(url) as response: + if response.status == 200: + if (data := await response.json()) and data["status"] == 200: + return data["result"] + return None + + async def upload_folder(self, folder_path, parent_folder_id=None): + folder_name = Path(folder_path).name + genfolder = await self.create_folder(name=folder_name, parent=parent_folder_id) + + if genfolder and (newfid := genfolder.get("folderid")): + for entry in await scandir(folder_path): + if entry.is_file(): + await self.upload_file(entry.path, newfid) + self.dluploader.total_files += 1 + elif entry.is_dir(): + await self.upload_folder(entry.path, newfid) + self.dluploader.total_folders += 1 + return await self.list_telegraph(newfid) + return None + + async def upload(self, file_path): + stlink = None + if await aiopath.isfile(file_path): + stlink = await self.upload_file(file_path) + elif await aiopath.isdir(file_path): + stlink = await self.upload_folder(file_path) + if stlink: + return stlink + if self.dluploader.is_cancelled: + return + raise Exception("Failed to upload file/folder to StreamTape API, Retry! or Try after sometimes...") + + \ No newline at end of file diff --git a/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py b/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py index 691cd43c6e..79da4e0e1d 100644 --- a/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py +++ b/bot/helper/mirror_utils/upload_utils/pyrogramEngine.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 from traceback import format_exc from logging import getLogger, ERROR -from aiofiles.os import remove as aioremove, path as aiopath, rename as aiorename, makedirs +from aiofiles.os import remove as aioremove, path as aiopath, rename as aiorename, makedirs, rmdir from os import walk, path as ospath from time import time from PIL import Image @@ -19,7 +19,7 @@ from bot.helper.telegram_helper.message_utils import sendCustomMsg, editReplyMarkup, sendMultiMessage, chat_info, deleteMessage from bot.helper.ext_utils.fs_utils import clean_unwanted, is_archive, get_base_name from bot.helper.ext_utils.bot_utils import get_readable_file_size, sync_to_async -from bot.helper.ext_utils.leech_utils import get_media_info, get_document_type, take_ss, get_mediainfo_link, format_filename +from bot.helper.ext_utils.leech_utils import get_audio_thumb, get_media_info, get_document_type, take_ss, get_ss, get_mediainfo_link, format_filename LOGGER = getLogger(__name__) getLogger("pyrogram").setLevel(ERROR) @@ -36,6 +36,7 @@ def __init__(self, name=None, path=None, listener=None): self.__start_time = time() self.__total_files = 0 self.__is_cancelled = False + self.__retry_error = False self.__thumb = f"Thumbnails/{listener.message.from_user.id}.jpg" self.__sent_msg = None self.__has_buttons = False @@ -54,9 +55,15 @@ def __init__(self, name=None, path=None, listener=None): self.__bot_pm = False self.__user_id = listener.message.from_user.id self.__leechmsg = {} + self.__leech_utils = self.__listener.leech_utils - async def __buttons(self, up_path): + async def __buttons(self, up_path, is_video=False): buttons = ButtonMaker() + try: + if is_video and bool(self.__leech_utils['screenshots']): + 
buttons.ubutton(BotTheme('SCREENSHOTS'), await get_ss(up_path, self.__leech_utils['screenshots'])) + except Exception as e: + LOGGER.error(f"ScreenShots Error: {e}") try: if self.__mediainfo: buttons.ubutton(BotTheme('MEDIAINFO_LINK'), await get_mediainfo_link(up_path)) @@ -71,32 +78,51 @@ async def __buttons(self, up_path): async def __copy_file(self): try: if self.__bot_pm and (self.__leechmsg and not self.__listener.excep_chat or self.__listener.isSuperGroup): - copied = await bot.copy_message(chat_id=self.__user_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id, reply_to_message_id=self.__listener.botpmmsg.id) + copied = await bot.copy_message( + chat_id=self.__user_id, + from_chat_id=self.__sent_msg.chat.id, + message_id=self.__sent_msg.id, + reply_to_message_id=self.__listener.botpmmsg.id + ) if copied and self.__has_buttons: - await editReplyMarkup(copied, (InlineKeyboardMarkup(BTN) if (BTN := self.__sent_msg.reply_markup.inline_keyboard[:-1]) else None) if config_dict['SAVE_MSG'] else self.__sent_msg.reply_markup) + btn_markup = InlineKeyboardMarkup(BTN) if (BTN := self.__sent_msg.reply_markup.inline_keyboard[:-1]) else None + await editReplyMarkup(copied, btn_markup if config_dict['SAVE_MSG'] else self.__sent_msg.reply_markup) except Exception as err: if not self.__is_cancelled: LOGGER.error(f"Failed To Send in BotPM:\n{str(err)}") + try: if len(self.__leechmsg) > 1 and not self.__listener.excep_chat: for chat_id, msg in list(self.__leechmsg.items())[1:]: - self.__leechmsg[chat_id] = await bot.copy_message(chat_id=chat_id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id, reply_to_message_id=msg.id) + chat_id, *topics = chat_id.split(':') + leech_copy = await bot.copy_message( + chat_id=int(chat_id), + from_chat_id=self.__sent_msg.chat.id, + message_id=self.__sent_msg.id, + reply_to_message_id=msg.id + ) + # Layer 161 Needed for Topics ! 
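The `chat_id.split(':')` above implements the patch's `chat_id:topic_id` addressing convention for forum topics. In isolation, the parsing rule looks like this (helper name is mine, not the bot's):

```python
# Sketch: split a "chat[:topic]" destination into chat id and optional topic id.
def parse_destination(dest):
    chat_id, *topic = dest.split(':')
    return int(chat_id), (int(topic[0]) if topic else None)


assert parse_destination("-100123456:42") == (-100123456, 42)
assert parse_destination("-100123456") == (-100123456, None)
```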
if config_dict['CLEAN_LOG_MSG'] and msg.text: await deleteMessage(msg) - if (leechmsg := self.__leechmsg[chat_id]) and self.__has_buttons: - await editReplyMarkup(leechmsg, self.__sent_msg.reply_markup) + if leech_copy and self.__has_buttons: + await editReplyMarkup(leech_copy, self.__sent_msg.reply_markup) except Exception as err: if not self.__is_cancelled: LOGGER.error(f"Failed To Send in Leech Log [ {chat_id} ]:\n{str(err)}") + try: if self.__upload_dest: for channel_id in self.__upload_dest: if chat := (await chat_info(channel_id)): try: - dump_copy = await bot.copy_message(chat_id=chat.id, from_chat_id=self.__sent_msg.chat.id, message_id=self.__sent_msg.id) + dump_copy = await bot.copy_message( + chat_id=chat.id, + from_chat_id=self.__sent_msg.chat.id, + message_id=self.__sent_msg.id + ) if dump_copy and self.__has_buttons: - rply = (InlineKeyboardMarkup(BTN) if (BTN := self.__sent_msg.reply_markup.inline_keyboard[:-1]) else None) if config_dict['SAVE_MSG'] else self.__sent_msg.reply_markup - await editReplyMarkup(dump_copy, rply) + btn_markup = InlineKeyboardMarkup(BTN) if (BTN := self.__sent_msg.reply_markup.inline_keyboard[:-1]) else None + await editReplyMarkup(dump_copy, btn_markup if config_dict['SAVE_MSG'] else self.__sent_msg.reply_markup) except (ChannelInvalid, PeerIdInvalid) as e: LOGGER.error(f"{e.NAME}: {e.MESSAGE} for {channel_id}") continue @@ -104,6 +130,7 @@ async def __copy_file(self): if not self.__is_cancelled: LOGGER.error(f"Failed To Send in User Dump:\n{str(err)}") + async def __upload_progress(self, current, total): if self.__is_cancelled: if IS_PREMIUM_USER: @@ -115,12 +142,12 @@ async def __upload_progress(self, current, total): async def __user_settings(self): user_dict = user_data.get(self.__user_id, {}) - self.__as_doc = user_dict.get('as_doc') or config_dict['AS_DOCUMENT'] - self.__media_group = user_dict.get('media_group') or config_dict['MEDIA_GROUP'] - self.__bot_pm = config_dict['BOT_PM'] or user_dict.get('bot_pm') - self.__mediainfo = config_dict['SHOW_MEDIAINFO'] or user_dict.get('mediainfo') + self.__as_doc = user_dict.get('as_doc', False) or (config_dict['AS_DOCUMENT'] if 'as_doc' not in user_dict else False) + self.__media_group = user_dict.get('media_group') or (config_dict['MEDIA_GROUP'] if 'media_group' not in user_dict else False) + self.__bot_pm = user_dict.get('bot_pm') or (config_dict['BOT_PM'] if 'bot_pm' not in user_dict else False) + self.__mediainfo = user_dict.get('mediainfo') or (config_dict['SHOW_MEDIAINFO'] if 'mediainfo' not in user_dict else False) self.__upload_dest = ud if (ud:=self.__listener.upPath) and isinstance(ud, list) else [ud] - self.__has_buttons = bool(config_dict['SAVE_MSG'] or self.__mediainfo) + self.__has_buttons = bool(config_dict['SAVE_MSG'] or self.__mediainfo or self.__leech_utils['screenshots']) if not await aiopath.exists(self.__thumb): self.__thumb = None @@ -147,6 +174,7 @@ async def __prepare_file(self, prefile_, dirpath): try: file_, cap_mono = await format_filename(prefile_, self.__user_id, dirpath) except Exception as err: + LOGGER.info(format_exc()) return await self.__listener.onUploadError(f'Error in Format Filename : {err}') if prefile_ != file_: if self.__listener.seed and not self.__listener.newDir and not dirpath.endswith("/splited_files_mltb"): @@ -306,6 +334,9 @@ async def upload(self, o_files, m_size, size): if self.__total_files <= self.__corrupted: await self.__listener.onUploadError('Files Corrupted or unable to upload. 
Check logs!') return + if self.__retry_error: + await self.__listener.onUploadError('Unknown Error Occurred. Check logs & Contact Bot Owner!') + return LOGGER.info(f"Leech Completed: {self.name}") await self.__listener.onUploadComplete(None, size, self.__msgs_dict, self.__total_files, self.__corrupted, self.name) @@ -324,6 +355,8 @@ async def __upload_file(self, cap_mono, file, force_document=False): thumb_path = f"{self.__path}/yt-dlp-thumb/{file_name}.jpg" if await aiopath.isfile(thumb_path): thumb = thumb_path + elif is_audio and not is_video: + thumb = await get_audio_thumb(self.__up_path) if self.__as_doc or force_document or (not is_video and not is_audio and not is_image): key = 'documents' @@ -331,6 +364,7 @@ async def __upload_file(self, cap_mono, file, force_document=False): thumb = await take_ss(self.__up_path, None) if self.__is_cancelled: return + buttons = await self.__buttons(self.__up_path, is_video) nrml_media = await self.__client.send_document(chat_id=self.__sent_msg.chat.id, reply_to_message_id=self.__sent_msg.id, document=self.__up_path, @@ -339,11 +373,11 @@ async def __upload_file(self, cap_mono, file, force_document=False): force_document=True, disable_notification=True, progress=self.__upload_progress, - reply_markup=await self.__buttons(self.__up_path)) + reply_markup=buttons) if self.__prm_media and (self.__has_buttons or not self.__leechmsg): try: - self.__sent_msg = await bot.copy_message(nrml_media.chat.id, nrml_media.chat.id, nrml_media.id, reply_to_message_id=self.__sent_msg.id, reply_markup=await self.__buttons(self.__up_path)) + self.__sent_msg = await bot.copy_message(nrml_media.chat.id, nrml_media.chat.id, nrml_media.id, reply_to_message_id=self.__sent_msg.id, reply_markup=buttons) if self.__sent_msg: await deleteMessage(nrml_media) except: self.__sent_msg = nrml_media @@ -374,6 +408,7 @@ async def __upload_file(self, cap_mono, file, force_document=False): self.__up_path = new_path if self.__is_cancelled: return + buttons = await self.__buttons(self.__up_path, is_video) nrml_media = await self.__client.send_video(chat_id=self.__sent_msg.chat.id, reply_to_message_id=self.__sent_msg.id, video=self.__up_path, @@ -385,10 +420,10 @@ async def __upload_file(self, cap_mono, file, force_document=False): supports_streaming=True, disable_notification=True, progress=self.__upload_progress, - reply_markup=await self.__buttons(self.__up_path)) + reply_markup=buttons) if self.__prm_media and (self.__has_buttons or not self.__leechmsg): try: - self.__sent_msg = await bot.copy_message(nrml_media.chat.id, nrml_media.chat.id, nrml_media.id, reply_to_message_id=self.__sent_msg.id, reply_markup=await self.__buttons(self.__up_path)) + self.__sent_msg = await bot.copy_message(nrml_media.chat.id, nrml_media.chat.id, nrml_media.id, reply_to_message_id=self.__sent_msg.id, reply_markup=buttons) if self.__sent_msg: await deleteMessage(nrml_media) except: self.__sent_msg = nrml_media @@ -440,12 +475,18 @@ async def __upload_file(self, cap_mono, file, force_document=False): if self.__thumb is None and thumb is not None and await aiopath.exists(thumb): await aioremove(thumb) + if (dir_name := ospath.dirname(thumb)) and dir_name != "Thumbnails" and await aiopath.exists(dir_name): + await rmdir(dir_name) + self.__retry_error = False except FloodWait as f: LOGGER.warning(str(f)) await sleep(f.value) except Exception as err: + self.__retry_error = True if self.__thumb is None and thumb is not None and await aiopath.exists(thumb): await aioremove(thumb) + if (dir_name := 
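The reworked `__user_settings` expressions all encode the same precedence rule: an explicit per-user setting wins, even when it is `False`, and the global config only applies if the user never set the key. A standalone sketch of that rule (helper name is illustrative):

```python
# Sketch of the per-user vs. global precedence in __user_settings:
# a key present in user_dict overrides the global default outright.
def resolve_pref(user_dict, config_dict, user_key, config_key):
    if user_key in user_dict:
        return bool(user_dict[user_key])
    return bool(config_dict.get(config_key, False))


conf = {'AS_DOCUMENT': True}
assert resolve_pref({'as_doc': False}, conf, 'as_doc', 'AS_DOCUMENT') is False
assert resolve_pref({}, conf, 'as_doc', 'AS_DOCUMENT') is True
```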
ospath.dirname(thumb)) and dir_name != "Thumbnails" and await aiopath.exists(dir_name): + await rmdir(dir_name) LOGGER.error(f"{format_exc()}. Path: {self.__up_path}") if 'Telegram says: [400' in str(err) and key != 'documents': LOGGER.error(f"Retrying As Document. Path: {self.__up_path}") diff --git a/bot/helper/telegram_helper/button_build.py b/bot/helper/telegram_helper/button_build.py index 7afb26371f..3570b88801 100644 --- a/bot/helper/telegram_helper/button_build.py +++ b/bot/helper/telegram_helper/button_build.py @@ -5,40 +5,56 @@ class ButtonMaker: def __init__(self): self.__button = [] self.__header_button = [] + self.__first_body_button = [] + self.__last_body_button = [] self.__footer_button = [] def ubutton(self, key, link, position=None): if not position: self.__button.append(InlineKeyboardButton(text=key, url=link)) elif position == 'header': - self.__header_button.append( - InlineKeyboardButton(text=key, url=link)) + self.__header_button.append(InlineKeyboardButton(text=key, url=link)) + elif position == 'f_body': + self.__first_body_button.append(InlineKeyboardButton(text=key, url=link)) + elif position == 'l_body': + self.__last_body_button.append(InlineKeyboardButton(text=key, url=link)) elif position == 'footer': - self.__footer_button.append( - InlineKeyboardButton(text=key, url=link)) + self.__footer_button.append(InlineKeyboardButton(text=key, url=link)) def ibutton(self, key, data, position=None): if not position: - self.__button.append(InlineKeyboardButton( - text=key, callback_data=data)) + self.__button.append(InlineKeyboardButton(text=key, callback_data=data)) elif position == 'header': - self.__header_button.append( - InlineKeyboardButton(text=key, callback_data=data)) + self.__header_button.append(InlineKeyboardButton(text=key, callback_data=data)) + elif position == 'f_body': + self.__first_body_button.append(InlineKeyboardButton(text=key, callback_data=data)) + elif position == 'l_body': + self.__last_body_button.append(InlineKeyboardButton(text=key, callback_data=data)) elif position == 'footer': - self.__footer_button.append( - InlineKeyboardButton(text=key, callback_data=data)) + self.__footer_button.append(InlineKeyboardButton(text=key, callback_data=data)) - def build_menu(self, b_cols=1, h_cols=8, f_cols=8): + def build_menu(self, b_cols=1, h_cols=8, fb_cols=2, lb_cols=2, f_cols=8): menu = [self.__button[i:i+b_cols] for i in range(0, len(self.__button), b_cols)] if self.__header_button: - h_cnt = len(self.__header_button) - if h_cnt > h_cols: + if len(self.__header_button) > h_cols: header_buttons = [self.__header_button[i:i+h_cols] for i in range(0, len(self.__header_button), h_cols)] menu = header_buttons + menu else: menu.insert(0, self.__header_button) + if self.__first_body_button: + if len(self.__first_body_button) > fb_cols: + [menu.append(self.__first_body_button[i:i+fb_cols]) + for i in range(0, len(self.__first_body_button), fb_cols)] + else: + menu.append(self.__first_body_button) + if self.__last_body_button: + if len(self.__last_body_button) > lb_cols: + [menu.append(self.__last_body_button[i:i+lb_cols]) + for i in range(0, len(self.__last_body_button), lb_cols)] + else: + menu.append(self.__last_body_button) if self.__footer_button: if len(self.__footer_button) > f_cols: [menu.append(self.__footer_button[i:i+f_cols]) diff --git a/bot/helper/telegram_helper/filters.py b/bot/helper/telegram_helper/filters.py index c1c98479a0..68a1fd248d 100644 --- a/bot/helper/telegram_helper/filters.py +++ b/bot/helper/telegram_helper/filters.py @@ -18,9 
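With the two new body sections, `ButtonMaker` now lays rows out as header, body, `f_body`, `l_body`, footer. A usage sketch, assuming the repo's module is importable:

```python
# Sketch: building a menu with the new f_body/l_body positions.
from bot.helper.telegram_helper.button_build import ButtonMaker

btns = ButtonMaker()
btns.ubutton("Header", "https://example.com/header", position="header")
btns.ubutton("Body A", "https://example.com/a")
btns.ubutton("Body B", "https://example.com/b")
btns.ubutton("First body", "https://example.com/first", position="f_body")
btns.ubutton("Last body", "https://example.com/last", position="l_body")
btns.ubutton("Footer", "https://example.com/footer", position="footer")

# Rows: [Header], [Body A, Body B], [First body], [Last body], [Footer]
markup = btns.build_menu(b_cols=2)
```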
+18,20 @@ async def owner_filter(self, _, message): async def authorized_user(self, _, message): user = message.from_user or message.sender_chat uid = user.id + if bool(uid == OWNER_ID or (uid in user_data and (user_data[uid].get('is_auth', False) or user_data[uid].get('is_sudo', False)))): + return True + + auth_chat = False chat_id = message.chat.id - return bool(uid == OWNER_ID or (uid in user_data and (user_data[uid].get('is_auth', False) or - user_data[uid].get('is_sudo', False))) or (chat_id in user_data and user_data[chat_id].get('is_auth', False))) + if chat_id in user_data and user_data[chat_id].get('is_auth', False): + if len(topic_ids := user_data[chat_id].get('topic_ids', [])) == 0: + auth_chat = True + elif (is_forum := message.reply_to_message) and ((is_forum.text is None and is_forum.caption is None and is_forum.id in topic_ids) + or ((is_forum.text or is_forum.caption) and ((not is_forum.reply_to_top_message_id and is_forum.reply_to_message_id in topic_ids) + or (is_forum.reply_to_top_message_id in topic_ids)))): + auth_chat = True + return auth_chat + authorized = create(authorized_user) async def authorized_usetting(self, _, message): @@ -53,6 +64,6 @@ async def sudo_user(self, _, message): async def blacklist_user(self, _, message): user = message.from_user or message.sender_chat uid = user.id - return bool(uid in user_data and user_data[uid].get('is_blacklist') and uid != OWNER_ID) + return bool(uid != OWNER_ID and uid in user_data and user_data[uid].get('is_blacklist')) blacklisted = create(blacklist_user) diff --git a/bot/helper/telegram_helper/message_utils.py b/bot/helper/telegram_helper/message_utils.py index e337a3fd9e..9ae40cee09 100644 --- a/bot/helper/telegram_helper/message_utils.py +++ b/bot/helper/telegram_helper/message_utils.py @@ -6,6 +6,7 @@ from time import time from re import match as re_match +from pyrogram.enums import ParseMode from pyrogram.types import InputMediaPhoto from pyrogram.errors import ReplyMarkupInvalid, FloodWait, PeerIdInvalid, ChannelInvalid, RPCError, UserNotParticipant, MessageNotModified, MessageEmpty, PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty @@ -15,14 +16,14 @@ from bot.helper.ext_utils.exceptions import TgLinkException -async def sendMessage(message, text, buttons=None, photo=None): +async def sendMessage(message, text, buttons=None, photo=None, **kwargs): try: if photo: try: if photo == 'IMAGES': photo = rchoice(config_dict['IMAGES']) return await message.reply_photo(photo=photo, reply_to_message_id=message.id, - caption=text, reply_markup=buttons, disable_notification=True) + caption=text, reply_markup=buttons, disable_notification=True, **kwargs) except IndexError: pass except (PhotoInvalidDimensions, WebpageCurlFailed, MediaEmpty): @@ -33,13 +34,15 @@ async def sendMessage(message, text, buttons=None, photo=None): except Exception as e: LOGGER.error(format_exc()) return await message.reply(text=text, quote=True, disable_web_page_preview=True, - disable_notification=True, reply_markup=buttons) + disable_notification=True, reply_markup=buttons, **kwargs) except FloodWait as f: LOGGER.warning(str(f)) await sleep(f.value * 1.2) return await sendMessage(message, text, buttons, photo) except ReplyMarkupInvalid: return await sendMessage(message, text, None, photo) + except MessageEmpty: + return await sendMessage(message, text, parse_mode=ParseMode.DISABLED) except Exception as e: LOGGER.error(format_exc()) return str(e) @@ -71,8 +74,6 @@ async def sendCustomMsg(chat_id, text, buttons=None, photo=None, debug=False): 
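The new `authorized_user` branching is dense, but the rule it implements is small: an authorized chat with no `topic_ids` accepts commands anywhere, while a non-empty list restricts them to the listed forum topics. A simplified standalone sketch of that decision (the real filter also derives the topic id from the replied message):

```python
# Sketch of the topic-aware authorization rule in authorized_user.
def chat_is_authorized(chat_data, topic_id=None):
    if not chat_data.get('is_auth', False):
        return False
    topic_ids = chat_data.get('topic_ids', [])
    return not topic_ids or topic_id in topic_ids


assert chat_is_authorized({'is_auth': True})                           # whole chat
assert chat_is_authorized({'is_auth': True, 'topic_ids': [42]}, 42)    # listed topic
assert not chat_is_authorized({'is_auth': True, 'topic_ids': [42]}, 7)
```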
except ReplyMarkupInvalid: return await sendCustomMsg(chat_id, text, None, photo) except Exception as e: - if debug: - raise e LOGGER.error(format_exc()) return str(e) @@ -95,6 +96,8 @@ async def chat_info(channel_id): async def sendMultiMessage(chat_ids, text, buttons=None, photo=None): msg_dict = {} for channel_id in chat_ids.split(): + channel_id, *topic_id = channel_id.split(':') + topic_id = int(topic_id[0]) if len(topic_id) else None chat = await chat_info(channel_id) try: if photo: @@ -102,8 +105,8 @@ async def sendMultiMessage(chat_ids, text, buttons=None, photo=None): if photo == 'IMAGES': photo = rchoice(config_dict['IMAGES']) sent = await bot.send_photo(chat_id=chat.id, photo=photo, caption=text, - reply_markup=buttons, disable_notification=True) - msg_dict[chat.id] = sent + reply_markup=buttons, reply_to_message_id=topic_id, disable_notification=True) + msg_dict[f"{chat.id}:{topic_id}"] = sent continue except IndexError: pass @@ -115,8 +118,8 @@ async def sendMultiMessage(chat_ids, text, buttons=None, photo=None): except Exception as e: LOGGER.error(str(e)) sent = await bot.send_message(chat_id=chat.id, text=text, disable_web_page_preview=True, - disable_notification=True, reply_markup=buttons) - msg_dict[chat.id] = sent + disable_notification=True, reply_to_message_id=topic_id, reply_markup=buttons) + msg_dict[f"{chat.id}:{topic_id}"] = sent except FloodWait as f: LOGGER.warning(str(f)) await sleep(f.value * 1.2) @@ -173,10 +176,10 @@ async def sendFile(message, file, caption=None, buttons=None): async def sendRss(text): try: if user: - return await user.send_message(chat_id=config_dict['RSS_CHAT_ID'], text=text, disable_web_page_preview=True, + return await user.send_message(chat_id=config_dict['RSS_CHAT'], text=text, disable_web_page_preview=True, disable_notification=True) else: - return await bot.send_message(chat_id=config_dict['RSS_CHAT_ID'], text=text, disable_web_page_preview=True, + return await bot.send_message(chat_id=config_dict['RSS_CHAT'], text=text, disable_web_page_preview=True, disable_notification=True) except FloodWait as f: LOGGER.warning(str(f)) @@ -293,11 +296,11 @@ async def sendStatusMessage(msg): message = status_reply_dict[chat_id][0] await deleteMessage(message) del status_reply_dict[chat_id] - message = await sendMessage(msg, progress, buttons, photo='IMAGES') - if hasattr(message, 'caption'): - message.caption = progress - else: - message.text = progress + if message := await sendMessage(msg, progress, buttons, photo='IMAGES'): + if hasattr(message, 'caption'): + message.caption = progress + else: + message.text = progress status_reply_dict[chat_id] = [message, time()] if not Interval: Interval.append(setInterval(config_dict['STATUS_UPDATE_INTERVAL'], update_all_messages)) diff --git a/bot/helper/themes/wzml_minimal.py b/bot/helper/themes/wzml_minimal.py index 1ce4ee56d9..db84d929fd 100644 --- a/bot/helper/themes/wzml_minimal.py +++ b/bot/helper/themes/wzml_minimal.py @@ -10,7 +10,34 @@ class WZMLStyle: Type {help_command} to get a list of available commands''' ST_BOTPM = '''Now, This bot will send all your files and links here. Start Using ...''' ST_UNAUTH = '''You Are not authorized user! 
Deploy your own WZML-X Mirror-Leech bot''' + OWN_TOKEN_GENERATE = '''Temporary Token is not yours!\n\nKindly generate your own.''' + USED_TOKEN = '''Temporary Token already used!\n\nKindly generate a new one.''' + LOGGED_PASSWORD = '''Bot Already Logged In via Password\n\nNo Need to Accept Temp Tokens.''' + ACTIVATE_BUTTON = 'Activate Temporary Token' + TOKEN_MSG = '''Generated Temporary Login Token! +Temp Token: {token} +Validity: {validity}''' # --------------------- + # async def token_callback(_, query): ---> __main__.py + ACTIVATED = 'βœ…οΈ Activated βœ…' + # --------------------- + # async def login(_, message): --> __main__.py + LOGGED_IN = 'Already Bot Login In!' + INVALID_PASS = 'Invalid Password!\n\nKindly put the correct Password .' + PASS_LOGGED = 'Bot Permanent Login Successfully!' + LOGIN_USED = 'Bot Login Usage :\n\n/cmd [password]' + # --------------------- + # async def log(_, message): ---> __main__.py + LOG_DISPLAY_BT = 'πŸ“‘ Log Display' + WEB_PASTE_BT = 'πŸ“¨ Web Paste (SB)' + # --------------------- + # async def bot_help(client, message): ---> __main__.py + BASIC_BT = 'Basic' + USER_BT = 'Users' + MICS_BT = 'Mics' + O_S_BT = 'Owner & Sudos' + CLOSE_BT = 'Close' + HELP_HEADER = "γŠ‚ Help Guide Menu!\n\nNOTE: Click on any CMD to see more minor detalis." # async def stats(client, message): BOT_STATS = '''⌬ BOT STATISTICS : @@ -120,7 +147,8 @@ class WZMLStyle: L_CC = 'β”– By: {Tag}\n\n' PM_BOT_MSG = '➲ File(s) have been Sent above' L_BOT_MSG = '➲ File(s) have been Sent to Bot PM (Private)' - L_LL_MSG = '➲ File(s) have been Sent. Access via Links...' + L_LL_MSG = '➲ File(s) have been Sent. Access via Links...\n' + L_PM_WARN = '➲ BOT PM is Off turn it ON to get the Leech Index Link' # ----- MIRROR ------- M_TYPE = 'β”  Type: {Mimetype}\n' @@ -129,7 +157,7 @@ class WZMLStyle: RCPATH = 'β”  Path: {RCpath}\n' M_CC = 'β”– By: {Tag}\n\n' M_BOT_MSG = '➲ Link(s) have been Sent to Bot PM (Private)' - + M_PM_WARN = '➲ BOT PM is Off turn it ON to get the Mirror Link' # ----- BUTTONS ------- CLOUD_LINK = '☁️ Cloud Link' SAVE_MSG = 'πŸ“¨ Save Message' @@ -142,6 +170,7 @@ class WZMLStyle: CHECK_PM = 'πŸ“₯ View in Bot PM' CHECK_LL = 'πŸ–‡ View in Links Log' MEDIAINFO_LINK = 'πŸ“ƒ MediaInfo' + SCREENSHOTS = 'πŸ–Ό ScreenShots' # --------------------- # def get_readable_message(): ---> bot_utilis.py diff --git a/bot/modules/anilist.py b/bot/modules/anilist.py index cae1b37332..83c5e25e64 100644 --- a/bot/modules/anilist.py +++ b/bot/modules/anilist.py @@ -207,24 +207,31 @@ async def anilist(_, msg, aniid=None, u_id=None): else: user_id = int(u_id) vars = {'id' : aniid} - animeResp = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': vars}).json()['data'].get('Media', None) - if animeResp: + if ( + animeResp := rpost( + url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': vars} + ) + .json()['data'] + .get('Media', None) + ): ro_title = animeResp['title']['romaji'] na_title = animeResp['title']['native'] en_title = animeResp['title']['english'] - format = animeResp['format'] - if format: format = format.capitalize() - status = animeResp['status'] - if status: status = status.capitalize() + if format := animeResp['format']: + format = format.capitalize() + if status := animeResp['status']: + status = status.capitalize() year = animeResp['seasonYear'] or 'N/A' try: sd = animeResp['startDate'] if sd['day'] and sd['year']: startdate = f"{month_name[sd['month']]} {sd['day']}, {sd['year']}" - except: startdate = "" + except Exception: + startdate = "" try: ed = animeResp['endDate'] if 
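All of the AniList lookups in this module share one shape: POST a GraphQL document plus variables to `https://graphql.anilist.co` and read `data.Media` out of the JSON. A minimal version with the query trimmed to a few fields (the module's real `ANIME_GRAPHQL_QUERY` is far larger):

```python
# Minimal sketch of the AniList GraphQL call pattern used throughout anilist.py.
from requests import post as rpost

URL = "https://graphql.anilist.co"
QUERY = """
query ($id: Int) {
  Media(id: $id, type: ANIME) {
    title { romaji english native }
    status
    seasonYear
  }
}
"""

resp = rpost(URL, json={"query": QUERY, "variables": {"id": 1}}).json()
if media := resp["data"].get("Media"):
    print(media["title"]["romaji"], media["status"], media["seasonYear"])
```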
ed['day'] and ed['year']: enddate = f"{month_name[ed['month']]} {ed['day']}, {ed['year']}" - except: enddate = "" + except Exception: + enddate = "" season = f"{animeResp['season'].capitalize()} {animeResp['seasonYear']}" conname = (conn.get(alpha_2=animeResp['countryOfOrigin'])).name try: @@ -235,13 +242,14 @@ async def anilist(_, msg, aniid=None, u_id=None): episodes = animeResp.get('episodes', 'N/A') try: duration = f"{get_readable_time(animeResp['duration']*60)}" - except: duration = "N/A" + except Exception: + duration = "N/A" avgscore = f"{animeResp['averageScore']}%" or '' genres = ", ".join(f"{GENRES_EMOJI[x]} #{x.replace(' ', '_').replace('-', '_')}" for x in animeResp['genres']) studios = ", ".join(f"""{x['name']}""" for x in animeResp['studios']['nodes']) source = animeResp['source'] or '-' hashtag = animeResp['hashtag'] or 'N/A' - synonyms = ", ".join(x for x in animeResp['synonyms']) or '' + synonyms = ", ".join(animeResp['synonyms']) or '' siteurl = animeResp.get('siteUrl') trailer = animeResp.get('trailer', None) if trailer and trailer.get('site') == "youtube": @@ -278,11 +286,10 @@ async def anilist(_, msg, aniid=None, u_id=None): LOGGER.error(f"AniList Error: {e}") if aniid: return template, btns.build_menu(3) - else: - try: - await sendMessage(msg, template, btns.build_menu(3), photo=title_img) - except: - await sendMessage(msg, template, btns.build_menu(3), photo='https://te.legra.ph/file/8a5155c0fc61cc2b9728c.jpg') + try: + await sendMessage(msg, template, btns.build_menu(3), photo=title_img) + except Exception: + await sendMessage(msg, template, btns.build_menu(3), photo='https://te.legra.ph/file/8a5155c0fc61cc2b9728c.jpg') async def setAnimeButtons(client, query): @@ -299,30 +306,41 @@ async def setAnimeButtons(client, query): await query.answer() if data[2] == "tags": aniTag = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None) - msg = "Tags :\n\n" - msg += "\n".join(f"""{x['name']} {x['rank']}%""" for x in aniTag['tags']) + msg = "Tags :\n\n" + "\n".join( + f"""{x['name']} {x['rank']}%""" + for x in aniTag['tags'] + ) elif data[2] == "sts": links = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None) - msg = "External & Streaming Links :\n\n" - msg += "\n".join(f"""{x['site']}""" for x in links['externalLinks']) + msg = "External & Streaming Links :\n\n" + "\n".join( + f"""{x['site']}""" + for x in links['externalLinks'] + ) elif data[2] == "rev": animeResp = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None) - msg = "Reviews :\n\n" reList = animeResp['reviews']['nodes'] - msg += "\n\n".join(f"""{x['summary']}\nScore : {x['score']} / 100\nBy {x['user']['name']}""" for x in reList[:8]) + msg = "Reviews :\n\n" + "\n\n".join( + f"""{x['summary']}\nScore : {x['score']} / 100\nBy {x['user']['name']}""" + for x in reList[:8] + ) elif data[2] == "rel": animeResp = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None) - msg = "Relations :\n\n" - msg += "\n\n".join(f"""{x['node']['title']['english']} ({x['node']['title']['romaji']})\nFormat: {x['node']['format'].capitalize()}\nStatus: {x['node']['status'].capitalize()}\nAverage Score: {x['node']['averageScore']}%\nSource: {x['node']['source'].capitalize()}\nRelation Type: {x.get('relationType', 'N/A').capitalize()}""" for x in animeResp['relations']['edges']) + msg = "Relations :\n\n" + 
"\n\n".join( + f"""{x['node']['title']['english']} ({x['node']['title']['romaji']})\nFormat: {x['node']['format'].capitalize()}\nStatus: {x['node']['status'].capitalize()}\nAverage Score: {x['node']['averageScore']}%\nSource: {x['node']['source'].capitalize()}\nRelation Type: {x.get('relationType', 'N/A').capitalize()}""" + for x in animeResp['relations']['edges'] + ) elif data[2] == "cha": animeResp = rpost(url, json={'query': ANIME_GRAPHQL_QUERY, 'variables': {'id' : siteid}}).json()['data'].get('Media', None) - msg = "List of Characters :\n\n" - msg += "\n\n".join(f"""β€’ {x['node']['name']['full']} ({x['node']['name']['native']})\nRole : {x['role'].capitalize()}""" for x in (animeResp['characters']['edges'])[:8]) + msg = "List of Characters :\n\n" + "\n\n".join( + f"""β€’ {x['node']['name']['full']} ({x['node']['name']['native']})\nRole : {x['role'].capitalize()}""" + for x in (animeResp['characters']['edges'])[:8] + ) elif data[2] == "home": msg, btns = await anilist(client, message, siteid, data[1]) await editMessage(message, msg, btns) return await editMessage(message, msg, btns.build_menu(1)) + return async def character(_, message, aniid=None, u_id=None): @@ -338,8 +356,11 @@ async def character(_, message, aniid=None, u_id=None): else: vars = {'id': aniid} user_id = int(u_id) - json = rpost(url, json={'query': character_query, 'variables': vars}).json()['data'].get('Character', None) - if json: + if ( + json := rpost(url, json={'query': character_query, 'variables': vars}) + .json()['data'] + .get('Character', None) + ): msg = f"{json.get('name').get('full')} ({json.get('name').get('native')})\n\n" description = json['description'] site_url = json.get('siteUrl') @@ -353,16 +374,14 @@ async def character(_, message, aniid=None, u_id=None): if len(description) > 700: description = f"{description[:700]}...." msg += markdown(description).replace('

    ', '').replace('

    ', '') - image = json.get('image', None) - if image: + if image := json.get('image', None): img = image.get('large') if aniid: return msg, rlp_mk - else: - if img: - await sendMessage(message, msg, rlp_mk, img) - else: - await sendMessage(message, msg) + if img: + await sendMessage(message, msg, rlp_mk, img) + else: + await sendMessage(message, msg) async def setCharacButtons(client, query): @@ -418,7 +437,7 @@ async def manga(_, message): msg = msg.replace('
    ', '').replace('', '').replace('', '') try: await sendMessage(message, msg, buttons.build_menu(1), image) - except: + except Exception: msg += f" [〽️]({image})" await sendMessage(message, msg, buttons.build_menu(1)) diff --git a/bot/modules/authorize.py b/bot/modules/authorize.py index 101e28787f..189a9e07b2 100644 --- a/bot/modules/authorize.py +++ b/bot/modules/authorize.py @@ -2,7 +2,7 @@ from pyrogram.handlers import MessageHandler from pyrogram.filters import command, regex -from bot import user_data, DATABASE_URL, bot +from bot import user_data, DATABASE_URL, bot, LOGGER from bot.helper.telegram_helper.message_utils import sendMessage from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.telegram_helper.bot_commands import BotCommands @@ -12,32 +12,64 @@ async def authorize(client, message): msg = message.text.split() + tid_ = "" if len(msg) > 1: - id_ = int(msg[1].strip()) - elif reply_to := message.reply_to_message: + nid_ = msg[1].split(':') + id_ = int(nid_[0]) + if len(nid_) > 1: + tid_ = int(nid_[1]) + elif (reply_to := message.reply_to_message) and (reply_to.text is None and reply_to.caption is None): + id_ = message.chat.id + tid_ = message.reply_to_message_id + elif reply_to: id_ = reply_to.from_user.id else: id_ = message.chat.id if id_ in user_data and user_data[id_].get('is_auth'): msg = 'Already Authorized!' + if tid_: + if tid_ not in (tids_ := user_data[id_].get('topic_ids', [])): + tids_.append(tid_) + update_user_ldata(id_, 'topic_ids', tids_) + if DATABASE_URL: + await DbManger().update_user_data(id_) + msg = 'Topic Authorized!' + else: + msg = 'Topic Already Authorized!' else: update_user_ldata(id_, 'is_auth', True) + if tid_: + update_user_ldata(id_, 'topic_ids', [tid_]) + msg = 'Topic Authorized!' 
+ else: + msg = 'Authorized' if DATABASE_URL: await DbManger().update_user_data(id_) - msg = 'Authorized' await sendMessage(message, msg) async def unauthorize(client, message): msg = message.text.split() + tid_ = "" if len(msg) > 1: - id_ = int(msg[1].strip()) + nid_ = msg[1].split(':') + id_ = int(nid_[0]) + if len(nid_) > 1: + tid_ = int(nid_[1]) + elif (reply_to := message.reply_to_message) and (reply_to.text is None and reply_to.caption is None): + id_ = message.chat.id + tid_ = message.reply_to_message_id elif reply_to := message.reply_to_message: id_ = reply_to.from_user.id else: id_ = message.chat.id + tids_ = [] + if tid_ and id_ in user_data and tid_ in (tids_ := user_data[id_].get('topic_ids', [])): + tids_.remove(tid_) + update_user_ldata(id_, 'topic_ids', tids_) if id_ not in user_data or user_data[id_].get('is_auth'): - update_user_ldata(id_, 'is_auth', False) + if not tids_: + update_user_ldata(id_, 'is_auth', False) if DATABASE_URL: await DbManger().update_user_data(id_) msg = 'Unauthorized' diff --git a/bot/modules/bot_settings.py b/bot/modules/bot_settings.py index b0e1e7108a..6ba243e031 100644 --- a/bot/modules/bot_settings.py +++ b/bot/modules/bot_settings.py @@ -5,7 +5,7 @@ from pyrogram.enums import ChatType from functools import partial from collections import OrderedDict -from asyncio import create_subprocess_exec, create_subprocess_shell, sleep +from asyncio import create_subprocess_exec, create_subprocess_shell, sleep, gather from aiofiles.os import remove, rename, path as aiopath from aiofiles import open as aiopen from os import environ, getcwd @@ -49,7 +49,7 @@ } bool_vars = ['AS_DOCUMENT', 'BOT_PM', 'STOP_DUPLICATE', 'SET_COMMANDS', 'SAVE_MSG', 'SHOW_MEDIAINFO', 'SOURCE_LINK', 'SAFE_MODE', 'SHOW_EXTRA_CMDS', 'IS_TEAM_DRIVE', 'USE_SERVICE_ACCOUNTS', 'WEB_PINCODE', 'EQUAL_SPLITS', 'DISABLE_DRIVE_LINK', 'DELETE_LINKS', 'CLEAN_LOG_MSG', 'USER_TD_MODE', - 'INCOMPLETE_TASK_NOTIFIER'] + 'INCOMPLETE_TASK_NOTIFIER', 'UPGRADE_PACKAGES'] async def load_config(): @@ -104,7 +104,11 @@ async def load_config(): if len(AUTHORIZED_CHATS) != 0: aid = AUTHORIZED_CHATS.split() for id_ in aid: - user_data[int(id_.strip())] = {'is_auth': True} + chat_id, *topic_ids = id_.split(':') + chat_id = int(chat_id) + user_data.setdefault(chat_id, {'is_auth': True}) + if topic_ids: + user_data[chat_id].setdefault('topic_ids', []).extend(map(int, topic_ids)) SUDO_USERS = environ.get('SUDO_USERS', '') if len(SUDO_USERS) != 0: @@ -122,10 +126,9 @@ async def load_config(): if len(EXTENSION_FILTER) > 0: fx = EXTENSION_FILTER.split() GLOBAL_EXTENSION_FILTER.clear() - GLOBAL_EXTENSION_FILTER.append('aria2') + GLOBAL_EXTENSION_FILTER.extend(['aria2', '!qB']) for x in fx: - if x.strip().startswith('.'): - x = x.lstrip('.') + x = x.lstrip('.') GLOBAL_EXTENSION_FILTER.append(x.strip().lower()) MEGA_EMAIL = environ.get('MEGA_EMAIL', '') @@ -142,9 +145,13 @@ async def load_config(): if len(GDTOT_CRYPT) == 0: GDTOT_CRYPT = '' - DEBRID_API_KEY = environ.get('DEBRID_API_KEY', '') - if len(DEBRID_API_KEY) == 0: - DEBRID_API_KEY = '' + REAL_DEBRID_API = environ.get('REAL_DEBRID_API', '') + if len(REAL_DEBRID_API) == 0: + REAL_DEBRID_API = '' + + DEBRID_LINK_API = environ.get('DEBRID_LINK_API', '') + if len(DEBRID_LINK_API) == 0: + DEBRID_LINK_API = '' INDEX_URL = environ.get('INDEX_URL', '').rstrip("/") if len(INDEX_URL) == 0: @@ -227,8 +234,8 @@ async def load_config(): STATUS_LIMIT = environ.get('STATUS_LIMIT', '') STATUS_LIMIT = 10 if len(STATUS_LIMIT) == 0 else int(STATUS_LIMIT) - RSS_CHAT_ID = 
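`AUTHORIZED_CHATS` entries may now carry one or more topic suffixes, e.g. `-100123 -100456:10:22`. The parsing loop added to `load_config` reduces to this standalone sketch (example value is mine):

```python
# Sketch: parse AUTHORIZED_CHATS entries of the form "chat[:topic[:topic...]]".
user_data = {}
AUTHORIZED_CHATS = "-100123 -100456:10:22"   # illustrative value

for entry in AUTHORIZED_CHATS.split():
    chat_id, *topic_ids = entry.split(':')
    chat_id = int(chat_id)
    user_data.setdefault(chat_id, {'is_auth': True})
    if topic_ids:
        user_data[chat_id].setdefault('topic_ids', []).extend(map(int, topic_ids))

assert user_data[-100456] == {'is_auth': True, 'topic_ids': [10, 22]}
```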
environ.get('RSS_CHAT_ID', '') - RSS_CHAT_ID = '' if len(RSS_CHAT_ID) == 0 else int(RSS_CHAT_ID) + RSS_CHAT = environ.get('RSS_CHAT', '') + RSS_CHAT = '' if len(RSS_CHAT) == 0 else int(RSS_CHAT) RSS_DELAY = environ.get('RSS_DELAY', '') RSS_DELAY = 900 if len(RSS_DELAY) == 0 else int(RSS_DELAY) @@ -341,6 +348,9 @@ async def load_config(): UPSTREAM_REPO = environ.get('UPSTREAM_REPO', '') if len(UPSTREAM_REPO) == 0: UPSTREAM_REPO = '' + + UPGRADE_PACKAGES = environ.get('UPGRADE_PACKAGES', '') + UPGRADE_PACKAGES = UPGRADE_PACKAGES.lower() == 'true' UPSTREAM_BRANCH = environ.get('UPSTREAM_BRANCH', '') if len(UPSTREAM_BRANCH) == 0: @@ -469,6 +479,10 @@ async def load_config(): if len(LOGIN_PASS) == 0: LOGIN_PASS = None + FILELION_API = environ.get('FILELION_API', '') + if len(FILELION_API) == 0: + FILELION_API = '' + DEF_IMDB_TEMP = environ.get('IMDB_TEMPLATE', '') if len(DEF_IMDB_TEMP) == 0: DEF_IMDB_TEMP = '''Title: {title} [{year}] @@ -577,7 +591,9 @@ async def load_config(): 'CAP_FONT': CAP_FONT, 'CMD_SUFFIX': CMD_SUFFIX, 'DATABASE_URL': DATABASE_URL, - 'DEBRID_API_KEY': DEBRID_API_KEY, + 'REAL_DEBRID_API': REAL_DEBRID_API, + 'DEBRID_LINK_API': DEBRID_LINK_API, + 'FILELION_API': FILELION_API, 'DELETE_LINKS': DELETE_LINKS, 'DEFAULT_UPLOAD': DEFAULT_UPLOAD, 'DOWNLOAD_DIR': DOWNLOAD_DIR, @@ -643,7 +659,7 @@ async def load_config(): 'RCLONE_SERVE_USER': RCLONE_SERVE_USER, 'RCLONE_SERVE_PASS': RCLONE_SERVE_PASS, 'RCLONE_SERVE_PORT': RCLONE_SERVE_PORT, - 'RSS_CHAT_ID': RSS_CHAT_ID, + 'RSS_CHAT': RSS_CHAT, 'RSS_DELAY': RSS_DELAY, 'SAVE_MSG': SAVE_MSG, 'SAFE_MODE': SAFE_MODE, @@ -665,6 +681,7 @@ async def load_config(): 'TORRENT_TIMEOUT': TORRENT_TIMEOUT, 'UPSTREAM_REPO': UPSTREAM_REPO, 'UPSTREAM_BRANCH': UPSTREAM_BRANCH, + 'UPGRADE_PACKAGES': UPGRADE_PACKAGES, 'UPTOBOX_TOKEN': UPTOBOX_TOKEN, 'USER_SESSION_STRING': USER_SESSION_STRING, 'USER_TD_MODE':USER_TD_MODE, @@ -675,9 +692,7 @@ async def load_config(): if DATABASE_URL: await DbManger().update_config(config_dict) - await initiate_search_tools() - await start_from_queued() - await rclone_serve_booter() + await gather(initiate_search_tools(), start_from_queued(), rclone_serve_booter()) async def get_buttons(key=None, edit_type=None, edit_mode=None, mess=None): @@ -793,7 +808,7 @@ async def edit_variable(_, message, pre_message, key): elif key == 'DOWNLOAD_DIR': if not value.endswith('/'): value += '/' - elif key in ['LINKS_LOG_ID', 'RSS_CHAT_ID']: + elif key in ['LINKS_LOG_ID', 'RSS_CHAT']: value = int(value) elif key == 'STATUS_UPDATE_INTERVAL': value = int(value) @@ -830,7 +845,7 @@ async def edit_variable(_, message, pre_message, key): elif key == 'EXTENSION_FILTER': fx = value.split() GLOBAL_EXTENSION_FILTER.clear() - GLOBAL_EXTENSION_FILTER.append('.aria2') + GLOBAL_EXTENSION_FILTER.extend(['aria2', '!qB']) for x in fx: if x.strip().startswith('.'): x = x.lstrip('.') @@ -1068,7 +1083,7 @@ async def edit_bot_settings(client, query): value, update_all_messages)) elif data[2] == 'EXTENSION_FILTER': GLOBAL_EXTENSION_FILTER.clear() - GLOBAL_EXTENSION_FILTER.append('.aria2') + GLOBAL_EXTENSION_FILTER.extend(['aria2', '!qB']) elif data[2] == 'TORRENT_TIMEOUT': downloads = await sync_to_async(aria2.get_downloads) for download in downloads: diff --git a/bot/modules/broadcast.py b/bot/modules/broadcast.py index ae5b074849..ed52881bb8 100644 --- a/bot/modules/broadcast.py +++ b/bot/modules/broadcast.py @@ -85,7 +85,7 @@ async def broadcast(_, message): except FloodWait as e: await sleep(e.value) await msg.edit(text=rply.text, entities=rply.entities, 
reply_markup=rply.reply_markup) - except: + except Exception: u += 1 t += 1 return await editMessage(temp_wait, f'''⌬ Broadcast Edited Stats : @@ -124,7 +124,7 @@ async def broadcast(_, message): except InputUserDeactivated: await DbManger().rm_pm_user(uid) d += 1 - except: + except Exception: u += 1 if bc_msg: bc_msgs.append(bc_msg) @@ -133,7 +133,10 @@ async def broadcast(_, message): await editMessage(pls_wait, status.format(**locals())) updater = time() bc_cache[bc_hash] = bc_msgs - await editMessage(pls_wait, status.format(**locals()) + f"\n\nElapsed Time: {get_readable_time(time() - start_time)}\nBroadcast ID: {bc_hash}") + await editMessage( + pls_wait, + f"{status.format(**locals())}\n\nElapsed Time: {get_readable_time(time() - start_time)}\nBroadcast ID: {bc_hash}", + ) bot.add_handler(MessageHandler(broadcast, filters=command(BotCommands.BroadcastCommand) & CustomFilters.sudo)) \ No newline at end of file diff --git a/bot/modules/category_select.py b/bot/modules/category_select.py index a4d54714bc..5b67ff88bd 100644 --- a/bot/modules/category_select.py +++ b/bot/modules/category_select.py @@ -131,7 +131,7 @@ async def confirm_dump(client, query): user_dumps = await fetch_user_dumps(user_id) cat_name = data[3].replace('_', ' ') upall = cat_name == "All" - bot_cache[msg_id][0] = user_dumps[cat_name] if not upall else list(user_dumps.values()) + bot_cache[msg_id][0] = list(user_dumps.values()) if upall else user_dumps[cat_name] buttons = ButtonMaker() if user_dumps: for _name in user_dumps.keys(): diff --git a/bot/modules/clone.py b/bot/modules/clone.py index 391ca25655..1db2b975b1 100644 --- a/bot/modules/clone.py +++ b/bot/modules/clone.py @@ -1,8 +1,7 @@ #!/usr/bin/env python3 from pyrogram.handlers import MessageHandler from pyrogram.filters import command -from random import SystemRandom -from string import ascii_letters, digits +from secrets import token_hex from asyncio import sleep, gather from aiofiles.os import path as aiopath from cloudscraper import create_scraper as cget @@ -58,12 +57,13 @@ async def rcloneNode(client, message, link, dst_path, rcf, tag): if config_path != f'rclone/{message.from_user.id}.conf': await sendMessage(message, 'You should use same rclone.conf to clone between paths!') return + dst_path = dst_path.lstrip('mrcc:') elif config_path != 'rclone.conf': await sendMessage(message, 'You should use same rclone.conf to clone between paths!') return remote, src_path = link.split(':', 1) - src_path = src_path .strip('/') + src_path = src_path.strip('/') cmd = ['rclone', 'lsjson', '--fast-list', '--stat', '--no-modtime', '--config', config_path, f'{remote}:{src_path}'] @@ -87,7 +87,7 @@ async def rcloneNode(client, message, link, dst_path, rcf, tag): RCTransfer = RcloneTransferHelper(listener, name) LOGGER.info(f'Clone Started: Name: {name} - Source: {link} - Destination: {dst_path}') - gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12)) + gid = token_hex(5) async with download_dict_lock: download_dict[message.id] = RcloneStatus( RCTransfer, message, gid, 'cl', listener.upload_details) @@ -149,7 +149,7 @@ async def gdcloneNode(message, link, listen_up): button = await get_telegraph_list(telegraph_content) await sendMessage(message, msg, button) return - listener = MirrorLeechListener(message, tag=listen_up[0], isClone=True, drive_id=listen_up[1], index_link=listen_up[2], source_url=org_link if org_link else link) + listener = MirrorLeechListener(message, tag=listen_up[0], isClone=True, drive_id=listen_up[1], index_link=listen_up[2], 
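On the GID change in `clone.py`: `token_hex(5)` draws 5 random bytes from the OS CSPRNG and renders them as 10 hex characters, a terser replacement for the old 12-character `SystemRandom` string. For illustration:

```python
# token_hex(n) returns 2*n hex characters from a cryptographically secure source.
from secrets import token_hex

gid = token_hex(5)   # e.g. '9f1c04ab3e'
assert len(gid) == 10 and all(c in '0123456789abcdef' for c in gid)
```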
source_url=org_link or link) if limit_exceeded := await limit_checker(size, listener): await sendMessage(listener.message, limit_exceeded) return @@ -161,7 +161,7 @@ async def gdcloneNode(message, link, listen_up): link, size, mime_type, files, folders = await sync_to_async(drive.clone, link, listener.drive_id) await deleteMessage(msg) else: - gid = ''.join(SystemRandom().choices(ascii_letters + digits, k=12)) + gid = token_hex(5) async with download_dict_lock: download_dict[message.id] = GdriveStatus( drive, size, message, gid, 'cl', listener.upload_details) @@ -195,7 +195,7 @@ async def clone(client, message): try: multi = int(args['-i']) - except: + except Exception: multi = 0 dst_path = args['-up'] or args['-upload'] @@ -204,7 +204,7 @@ async def clone(client, message): drive_id = args['-id'] index_link = args['-index'] gd_cat = args['-c'] or args['-category'] - + if username := message.from_user.username: tag = f"@{username}" else: diff --git a/bot/modules/eval.py b/bot/modules/eval.py index f3e0f28a7f..83220dff16 100644 --- a/bot/modules/eval.py +++ b/bot/modules/eval.py @@ -2,12 +2,14 @@ from pyrogram.handlers import MessageHandler from pyrogram.filters import command from os import path as ospath, getcwd, chdir +from aiofiles import open as aiopen from traceback import format_exc from textwrap import indent from io import StringIO, BytesIO -from contextlib import redirect_stdout +from re import match +from contextlib import redirect_stdout, suppress -from bot import LOGGER, bot +from bot import LOGGER, bot, user from bot.helper.telegram_helper.filters import CustomFilters from bot.helper.telegram_helper.bot_commands import BotCommands from bot.helper.telegram_helper.message_utils import sendFile, sendMessage @@ -21,15 +23,13 @@ def namespace_of(message): '__builtins__': globals()['__builtins__'], 'bot': bot, 'message': message, - 'user': message.from_user or message.sender_chat, - 'chat': message.chat} - + 'user': user, + } return namespaces[message.chat.id] def log_input(message): - LOGGER.info( - f"IN: {message.text} (user={message.from_user.id}, chat={message.chat.id})") + LOGGER.info(f"INPUT: {message.text} (User ID ={message.from_user.id} | Chat ID ={message.chat.id})") async def send(msg, message): @@ -38,8 +38,12 @@ async def send(msg, message): out_file.name = "output.txt" await sendFile(message, out_file) else: - LOGGER.info(f"OUT: '{msg}'") - await sendMessage(message, f"{msg}") + LOGGER.info(f"OUTPUT: '{msg}'") + if not msg or msg == '\n': + msg = "MessageEmpty" + elif not bool(match(r'<(spoiler|b|i|code|s|u|/a)>', msg)): + msg = f"{msg}" + await sendMessage(message, msg) @new_task @@ -65,8 +69,8 @@ async def do(func, message): env = namespace_of(message) chdir(getcwd()) - with open(ospath.join(getcwd(), 'bot/modules/temp.txt'), 'w') as temp: - temp.write(body) + async with aiopen(ospath.join(getcwd(), 'bot/modules/temp.txt'), 'w') as temp: + await temp.write(body) stdout = StringIO() @@ -92,10 +96,8 @@ async def do(func, message): if value: result = f'{value}' else: - try: + with suppress(Exception): result = f'{repr(eval(body, env))}' - except: - pass else: result = f'{value}{func_return}' if result: @@ -107,7 +109,9 @@ async def clear(client, message): global namespaces if message.chat.id in namespaces: del namespaces[message.chat.id] - await send("Locals Cleared.", message) + await send("Cached Locals Cleared !", message) + else: + await send("No Cache Locals Found !", message) bot.add_handler(MessageHandler(evaluate, filters=command( diff --git 
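The eval command's output handling rests on one trick: run the user's compiled body while `redirect_stdout` captures anything it prints. A toy standalone version of that capture, without the bot plumbing:

```python
# Sketch of the stdout-capture pattern behind the eval command.
from contextlib import redirect_stdout
from io import StringIO

buf = StringIO()
with redirect_stdout(buf):
    exec("print(2 + 2)", {})   # stands in for the user-supplied body
assert buf.getvalue() == "4\n"
```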
a/bot/modules/gen_pyro_sess.py b/bot/modules/gen_pyro_sess.py index 89e1ecfadd..f8eb3769ea 100644 --- a/bot/modules/gen_pyro_sess.py +++ b/bot/modules/gen_pyro_sess.py @@ -139,7 +139,8 @@ async def genPyroString(client, message): try: await aioremove(f'WZML-X-{message.from_user.id}.session') await aioremove(f'WZML-X-{message.from_user.id}.session-journal') - except: pass + except Exception: + pass async def set_details(_, message, newkey): diff --git a/bot/modules/images.py b/bot/modules/images.py index 29bab52293..f74e4a00fb 100644 --- a/bot/modules/images.py +++ b/bot/modules/images.py @@ -20,13 +20,12 @@ async def picture_add(_, message): editable = await sendMessage(message, "Fetching Input ...") if len(message.command) > 1 or resm and resm.text: msg_text = resm.text if resm else message.command[1] - if msg_text.startswith("http"): - pic_add = msg_text.strip() - await editMessage(editable, f"Adding your Link : {pic_add}") - else: + if not msg_text.startswith("http"): return await editMessage(editable, "Not a Valid Link, Must Start with 'http'") + pic_add = msg_text.strip() + await editMessage(editable, f"Adding your Link : {pic_add}") elif resm and resm.photo: - if not (resm.photo and resm.photo.file_size <= 5242880*2): + if resm.photo.file_size > 5242880 * 2: return await editMessage(editable, "Media is Not Supported! Only Photos!!") try: photo_dir = await resm.download() @@ -53,12 +52,12 @@ async def picture_add(_, message): async def pictures(_, message): - user_id = message.from_user.id if not config_dict['IMAGES']: await sendMessage(message, f"No Photo to Show ! Add by /{BotCommands.AddImageCommand}") else: to_edit = await sendMessage(message, "Generating Grid of your Images...") buttons = ButtonMaker() + user_id = message.from_user.id buttons.ibutton("<<", f"images {user_id} turn -1") buttons.ibutton(">>", f"images {user_id} turn 1") buttons.ibutton("Remove Image", f"images {user_id} remov 0") diff --git a/bot/modules/imdb.py b/bot/modules/imdb.py index 2cf008de69..fbfd5488a1 100644 --- a/bot/modules/imdb.py +++ b/bot/modules/imdb.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 +from contextlib import suppress from re import findall, IGNORECASE from imdb import Cinemagoer from pycountry import countries as conn @@ -28,10 +29,10 @@ async def imdb_search(_, message): buttons = ButtonMaker() if title.lower().startswith("https://www.imdb.com/title/tt"): movieid = title.replace("https://www.imdb.com/title/tt", "") - movie = imdb.get_movie(movieid) - if not movie: + if movie := imdb.get_movie(movieid): + buttons.ibutton(f"🎬 {movie.get('title')} ({movie.get('year')})", f"imdb {user_id} movie {movieid}") + else: return await editMessage(k, "No Results Found") - buttons.ibutton(f"🎬 {movie.get('title')} ({movie.get('year')})", f"imdb {user_id} movie {movieid}") else: movies = get_poster(title, bulk=True) if not movies: @@ -62,14 +63,10 @@ def get_poster(query, bulk=False, id=False, file=None): if not movieid: return None if year: - filtered=list(filter(lambda k: str(k.get('year')) == str(year), movieid)) - if not filtered: - filtered = movieid + filtered = list(filter(lambda k: str(k.get('year')) == str(year), movieid)) or movieid else: filtered = movieid - movieid=list(filter(lambda k: k.get('kind') in ['movie', 'tv series'], filtered)) - if not movieid: - movieid = filtered + movieid = list(filter(lambda k: k.get('kind') in ['movie', 'tv series'], filtered)) or filtered if bulk: return movieid movieid = movieid[0].movieID @@ -83,10 +80,7 @@ def get_poster(query, bulk=False, id=False, 
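The tightened `get_poster` relies on the `or`-fallback idiom: filter the candidates, but keep the original list whenever the filter leaves nothing. In isolation:

```python
# "Filter, but fall back to the unfiltered list if nothing matches."
results = [{'year': 1999, 'kind': 'movie'}, {'year': 2003, 'kind': 'tv series'}]

filtered = list(filter(lambda k: str(k.get('year')) == '2010', results)) or results
assert filtered == results   # no 2010 entry, so everything is kept
```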
file=None): else: date = "N/A" plot = movie.get('plot') - if plot and len(plot) > 0: - plot = plot[0] - else: - plot = movie.get('plot outline') + plot = plot[0] if plot and len(plot) > 0 else movie.get('plot outline') if plot and len(plot) > 300: plot = f"{plot[:300]}..." return { @@ -152,11 +146,9 @@ def list_to_hash(k, flagg=False, emoji=False): for elem in k: ele = elem.replace(" ", "_").replace("-", "_") if flagg: - try: + with suppress(AttributeError): conflag = (conn.get(name=elem)).flag listing += f'{conflag} ' - except AttributeError: - pass if emoji: listing += f"{IMDB_GENRE_EMOJI.get(elem, '')} " listing += f'#{ele}, ' diff --git a/bot/modules/mediainfo.py b/bot/modules/mediainfo.py index 5f78900481..4b16305e1c 100644 --- a/bot/modules/mediainfo.py +++ b/bot/modules/mediainfo.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -import aiohttp +from aiohttp import ClientSession from re import search as re_search from shlex import split as ssplit from aiofiles import open as aiopen @@ -27,7 +27,7 @@ async def gen_mediainfo(message, link=None, media=None, mmsg=None): filename = re_search(".+/(.+)", link).group(1) des_path = ospath.join(path, filename) headers = {"user-agent":"Mozilla/5.0 (Linux; Android 12; 2201116PI) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Mobile Safari/537.36"} - async with aiohttp.ClientSession() as session: + async with ClientSession() as session: async with session.get(link, headers=headers) as response: async with aiopen(des_path, "wb") as f: async for chunk in response.content.iter_chunked(10000000): @@ -85,11 +85,24 @@ async def mediainfo(_, message): link = rply.text if rply else message.command[1] return await gen_mediainfo(message, link) elif rply: - file = next((i for i in [rply.document, rply.video, rply.audio, rply.voice, - rply.animation, rply.video_note] if i is not None), None) - if not file: + if file := next( + ( + i + for i in [ + rply.document, + rply.video, + rply.audio, + rply.voice, + rply.animation, + rply.video_note, + ] + if i is not None + ), + None, + ): + return await gen_mediainfo(message, None, file, rply) + else: return await sendMessage(message, help_msg) - return await gen_mediainfo(message, None, file, rply) else: return await sendMessage(message, help_msg) diff --git a/bot/modules/mirror_leech.py b/bot/modules/mirror_leech.py index 42ce65b952..9b8a733142 100644 --- a/bot/modules/mirror_leech.py +++ b/bot/modules/mirror_leech.py @@ -9,6 +9,7 @@ from cloudscraper import create_scraper from bot import bot, DOWNLOAD_DIR, LOGGER, config_dict, bot_name, categories_dict, user_data +from bot.helper.mirror_utils.download_utils.direct_downloader import add_direct_download from bot.helper.ext_utils.bot_utils import is_url, is_magnet, is_mega_link, is_gdrive_link, get_content_type, new_task, sync_to_async, is_rclone_path, is_telegram_link, arg_parser, fetch_user_tds, fetch_user_dumps, get_stats from bot.helper.ext_utils.exceptions import DirectDownloadLinkException from bot.helper.ext_utils.task_manager import task_utils @@ -36,7 +37,7 @@ async def _mirror_leech(client, message, isQbit=False, isLeech=False, sameDir=No input_list = text[0].split(' ') arg_base = {'link': '', - '-i': 0, + '-i': '0', '-m': '', '-sd': '', '-samedir': '', '-d': False, '-seed': False, '-j': False, '-join': False, @@ -54,15 +55,15 @@ async def _mirror_leech(client, message, isQbit=False, isLeech=False, sameDir=No '-index': '', '-c': '', '-category': '', '-ud': '', '-dump': '', + '-h': '', '-headers': '', + '-ss': '0', '-screenshots': '', + '-t': '', '-thumb': 
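The refactored media pick in `mediainfo` is the standard `next()`-with-default pattern: take the first non-`None` attribute from a fixed priority list, or fall back to the help text. Sketch, with a plain class standing in for a pyrogram message:

```python
# Sketch: pick the first present media attribute, else None.
class Reply:                      # stand-in for a pyrogram message
    document, video, audio = None, "video-object", None
    voice = animation = video_note = None

rply = Reply()
file = next((m for m in [rply.document, rply.video, rply.audio,
                         rply.voice, rply.animation, rply.video_note]
             if m is not None), None)
assert file == "video-object"
```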
'', } args = arg_parser(input_list[1:], arg_base) cmd = input_list[0].split('@')[0] - try: - multi = int(args['-i']) - except: - multi = 0 + multi = int(args['-i']) if args['-i'].isdigit() else 0 link = args['link'] folder_name = args['-m'] or args['-sd'] or args['-samedir'] @@ -79,6 +80,11 @@ async def _mirror_leech(client, message, isQbit=False, isLeech=False, sameDir=No index_link = args['-index'] gd_cat = args['-c'] or args['-category'] user_dump = args['-ud'] or args['-dump'] + headers = args['-h'] or args['-headers'] + ussr = args['-u'] or args['-user'] + pssw = args['-p'] or args['-pass'] + thumb = args['-t'] or args['-thumb'] + sshots = int(ss) if (ss := (args['-ss'] or args['-screenshots'])).isdigit() else 0 bulk_start = 0 bulk_end = 0 ratio = None @@ -228,17 +234,26 @@ async def __run_multi(): LOGGER.info(link) org_link = link - if not is_mega_link(link) and not isQbit and not is_magnet(link) and not is_rclone_path(link) \ - and not is_gdrive_link(link) and not link.endswith('.torrent') and file_ is None: + if (not is_mega_link(link) or (is_mega_link(link) and not config_dict['MEGA_EMAIL'] and config_dict['DEBRID_LINK_API'])) \ + and (not is_magnet(link) or (config_dict['REAL_DEBRID_API'] and is_magnet(link))) \ + and (not isQbit or (config_dict['REAL_DEBRID_API'] and is_magnet(link))) \ + and not is_rclone_path(link) and not is_gdrive_link(link) and not link.endswith('.torrent') and file_ is None: content_type = await get_content_type(link) if content_type is None or re_match(r'text/html|text/plain', content_type): process_msg = await sendMessage(message, f"Processing: {link}") try: + if not is_magnet(link) and (ussr or pssw): + link = (link, (ussr, pssw)) link = await sync_to_async(direct_link_generator, link) - LOGGER.info(f"Generated link: {link}") - await editMessage(process_msg, f"Generated link: {link}") + if isinstance(link, tuple): + link, headers = link + if isinstance(link, str): + LOGGER.info(f"Generated link: {link}") + await editMessage(process_msg, f"Generated link: {link}") except DirectDownloadLinkException as e: - LOGGER.info(str(e)) + e = str(e) + if 'This link requires a password!' 
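The `-i` and `-ss` flags are now kept as strings and validated with `isdigit()`, which replaces the old bare `try/except` and silently maps anything non-numeric to `0`. The pattern on its own:

```python
# Sketch: tolerant integer flags without try/except.
args = {'-i': '3', '-ss': 'abc'}

multi = int(args['-i']) if args['-i'].isdigit() else 0
sshots = int(ss) if (ss := args['-ss']).isdigit() else 0
assert (multi, sshots) == (3, 0)
```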
not in e: + LOGGER.info(e) if str(e).startswith('ERROR:'): await editMessage(process_msg, str(e)) await delete_links(message) @@ -318,11 +333,14 @@ async def __run_multi(): return listener = MirrorLeechListener(message, compress, extract, isQbit, isLeech, tag, select, seed, - sameDir, rcf, up, join, drive_id=drive_id, index_link=index_link, source_url=org_link if org_link else link) + sameDir, rcf, up, join, drive_id=drive_id, index_link=index_link, + source_url=org_link or link, leech_utils={'screenshots': sshots, 'thumb': thumb}) if file_ is not None: await delete_links(message) await TelegramDownloadHelper(listener).add_download(reply_to, f'{path}/', name, session) + elif isinstance(link, dict): + await add_direct_download(link, path, listener, name) elif is_rclone_path(link): if link.startswith('mrcc:'): link = link.split('mrcc:', 1)[1] @@ -340,16 +358,16 @@ async def __run_multi(): elif is_mega_link(link): await delete_links(message) await add_mega_download(link, f'{path}/', listener, name) - elif isQbit: + elif isQbit and 'real-debrid' not in link: await add_qb_torrent(link, path, listener, ratio, seed_time) - else: - ussr = args['-u'] or args['-user'] - pssw = args['-p'] or args['-pass'] + elif not is_telegram_link(link): if ussr or pssw: auth = f"{ussr}:{pssw}" - auth = "Basic " + b64encode(auth.encode()).decode('ascii') + auth = f"authorization: Basic {b64encode(auth.encode()).decode('ascii')}" else: auth = '' + if headers: + auth += f'{auth} {headers}' await add_aria2c_download(link, path, listener, name, auth, ratio, seed_time) await delete_links(message) @@ -390,11 +408,13 @@ def parseline(line): async with aiopen('log.txt', 'r') as f: logFile = await f.read() cget = create_scraper().request - resp = cget('POST', 'http://stashbin.xyz/api/document', data={'content': logFile}).json() - if resp['ok']: + resp = cget('POST', 'https://spaceb.in/api/v1/documents', data={'content': logFile, 'extension': 'None'}).json() + if resp['status'] == 201: btn = ButtonMaker() - btn.ubutton('πŸ“¨ Web Paste', f"http://stashbin.xyz/{resp['data']['key']}") + btn.ubutton('πŸ“¨ Web Paste (SB)', f"https://spaceb.in/{resp['payload']['id']}") await editReplyMarkup(message, btn.build_menu(1)) + else: + LOGGER.error(f"Web Paste Failed : {str(err)}") elif data[2] == "botpm": await query.answer(url=f"https://t.me/{bot_name}?start=wzmlx") elif data[2] == "help": diff --git a/bot/modules/mydramalist.py b/bot/modules/mydramalist.py index 215c7daa93..70116f45c2 100644 --- a/bot/modules/mydramalist.py +++ b/bot/modules/mydramalist.py @@ -1,11 +1,12 @@ #!/usr/bin/env python3 +from contextlib import suppress from aiohttp import ClientSession from requests import get as rget from urllib.parse import quote as q from pycountry import countries as conn from pyrogram.filters import command, regex -from pyrogram.handlers import MessageHandler, CallbackQueryHandler +from pyrogram.handlers import MessageHandler, CallbackQueryHandler from pyrogram.errors import MediaEmpty, PhotoInvalidDimensions, WebpageMediaEmpty, ReplyMarkupInvalid from bot import LOGGER, bot, config_dict, user_data @@ -103,11 +104,9 @@ def list_to_hash(k, flagg=False, emoji=False): for elem in k: ele = elem.replace(" ", "_").replace("-", "_") if flagg: - try: + with suppress(AttributeError): conflag = (conn.get(name=elem)).flag listing += f'{conflag} ' - except AttributeError: - pass if emoji: listing += f"{IMDB_GENRE_EMOJI.get(elem, '')} " listing += f'#{ele}, ' diff --git a/bot/modules/rss.py b/bot/modules/rss.py index 44d0d300df..23fb7a8aba 100644 --- 
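The aria2c credential handling above is ordinary HTTP Basic auth: base64-encode `user:pass` and pass it as an `authorization` header. Standalone, with made-up credentials:

```python
# Build the "authorization: Basic ..." header value handed to aria2c above.
from base64 import b64encode

user, password = "alice", "s3cret"   # illustrative credentials
token = b64encode(f"{user}:{password}".encode()).decode("ascii")
assert f"authorization: Basic {token}" == "authorization: Basic YWxpY2U6czNjcmV0"
```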
diff --git a/bot/modules/rss.py b/bot/modules/rss.py
index 44d0d300df..23fb7a8aba 100644
--- a/bot/modules/rss.py
+++ b/bot/modules/rss.py
@@ -145,7 +145,12 @@ async def rssSub(client, message, pre_event):
     if msg:
         await sendMessage(message, msg)
     await updateRssMenu(pre_event)
-
+    is_sudo = await CustomFilters.sudo(client, message)
+    if scheduler.state == 2:
+        scheduler.resume()
+    elif is_sudo and not scheduler.running:
+        addJob(config_dict['RSS_DELAY'])
+        scheduler.start()

 async def getUserId(title):
     async with rss_dict_lock:
@@ -554,8 +559,8 @@ async def rssListener(client, query):


 async def rssMonitor():
-    if not config_dict['RSS_CHAT_ID']:
-        LOGGER.warning('RSS_CHAT_ID not added! Shutting down rss scheduler...')
+    if not config_dict['RSS_CHAT']:
+        LOGGER.warning('RSS_CHAT not added! Shutting down rss scheduler...')
         scheduler.shutdown(wait=False)
         return
     if len(rss_dict) == 0:
@@ -564,7 +569,6 @@ async def rssMonitor():
     all_paused = True
     for user, items in list(rss_dict.items()):
         for title, data in list(items.items()):
-            await sleep(0)
             try:
                 if data['paused']:
                     continue
@@ -576,11 +580,11 @@ async def rssMonitor():
                 try:
                     last_link = rss_d.entries[0]['links'][1]['href']
                 except IndexError:
                     last_link = rss_d.entries[0]['link']
+                finally:
+                    all_paused = False
                 last_title = rss_d.entries[0]['title']
                 if data['last_feed'] == last_link or data['last_title'] == last_title:
-                    all_paused = False
                     continue
-                all_paused = False
                 feed_count = 0
                 while True:
                     try:
@@ -637,7 +641,7 @@ async def rssMonitor():
                 break
             except Exception as e:
                 LOGGER.error(
-                    f"{e} Feed Name: {title} - Feed Link: {data['link']}")
+                    f"{e} - Feed Name: {title} - Feed Link: {data['link']}")
                 continue
     if all_paused:
         scheduler.pause()
@@ -647,7 +651,6 @@ def addJob(delay):
     scheduler.add_job(rssMonitor, trigger=IntervalTrigger(seconds=delay), id='0', name='RSS',
                       misfire_grace_time=15, max_instances=1, next_run_time=datetime.now()+timedelta(seconds=20), replace_existing=True)

-addJob(config_dict['RSS_DELAY'])
 scheduler.start()

 bot.add_handler(MessageHandler(getRssMenu, filters=command(
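The magic number in `scheduler.state == 2` is APScheduler's `STATE_PAUSED`. A small sketch of the same resume-or-start pattern using the named constants (function name and wiring are illustrative):

```python
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.schedulers.base import STATE_PAUSED, STATE_STOPPED

scheduler = AsyncIOScheduler()

def ensure_rss_monitor(delay: int, schedule_job) -> None:
    # Resume a paused scheduler rather than re-adding the interval job;
    # only schedule and start when nothing is running yet.
    if scheduler.state == STATE_PAUSED:
        scheduler.resume()
    elif scheduler.state == STATE_STOPPED:
        schedule_job(delay)  # e.g. the patch's addJob(config_dict['RSS_DELAY'])
        scheduler.start()
```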
diff --git a/bot/modules/save_msg.py b/bot/modules/save_msg.py
index fdb65076e4..9bf1018d41 100644
--- a/bot/modules/save_msg.py
+++ b/bot/modules/save_msg.py
@@ -15,7 +15,7 @@ async def save_message(_, query):
     try:
         await query.message.copy(usr, reply_markup=InlineKeyboardMarkup(BTN) if (BTN := query.message.reply_markup.inline_keyboard[:-1]) else None)
         await query.answer("Message/Media Successfully Saved !", show_alert=True)
-    except:
+    except Exception:
         if user_dict.get('save_mode'):
             await query.answer('Make Bot as Admin and give Post Permissions and Try Again', show_alert=True)
         else:
diff --git a/bot/modules/speedtest.py b/bot/modules/speedtest.py
index 11e9753eaf..6952e452e7 100644
--- a/bot/modules/speedtest.py
+++ b/bot/modules/speedtest.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-from speedtest import Speedtest
+from speedtest import Speedtest, ConfigRetrievalError
 from pyrogram.handlers import MessageHandler
 from pyrogram.filters import command
@@ -11,8 +11,12 @@
 @new_task
 async def speedtest(_, message):
-    speed = await sendMessage(message, "Initializing Speedtest...")
-    test = Speedtest()
+    speed = await sendMessage(message, "Initiating Speedtest...")
+    try:
+        test = Speedtest()
+    except ConfigRetrievalError:
+        await editMessage(speed, "ERROR: Can't connect to Server at the Moment, Try Again Later !")
+        return
     test.get_best_server()
     test.download()
     test.upload()
@@ -49,7 +53,7 @@ async def speedtest(_, message):
         await deleteMessage(speed)
     except Exception as e:
         LOGGER.error(str(e))
-        pho = await editMessage(speed, string_speed)
+        await editMessage(speed, string_speed)

 bot.add_handler(MessageHandler(speedtest, filters=command(
     BotCommands.SpeedCommand) & CustomFilters.authorized & ~CustomFilters.blacklisted))
    " try: msg += f"Seeders: {result['seeders']} | Leechers: {result['leechers']}
    " - except: + except Exception: pass if 'torrent' in result.keys(): msg += f"Direct Link

    " diff --git a/bot/modules/torrent_select.py b/bot/modules/torrent_select.py index fa821394fd..364ecc3bf8 100644 --- a/bot/modules/torrent_select.py +++ b/bot/modules/torrent_select.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 +from contextlib import suppress from pyrogram.handlers import MessageHandler, CallbackQueryHandler from pyrogram.filters import regex from aiofiles.os import remove as aioremove, path as aiopath @@ -62,7 +63,7 @@ async def select(client, message): LOGGER.error( f"{e} Error in pause, this mostly happens after abuse aria2") listener.select = True - except: + except Exception: await sendMessage(message, "This is not a bittorrent task!") return @@ -102,20 +103,16 @@ async def get_confirm(client, query): f_paths = [f"{path}/{f.name}", f"{path}/{f.name}.!qB"] for f_path in f_paths: if await aiopath.exists(f_path): - try: + with suppress(Exception): await aioremove(f_path) - except: - pass if not dl.queued: await sync_to_async(client.torrents_resume, torrent_hashes=id_) else: res = await sync_to_async(aria2.client.get_files, id_) for f in res: if f['selected'] == 'false' and await aiopath.exists(f['path']): - try: + with suppress(Exception): await aioremove(f['path']) - except: - pass if not dl.queued: try: await sync_to_async(aria2.client.unpause, id_) diff --git a/bot/modules/users_settings.py b/bot/modules/users_settings.py index e332e223e8..cb7c9d66c6 100644 --- a/bot/modules/users_settings.py +++ b/bot/modules/users_settings.py @@ -20,6 +20,7 @@ from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper from bot.helper.ext_utils.db_handler import DbManger from bot.helper.ext_utils.bot_utils import getdailytasks, update_user_ldata, get_readable_file_size, sync_to_async, new_thread, is_gdrive_link +from bot.helper.mirror_utils.upload_utils.ddlserver.gofile import Gofile from bot.helper.themes import BotTheme handler_dict = {} @@ -36,9 +37,9 @@ 'yt_opt': ['YT-DLP Options is the Custom Quality for the extraction of videos from the yt-dlp supported sites.', 'Send YT-DLP Options. Timeout: 60 sec\nFormat: key:value|key:value|key:value.\nExample: format:bv*+mergeall[vcodec=none]|nocheckcertificate:True\nCheck all yt-dlp api options from this FILE to convert cli arguments to api options.'], 'split_size': ['Leech Splits Size is the size to split the Leeched File before uploading', f'Send Leech split size in any comfortable size, like 2Gb, 500MB or 1.46gB. \nPREMIUM ACTIVE: {IS_PREMIUM_USER}. \nTimeout: 60 sec'], 'ddl_servers': ['DDL Servers which uploads your File to their Specific Hosting', ''], - 'user_tds': [f'UserTD helps to Upload files via Bot to your Custom Drive Destination via Global SA mail\n\n➲ SA Mail : {SA if (SA := config_dict["USER_TD_SA"]) else "Not Specified"}', 'Send User TD details for Use while Mirror/Clone\n➲ Format: \nname id/link index(optional)\nname2 link2/id2 index(optional)\n\nNOTE: \n1. Drive ID must be valid, then only it will accept\n2. Names can have spaces \n3. All UserTDs are updated on every change \n4. To delete specific UserTD, give Name(s) separated by each line\n\nTimeout: 60 sec'], - 'gofile': ['Gofile is a free file sharing and storage platform. You can store and share your content without any limit.', "Send GoFile's API Key. 
diff --git a/bot/modules/users_settings.py b/bot/modules/users_settings.py
index e332e223e8..cb7c9d66c6 100644
--- a/bot/modules/users_settings.py
+++ b/bot/modules/users_settings.py
@@ -20,6 +20,7 @@
 from bot.helper.mirror_utils.upload_utils.gdriveTools import GoogleDriveHelper
 from bot.helper.ext_utils.db_handler import DbManger
 from bot.helper.ext_utils.bot_utils import getdailytasks, update_user_ldata, get_readable_file_size, sync_to_async, new_thread, is_gdrive_link
+from bot.helper.mirror_utils.upload_utils.ddlserver.gofile import Gofile
 from bot.helper.themes import BotTheme

 handler_dict = {}
@@ -36,9 +37,9 @@
     'yt_opt': ['YT-DLP Options is the Custom Quality for the extraction of videos from the yt-dlp supported sites.', 'Send YT-DLP Options. Timeout: 60 sec\nFormat: key:value|key:value|key:value.\nExample: format:bv*+mergeall[vcodec=none]|nocheckcertificate:True\nCheck all yt-dlp api options from this FILE to convert cli arguments to api options.'],
     'split_size': ['Leech Splits Size is the size to split the Leeched File before uploading', f'Send Leech split size in any comfortable size, like 2Gb, 500MB or 1.46gB. \nPREMIUM ACTIVE: {IS_PREMIUM_USER}. \nTimeout: 60 sec'],
     'ddl_servers': ['DDL Servers which uploads your File to their Specific Hosting', ''],
-    'user_tds': [f'UserTD helps to Upload files via Bot to your Custom Drive Destination via Global SA mail\n\n➲ SA Mail : {SA if (SA := config_dict["USER_TD_SA"]) else "Not Specified"}', 'Send User TD details for Use while Mirror/Clone\n➲ Format: \nname id/link index(optional)\nname2 link2/id2 index(optional)\n\nNOTE: \n1. Drive ID must be valid, then only it will accept\n2. Names can have spaces \n3. All UserTDs are updated on every change \n4. To delete specific UserTD, give Name(s) separated by each line\n\nTimeout: 60 sec'],
-    'gofile': ['Gofile is a free file sharing and storage platform. You can store and share your content without any limit.', "Send GoFile's API Key. Get it on https://gofile.io/myProfile\nTimeout: 60 sec"],
-    'streamsb': ['StreamSB', "Send StreamSB's API Key\nTimeout: 60 sec"],
+    'user_tds': [f'UserTD helps to Upload files via Bot to your Custom Drive Destination via Global SA mail\n\n➲ SA Mail : {"Not Specified" if "USER_TD_SA" not in config_dict else config_dict["USER_TD_SA"]}', 'Send User TD details for Use while Mirror/Clone\n➲ Format:\nname id/link index(optional)\nname2 link2/id2 index(optional)\n\nNOTE:\n1. Drive ID must be valid, then only it will accept\n2. Names can have spaces\n3. All UserTDs are updated on every change\n4. To delete specific UserTD, give Name(s) separated by each line\n\nTimeout: 60 sec'],
+    'gofile': ['Gofile is a free file sharing and storage platform. You can store and share your content without any limit.', "Send GoFile's API Key. Get it on https://gofile.io/myProfile, It will not be Accepted if the API Key is Invalid !!\nTimeout: 60 sec"],
+    'streamtape': ['Streamtape', "Send StreamTape's Login and Key\nFormat: user_login:pass_key\nTimeout: 60 sec"],
 }

 fname_dict = {'rcc': 'RClone',
               'lprefix': 'Prefix',
@@ -55,7 +56,7 @@
               'ddl_servers': 'DDL Servers',
               'user_tds': 'User Custom TDs',
               'gofile': 'GoFile',
-              'streamsb': 'StreamSB',
+              'streamtape': 'StreamTape',
               }

 async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None):
@@ -89,7 +90,7 @@ async def get_user_settings(from_user, key=None, edit_type=None, edit_mode=None)
         mediainfo = "Force Enabled"
     save_mode = "Save As Dump" if user_dict.get('save_mode') else "Save As BotPM"
     buttons.ibutton('Save As BotPM' if save_mode == 'Save As Dump' else 'Save As Dump', f"userset {user_id} save_mode")
-    dailytl = config_dict['DAILY_TASK_LIMIT'] if config_dict['DAILY_TASK_LIMIT'] else "♾️"
+    dailytl = config_dict['DAILY_TASK_LIMIT'] or "♾️"
     dailytas = user_dict.get('dly_tasks')[1] if user_dict and user_dict.get('dly_tasks') and user_id != OWNER_ID and config_dict['DAILY_TASK_LIMIT'] else config_dict.get('DAILY_TASK_LIMIT', "♾️") if user_id != OWNER_ID else "♾️"
     if user_dict.get('dly_tasks', False):
         t = str(datetime.now() - user_dict['dly_tasks'][0]).split(':')
@@ -117,7 +118,7 @@
     ddl_serv = len(val) if (val := user_dict.get('ddl_servers', False)) else 0
     buttons.ibutton("DDL Servers", f"userset {user_id} ddl_servers")

-    tds_mode = "Enabled" if user_dict.get('td_mode', config_dict['BOT_PM']) else "Disabled"
+    tds_mode = "Enabled" if user_dict.get('td_mode', False) else "Disabled"
     if not config_dict['USER_TD_MODE']:
         tds_mode = "Force Disabled"
@@ -141,25 +142,25 @@
         dailytlle = get_readable_file_size(config_dict['DAILY_LEECH_LIMIT'] * 1024**3) if config_dict['DAILY_LEECH_LIMIT'] else "️∞"
         dailyll = get_readable_file_size(await getdailytasks(user_id, check_leech=True)) if config_dict['DAILY_LEECH_LIMIT'] and user_id != OWNER_ID else "∞"

-        buttons.ibutton("Thumbnail", f"userset {user_id} thumb")
         thumbmsg = "Exists" if await aiopath.exists(thumbpath) else "Not Exists"
-
-        buttons.ibutton("Leech Splits", f"userset {user_id} split_size")
+        buttons.ibutton(f"{'βœ…οΈ' if thumbmsg == 'Exists' else ''} Thumbnail", f"userset {user_id} thumb")
+
         split_size = get_readable_file_size(config_dict['LEECH_SPLIT_SIZE']) + ' (Default)' if user_dict.get('split_size', '') == '' else get_readable_file_size(user_dict['split_size'])
         equal_splits = 'Enabled' if user_dict.get('equal_splits', config_dict.get('EQUAL_SPLITS')) else 'Disabled'
         media_group = 'Enabled' if user_dict.get('media_group', config_dict.get('MEDIA_GROUP')) else 'Disabled'
+        buttons.ibutton(f"{'βœ…οΈ' if user_dict.get('split_size', False) else ''} Leech Splits", f"userset {user_id} split_size")

-        buttons.ibutton("Leech Caption", f"userset {user_id} lcaption")
         lcaption = 'Not Exists' if (val:=user_dict.get('lcaption', config_dict.get('LEECH_FILENAME_CAPTION', ''))) == '' else val
+        buttons.ibutton(f"{'βœ…οΈ' if lcaption != 'Not Exists' else ''} Leech Caption", f"userset {user_id} lcaption")

-        buttons.ibutton("Leech Prefix", f"userset {user_id} lprefix")
         lprefix = 'Not Exists' if (val:=user_dict.get('lprefix', config_dict.get('LEECH_FILENAME_PREFIX', ''))) == '' else val
+        buttons.ibutton(f"{'βœ…οΈ' if lprefix != 'Not Exists' else ''} Leech Prefix", f"userset {user_id} lprefix")

-        buttons.ibutton("Leech Suffix", f"userset {user_id} lsuffix")
         lsuffix = 'Not Exists' if (val:=user_dict.get('lsuffix', config_dict.get('LEECH_FILENAME_SUFFIX', ''))) == '' else val
-
-        buttons.ibutton("Leech Remname", f"userset {user_id} lremname")
+        buttons.ibutton(f"{'βœ…οΈ' if lsuffix != 'Not Exists' else ''} Leech Suffix", f"userset {user_id} lsuffix")
+        lremname = 'Not Exists' if (val:=user_dict.get('lremname', config_dict.get('LEECH_FILENAME_REMNAME', ''))) == '' else val
+        buttons.ibutton(f"{'βœ…οΈ' if lremname != 'Not Exists' else ''} Leech Remname", f"userset {user_id} lremname")

         buttons.ibutton("Leech Dump", f"userset {user_id} ldump")
         ldump = 'Not Exists' if (val:=user_dict.get('ldump', '')) == '' else len(val)
@@ -174,16 +175,17 @@
         buttons.ibutton("Close", f"userset {user_id} close", "footer")
         button = buttons.build_menu(2)
     elif key == "ddl_servers":
-        ddl_serv = 0
+        ddl_serv, serv_list = 0, []
         if (ddl_dict := user_dict.get('ddl_servers', False)):
-            for _, (enabled, _) in ddl_dict.items():
+            for serv, (enabled, _) in ddl_dict.items():
                 if enabled:
+                    serv_list.append(serv)
                     ddl_serv += 1
         text = f"γŠ‚ {fname_dict[key]} Settings :\n\n" \
                f"➲ Enabled DDL Server(s) : {ddl_serv}\n\n" \
                f"➲ Description : {desp_dict[key][0]}"
-        for btn in ['gofile', 'streamsb']:
-            buttons.ibutton(fname_dict[btn], f"userset {user_id} {btn}")
+        for btn in ['gofile', 'streamtape']:
+            buttons.ibutton(f"{'βœ…οΈ' if btn in serv_list else ''} {fname_dict[btn]}", f"userset {user_id} {btn}")
         buttons.ibutton("Back", f"userset {user_id} back mirror", "footer")
         buttons.ibutton("Close", f"userset {user_id} close", "footer")
         button = buttons.build_menu(2)
@@ -217,7 +219,7 @@
     elif key in ['mprefix', 'mremname', 'msuffix']:
         set_exist = 'Not Exists' if (val:=user_dict.get(key, config_dict.get(f'MIRROR_FILENAME_{key[1:].upper()}', ''))) == '' else val
         text += f"➲ Mirror Filename {fname_dict[key]} : {set_exist}\n\n"
-    elif key in ['gofile', 'streamsb']:
+    elif key in ['gofile', 'streamtape']:
         set_exist = 'Exists' if key in (ddl_dict:=user_dict.get('ddl_servers', {})) and ddl_dict[key][1] and ddl_dict[key][1] != '' else 'Not Exists'
         ddl_mode = 'Enabled' if key in (ddl_dict:=user_dict.get('ddl_servers', {})) and ddl_dict[key][0] else 'Disabled'
         text = f"➲ Upload {fname_dict[key]} : {ddl_mode}\n" \
@@ -225,7 +227,7 @@
         buttons.ibutton('Disable DDL' if ddl_mode == 'Enabled' else 'Enable DDL', f"userset {user_id} s{key}", "header")
     elif key == 'user_tds':
         set_exist = len(val) if (val:=user_dict.get(key, False)) else 'Not Exists'
-        tds_mode = "Enabled" if user_dict.get('td_mode', config_dict['BOT_PM']) else "Disabled"
+        tds_mode = "Enabled" if user_dict.get('td_mode', False) else "Disabled"
         buttons.ibutton('Disable UserTDs' if tds_mode == 'Enabled' else 'Enable UserTDs', f"userset {user_id} td_mode", "header")
         if not config_dict['USER_TD_MODE']:
             tds_mode = "Force Disabled"
@@ -283,7 +285,9 @@ async def user_settings(client, message):
 ➲ Leech User Dump : /cmd -s ldump''')
     else:
-        msg, button = await get_user_settings(message.from_user)
+        from_user = message.from_user
+        handler_dict[from_user.id] = False
+        msg, button = await get_user_settings(from_user)
         await sendMessage(message, msg, button, 'IMAGES')

@@ -305,9 +309,11 @@ async def set_custom(client, message, pre_event, key, direct=False):
         return_key = 'leech'
     n_key = key
     user_dict = user_data.get(user_id, {})
-    if key in ['gofile', 'streamsb']:
+    if key in ['gofile', 'streamtape']:
         ddl_dict = user_dict.get('ddl_servers', {})
         mode, api = ddl_dict.get(key, [False, ""])
+        if key == "gofile" and not await Gofile.is_goapi(value):
+            value = ""
         ddl_dict[key] = [mode, value]
         value = ddl_dict
         n_key = 'ddl_servers'
@@ -462,7 +468,7 @@ async def edit_user_settings(client, query):
         try:
             await sendCustomMsg(user_id, msg, debug=True)
             await query.answer('User TDs Successfully Send in your PM', show_alert=True)
-        except:
+        except Exception:
             await query.answer('Start the Bot in PM (Private) and Try Again', show_alert=True)
         await update_user_settings(query, 'user_tds', 'mirror')
     elif data[2] == "dthumb":
@@ -546,7 +552,7 @@
         await update_user_settings(query, 'leech')
         if DATABASE_URL:
             await DbManger().update_user_data(user_id)
-    elif data[2] in ['sgofile', 'sstreamsb', 'dgofile', 'dstreamsb']:
+    elif data[2] in ['sgofile', 'sstreamtape', 'dgofile', 'dstreamtape']:
         handler_dict[user_id] = False
         ddl_dict = user_dict.get('ddl_servers', {})
         key = data[2][1:]
@@ -582,7 +588,7 @@
     else:
         await query.answer("Old Settings", show_alert=True)
         await update_user_settings(query)
-    elif data[2] in ['ddl_servers', 'user_tds', 'gofile', 'streamsb']:
+    elif data[2] in ['ddl_servers', 'user_tds', 'gofile', 'streamtape']:
         handler_dict[user_id] = False
         await query.answer()
         edit_mode = len(data) == 4
diff --git a/bot/modules/ytdlp.py b/bot/modules/ytdlp.py
index 402a65dc35..9ee3ca7055 100644
--- a/bot/modules/ytdlp.py
+++ b/bot/modules/ytdlp.py
@@ -81,7 +81,7 @@ async def __event_handler(self):
         pfunc, filters=regex('^ytq') & user(self.__user_id)), group=-1)
     try:
         await wait_for(self.event.wait(), timeout=self.__timeout)
-    except:
+    except Exception:
         await editMessage(self.__reply_to, 'Timed Out. Task has been cancelled!')
         self.qual = None
         self.is_cancelled = True
@@ -344,8 +344,6 @@ async def __run_multi():

     path = f'{DOWNLOAD_DIR}{message.id}{folder_name}'

-    opt = opt or config_dict['YT_DLP_OPTIONS']
-
     if len(text) > 1 and text[1].startswith('Tag: '):
         tag, id_ = text[1].split('Tag: ')[1].split()
         message.from_user = await client.get_users(id_)
@@ -355,6 +353,13 @@ async def __run_multi():
             pass
     elif sender_chat := message.sender_chat:
         tag = sender_chat.title
+
+    user_id = message.from_user.id
+
+    user_dict = user_data.get(user_id, {})
+
+    opt = opt or user_dict.get('yt_opt') or config_dict['YT_DLP_OPTIONS']
+
     if username := message.from_user.username:
         tag = f'@{username}'
     else:
@@ -465,6 +470,12 @@ async def __run_multi():
         yt_opt = opt.split('|')
         for ytopt in yt_opt:
             key, value = map(str.strip, ytopt.split(':', 1))
+            if key == 'format':
+                if select:
+                    qual = ''
+                elif value.startswith('ba/b-'):
+                    qual = value
+                continue
             if value.startswith('^'):
                 if '.' in value or value == '^inf':
                     value = float(value.split('^')[1])
@@ -491,13 +502,8 @@ async def __run_multi():

     __run_multi()

-    if not select:
-        user_id = message.from_user.id
-        user_dict = user_data.get(user_id, {})
-        if 'format' in options:
-            qual = options['format']
-        elif user_dict.get('yt_opt'):
-            qual = user_dict['yt_opt']
+    if not select and (not qual and 'format' in options):
+        qual = options['format']

     if not qual:
         qual = await YtSelection(client, message).get_quality(result)
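The option loop above parses the bot's `key:value|key:value` string: pairs split on `|`, key/value on the first `:`, and a `^` prefix marks values that must become numbers. A standalone sketch of the same convention (the boolean branch is an assumption; the real loop handles further cases such as `format` selection):

```python
def parse_ytdlp_options(opt: str) -> dict:
    """Convert the bot's key:value|key:value option string into a dict
    of yt-dlp API options, mirroring the loop above."""
    options = {}
    for pair in opt.split('|'):
        key, value = map(str.strip, pair.split(':', 1))
        if value.startswith('^'):
            # '^' marks numeric values: float if it has a '.' or is inf, else int
            raw = value[1:]
            value = float(raw) if '.' in raw or raw == 'inf' else int(raw)
        elif value.lower() == 'true':
            value = True
        elif value.lower() == 'false':
            value = False
        options[key] = value
    return options

# parse_ytdlp_options("format:bv*+mergeall[vcodec=none]|nocheckcertificate:True")
# -> {'format': 'bv*+mergeall[vcodec=none]', 'nocheckcertificate': True}
```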
DAILY_TASK_LIMIT = "" DAILY_MIRROR_LIMIT = "" DAILY_LEECH_LIMIT = "" USER_MAX_TASKS = "" +BOT_MAX_TASKS = "" TORRENT_LIMIT= "" DIRECT_LIMIT = "" GDRIVE_LIMIT = "" @@ -99,6 +109,7 @@ PLAYLIST_LIMIT = "" LEECH_LIMIT = "" MEGA_LIMIT = "" STORAGE_THRESHOLD = "" +USER_TIME_INTERVAL = "0" # Templates ANIME_TEMPLATE = "" @@ -109,23 +120,24 @@ MDL_TEMPLATE = "" TITLE_NAME = "WZ-M/L-X" AUTHOR_NAME = "WZML-X" AUTHOR_URL = "https://t.me/WZML_X" +COVER_IMAGE = "" # Extra SAFE_MODE = "" DELETE_LINKS = "" CLEAN_LOG_MSG = "" SHOW_EXTRA_CMDS = "" -SOURCE_LINK = "" TIMEZONE = "Asia/Kolkata" IMAGES = "" IMG_SEARCH = "" IMG_PAGE = "" BOT_THEME = "minimal" -USER_TIME_INTERVAL = "0" +EXCEP_CHATS = "" # M/L Buttons SHOW_MEDIAINFO = "False" SAVE_MSG = "False" +SOURCE_LINK = "False" # Token system TOKEN_TIMEOUT = "" diff --git a/requirements.txt b/requirements.txt index 6f1330f05d..6dd5c94375 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,6 +18,7 @@ google-auth-oauthlib gunicorn git+https://github.com/zevtyardt/lk21.git httpx +langcodes[data] lxml motor mutagen @@ -39,4 +40,4 @@ tenacity tgcrypto uvloop xattr -yt-dlp==2023.3.4 +yt-dlp==2023.7.6 diff --git a/update.py b/update.py index cced3098c2..e4a8b35f56 100644 --- a/update.py +++ b/update.py @@ -1,6 +1,7 @@ from logging import FileHandler, StreamHandler, INFO, basicConfig, error as log_error, info as log_info from os import path as ospath, environ, remove -from subprocess import run as srun +from subprocess import run as srun, call as scall +from pkg_resources import working_set from requests import get as rget from dotenv import load_dotenv, dotenv_values from pymongo import MongoClient @@ -9,7 +10,7 @@ with open('log.txt', 'r+') as f: f.truncate(0) -if ospath.exists('rlog.txt'): #RClone Logs +if ospath.exists('rlog.txt'): remove('rlog.txt') basicConfig(format="[%(asctime)s] [%(levelname)s] - %(message)s", @@ -48,8 +49,14 @@ and config_dict is not None: environ['UPSTREAM_REPO'] = config_dict['UPSTREAM_REPO'] environ['UPSTREAM_BRANCH'] = config_dict['UPSTREAM_BRANCH'] + environ['UPGRADE_PACKAGES'] = config_dict.get('UPDATE_PACKAGES', 'False') conn.close() +UPGRADE_PACKAGES = environ.get('UPGRADE_PACKAGES', 'False') +if UPGRADE_PACKAGES.lower() == 'true': + packages = [dist.project_name for dist in working_set] + scall("pip install " + ' '.join(packages), shell=True) + UPSTREAM_REPO = environ.get('UPSTREAM_REPO', '') if len(UPSTREAM_REPO) == 0: UPSTREAM_REPO = None @@ -76,5 +83,5 @@ if update.returncode == 0: log_info('Successfully updated with latest commits !!') else: - log_error('Something went Wrong !!') - log_error(f'UPSTREAM_REPO: {UPSTREAM_REPO} | UPSTREAM_BRANCH: {UPSTREAM_BRANCH}') + log_error('Something went Wrong ! Retry or Ask Support !') + log_info(f'UPSTREAM_REPO: {UPSTREAM_REPO} | UPSTREAM_BRANCH: {UPSTREAM_BRANCH}') diff --git a/web/wserver.py b/web/wserver.py index 29b6481934..f745238f61 100644 --- a/web/wserver.py +++ b/web/wserver.py @@ -620,13 +620,13 @@ src="https://graph.org/file/1a6ad157f55bc42b548df.png" alt="logo" /> - +

    Bittorrent Selection

@@ -781,7 +781,70 @@ def set_priority(id_):

 @app.route('/')
 def homepage():
-    return "Checkout WZML-X @GitHub By WZML-X Devs"
+    return """[β‰ˆ60 lines of HTML homepage markup lost in extraction; only the visible text "WZML-X" survives]"""

 @app.errorhandler(Exception)