
Commit

refactor: custom download
fix: file name not included in links
EverythingSuckz committed Feb 4, 2022
1 parent 8a6987e commit 977dcb0
Showing 8 changed files with 175 additions and 162 deletions.
4 changes: 2 additions & 2 deletions WebStreamer/__main__.py
@@ -3,12 +3,12 @@

import asyncio
import logging
from .vars import Var
from aiohttp import web
from pyrogram import idle
from WebStreamer import utils
from WebStreamer import bot_info
from WebStreamer.vars import Var
from WebStreamer.server import web_server
from WebStreamer import utils
from WebStreamer.bot.clients import initialize_clients


28 changes: 4 additions & 24 deletions WebStreamer/bot/plugins/stream.py
@@ -6,27 +6,11 @@
from WebStreamer.vars import Var
from urllib.parse import quote_plus
from WebStreamer.bot import StreamBot
from WebStreamer.utils import get_unique_id
from WebStreamer.utils import get_hash, get_name
from pyrogram.types import Message, InlineKeyboardMarkup, InlineKeyboardButton


def detect_type(media_msg: Message):
    attribute = None
    for attr in (
        "audio",
        "document",
        "photo",
        "sticker",
        "animation",
        "video",
        "voice",
        "video_note",
    ):
        try:
            attribute = getattr(media_msg, attr)
        except AttributeError:
            continue
    return attribute


@StreamBot.on_message(
    filters.private
@@ -43,13 +27,9 @@ def detect_type(media_msg: Message):
    group=4,
)
async def media_receive_handler(_, m: Message):
    file = detect_type(m)
    file_name = ""
    if file:
        file_name = file.file_name
    log_msg = await m.forward(chat_id=Var.BIN_CHANNEL)
    stream_link = f"{Var.URL}{log_msg.message_id}/{quote_plus(file_name)}?hash={get_unique_id(log_msg)}"
    short_link = f"{Var.URL}{get_unique_id(log_msg)}{log_msg.message_id}"
    stream_link = f"{Var.URL}{log_msg.message_id}/{quote_plus(get_name(m))}?hash={get_hash(log_msg)}"
    short_link = f"{Var.URL}{get_hash(log_msg)}{log_msg.message_id}"
    logging.info(f"Generated link: {stream_link} for {m.from_user.first_name}")
    await m.reply_text(
        text="<code>{}</code>\n(<a href='{}'>shortened</a>)".format(
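The handler now derives the file name and hash directly from the message instead of the removed detect_type helper: the stream link carries the URL-quoted file name in the path plus the hash as a query parameter, and the short link prepends the hash to the message ID. A minimal sketch of the resulting URLs, using made-up values for Var.URL, the forwarded message ID and the hash:

from urllib.parse import quote_plus

# Hypothetical values, for illustration only.
base_url = "https://dl.example.com/"   # stands in for Var.URL
log_msg_id = 1234                      # log_msg.message_id
file_name = "My Video.mp4"             # what get_name(m) is assumed to return
secure_hash = "a1b2c3"                 # what get_hash(log_msg) is assumed to return

stream_link = f"{base_url}{log_msg_id}/{quote_plus(file_name)}?hash={secure_hash}"
# -> https://dl.example.com/1234/My+Video.mp4?hash=a1b2c3
short_link = f"{base_url}{secure_hash}{log_msg_id}"
# -> https://dl.example.com/a1b2c31234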
6 changes: 6 additions & 0 deletions WebStreamer/server/exceptions.py
@@ -0,0 +1,6 @@

class InvalidHash(Exception):
    message = "Invalid hash"

class FIleNotFound(Exception):
    message = "File not found"
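Both exception types expose a class-level message string; the route handler in stream_routes.py (below) converts them into HTTP errors, 403 for a bad hash and 404 for a missing file. A minimal sketch of that mapping (to_http_error is a hypothetical helper, not part of the commit):

from aiohttp import web

from WebStreamer.server.exceptions import FIleNotFound, InvalidHash


def to_http_error(exc: Exception) -> web.HTTPException:
    # Mirrors the mapping used in stream_handler below.
    if isinstance(exc, InvalidHash):
        return web.HTTPForbidden(text=exc.message)      # 403: hash mismatch
    if isinstance(exc, FIleNotFound):
        return web.HTTPNotFound(text=exc.message)       # 404: no such message/file
    return web.HTTPInternalServerError(text=str(exc))   # anything else -> 500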
87 changes: 45 additions & 42 deletions WebStreamer/server/stream_routes.py
@@ -8,32 +8,39 @@
import secrets
import mimetypes
from aiohttp import web
from WebStreamer import utils
from WebStreamer.vars import Var
from pyrogram import Client
from aiohttp.http_exceptions import BadStatusLine
from WebStreamer.bot import multi_clients, work_loads
from WebStreamer import StartTime, __version__, bot_info
from WebStreamer.bot.clients import multi_clients, work_loads
from WebStreamer.server.exceptions import FIleNotFound, InvalidHash
from WebStreamer import Var, utils, StartTime, __version__, bot_info

routes = web.RouteTableDef()

routes = web.RouteTableDef()

@routes.get("/", allow_head=True)
async def root_route_handler(request):
async def root_route_handler(_):
    return web.json_response(
        {
            "server_status": "running",
            "uptime": utils.get_readable_time(time.time() - StartTime),
            "telegram_bot": "@" + bot_info.username,
            "connected_bots": len(multi_clients),
            "loads": work_loads,
            "loads": dict(
                ("bot" + str(c + 1), l)
                for c, (_, l) in enumerate(
                    sorted(work_loads.items(), key=lambda x: x[1], reverse=True)
                )
            ),
            "version": __version__,
        }
    )


@routes.get(r"/{path:\S+}", allow_head=True)
async def stream_handler(request: web.Request):
    index = min(work_loads, key=work_loads.get)
    faster_client = multi_clients[index]
    work_loads[index] += 1
    try:
        path = request.match_info["path"]
        match = re.search(r"^([a-zA-Z0-9_-]{6})(\d+)$", path)
@@ -43,44 +50,42 @@ async def stream_handler(request: web.Request):
        else:
            message_id = int(re.search(r"(\d+)(?:\/\S+)?", path).group(1))
            secure_hash = request.rel_url.query.get("hash")
        return await media_streamer(request, message_id, secure_hash)
    except ValueError:
        raise web.HTTPNotFound
    except AttributeError:
        pass
    except BadStatusLine:
        return await media_streamer(request, faster_client, index, message_id, secure_hash)
    except InvalidHash as e:
        raise web.HTTPForbidden(text=e.message)
    except FIleNotFound as e:
        raise web.HTTPNotFound(text=e.message)
    except (AttributeError, BadStatusLine, ConnectionResetError):
        pass

    except Exception as e:
        logging.critical(e.with_traceback())
        raise web.HTTPInternalServerError(text=str(e))
    finally:
        work_loads[index] -= 1

class_cache = {}

async def media_streamer(request: web.Request, message_id: int, secure_hash: str):
async def media_streamer(request: web.Request, client: Client, index:str, message_id: int, secure_hash: str):
    range_header = request.headers.get("Range", 0)

    _index = min(work_loads, key=work_loads.get)
    faster_client = multi_clients[_index]
    work_loads[_index] += 1
    if Var.MULTI_CLIENT:
        logging.info(f"Client {_index} is now serving {request.remote}")
        logging.info(f"Client {index} is now serving {request.remote}")


    if faster_client in class_cache:
        tg_connect = class_cache[faster_client]
        logging.debug(f"Using cached ByteStreamer object for client {_index}")
    if client in class_cache:
        tg_connect = class_cache[client]
        logging.debug(f"Using cached ByteStreamer object for client {index}")
    else:
        logging.debug(f"Creating new ByteStreamer object for client {_index}")
        tg_connect = utils.ByteStreamer(faster_client)
        class_cache[faster_client] = tg_connect

    media_msg = await tg_connect.get_media_msg(message_id)

    if utils.get_unique_id(media_msg) != secure_hash:
        work_loads[_index] -= 1
        raise web.HTTPForbidden

        logging.debug(f"Creating new ByteStreamer object for client {index}")
        tg_connect = utils.ByteStreamer(client)
        class_cache[client] = tg_connect
    logging.debug("before calling get_file_properties")
    file_id, file_unique_id = await tg_connect.get_file_properties(message_id)
    logging.debug("after calling get_file_properties")

    file_properties = await tg_connect.get_file_properties(media_msg)
    file_size = file_properties.file_size
    if file_unique_id.encode()[:6] != secure_hash:
        logging.debug(f"Invalid hash for message with ID {message_id}")
        raise InvalidHash

    file_size = file_id.file_size

    if range_header:
        from_bytes, until_bytes = range_header.replace("bytes=", "").split("-")
@@ -97,11 +102,11 @@ async def media_streamer(request: web.Request, message_id: int, secure_hash: str
    last_part_cut = (until_bytes % new_chunk_size) + 1
    part_count = math.ceil(req_length / new_chunk_size)
    body = tg_connect.yield_file(
        media_msg, offset, first_part_cut, last_part_cut, part_count, new_chunk_size
        message_id, offset, first_part_cut, last_part_cut, part_count, new_chunk_size
    )

    mime_type = file_properties.mime_type
    file_name = file_properties.file_name
    mime_type = file_id.mime_type
    file_name = file_id.file_name
    disposition = "attachment"
    if mime_type:
        if not file_name:
@@ -111,7 +116,7 @@ async def media_streamer(request: web.Request, message_id: int, secure_hash: str
                file_name = f"{secrets.token_hex(2)}.unknown"
    else:
        if file_name:
            mime_type = mimetypes.guess_type(file_properties.file_name)
            mime_type = mimetypes.guess_type(file_id.file_name)
        else:
            mime_type = "application/octet-stream"
            file_name = f"{secrets.token_hex(2)}.unknown"
@@ -122,7 +127,6 @@ async def media_streamer(request: web.Request, message_id: int, secure_hash: str
        body=body,
        headers={
            "Content-Type": f"{mime_type}",
            "Content-Length": f"{file_size}",
            "Range": f"bytes={from_bytes}-{until_bytes}",
            "Content-Range": f"bytes {from_bytes}-{until_bytes}/{file_size}",
            "Content-Disposition": f'{disposition}; filename="{file_name}"',
@@ -133,5 +137,4 @@ async def media_streamer(request: web.Request, message_id: int, secure_hash: str
    if return_resp.status == 200:
        return_resp.headers.add("Content-Length", str(file_size))

    work_loads[_index] -= 1
    return return_resp
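offset_fix and chunk_size are imported from WebStreamer/utils/custom_dl.py, which is not part of this excerpt. The worked example below only illustrates how the cut points and part count above relate to a Range request, assuming offset_fix rounds the start byte down to a chunk boundary and chunk_size settles on 1 MiB for a request of this size:

import math

# Hypothetical request: "Range: bytes=3000000-7999999" against a 10 MB file.
from_bytes, until_bytes = 3_000_000, 7_999_999
req_length = until_bytes - from_bytes

new_chunk_size = 1024 * 1024                          # assumed output of chunk_size(req_length)
offset = from_bytes - (from_bytes % new_chunk_size)   # assumed behaviour of offset_fix()

first_part_cut = from_bytes - offset                  # bytes to skip in the first chunk
last_part_cut = (until_bytes % new_chunk_size) + 1    # bytes to keep from the last chunk
part_count = math.ceil(req_length / new_chunk_size)   # chunks yield_file() has to fetch

print(offset, first_part_cut, last_part_cut, part_count)
# 2097152 902848 659968 5 (values follow from the assumptions above)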
2 changes: 1 addition & 1 deletion WebStreamer/utils/__init__.py
@@ -1,8 +1,8 @@
# This file is a part of TG-FileStreamBot
# Coding : Jyothis Jayanth [@EverythingSuckz]

from .file_id import get_unique_id
from .keepalive import ping_server
from .config_parser import TokenParser
from .time_format import get_readable_time
from .file_properties import get_hash, get_name
from .custom_dl import ByteStreamer, offset_fix, chunk_size
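get_hash and get_name now live in WebStreamer/utils/file_properties.py, which is among the eight changed files but is not shown in this excerpt. A plausible sketch, reconstructed only from how the two helpers are called in stream.py and stream_routes.py (the hash is compared against the first six characters of the file's unique ID):

# Sketch only - the real file_properties.py is not shown in this diff excerpt.
from pyrogram.types import Message


def get_media_from_message(message: Message):
    # Return whichever media attribute is set on the message, if any.
    for attr in ("audio", "document", "photo", "sticker",
                 "animation", "video", "voice", "video_note"):
        media = getattr(message, attr, None)
        if media:
            return media
    return None


def get_hash(media_msg: Message) -> str:
    # stream_routes.py checks the hash against file_unique_id, so the first
    # six characters of that ID are assumed here.
    media = get_media_from_message(media_msg)
    return getattr(media, "file_unique_id", "")[:6]


def get_name(media_msg: Message) -> str:
    # Used by stream.py to embed a readable file name in the stream link.
    media = get_media_from_message(media_msg)
    return getattr(media, "file_name", "") or ""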

