Skip to content

Commit

Permalink
Add files via upload
Browse files Browse the repository at this point in the history
  • Loading branch information
kwichighspeedbot authored Oct 17, 2022
1 parent 47f7892 commit c52baea
Show file tree
Hide file tree
Showing 12 changed files with 484 additions and 0 deletions.
1 change: 1 addition & 0 deletions Adarsh/utils/@Aadhi000
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@

1 change: 1 addition & 0 deletions Adarsh/utils/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# (c) adarsh-goel
22 changes: 22 additions & 0 deletions Adarsh/utils/broadcast_helper.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# (c) adarsh-goel

import asyncio
import traceback
from pyrogram.errors import FloodWait, InputUserDeactivated, UserIsBlocked, PeerIdInvalid


async def send_msg(user_id, message):
    """Forward *message* to the user identified by *user_id*.

    Returns a ``(status, error)`` tuple:
      * ``(200, None)`` on success,
      * ``(400, "<reason>\\n")`` for permanent per-user failures
        (deactivated account, bot blocked, invalid peer id),
      * ``(500, "<traceback>\\n")`` for any unexpected error.
    """
    try:
        await message.forward(chat_id=user_id)
        return 200, None
    except FloodWait as e:
        # Telegram asked us to back off; sleep out the flood window and retry.
        # NOTE(review): pyrogram v1 exposes the wait as ``e.x``, v2 as
        # ``e.value`` — read whichever exists so both versions work.
        await asyncio.sleep(getattr(e, "value", None) or getattr(e, "x", 0))
        # The recursive retry must be awaited — otherwise a coroutine object
        # (not a status tuple) is returned to the caller.
        return await send_msg(user_id, message)
    except InputUserDeactivated:
        return 400, f"{user_id} : deactivated\n"
    except UserIsBlocked:
        return 400, f"{user_id} : blocked the bot\n"
    except PeerIdInvalid:
        return 400, f"{user_id} : user id invalid\n"
    except Exception:
        return 500, f"{user_id} : {traceback.format_exc()}\n"
19 changes: 19 additions & 0 deletions Adarsh/utils/config_parser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from os import environ
from typing import Dict, Optional


class TokenParser:
    """Collects numbered bot tokens from the process environment.

    Every environment variable whose name starts with ``MULTI_TOKEN`` is
    picked up (in lexicographic name order) and exposed as a mapping of
    1-based client index -> token string.
    """

    def __init__(self, config_file: Optional[str] = None):
        self.tokens = {}
        self.config_file = config_file

    def parse_from_env(self) -> Dict[int, str]:
        """Scan ``os.environ`` for MULTI_TOKEN* entries and index them from 1."""
        matching = [
            value
            for name, value in sorted(environ.items())
            if name.startswith("MULTI_TOKEN")
        ]
        self.tokens = {index: token for index, token in enumerate(matching, start=1)}
        return self.tokens
237 changes: 237 additions & 0 deletions Adarsh/utils/custom_dl.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,237 @@
import math
import asyncio
import logging
from Adarsh.vars import Var
from typing import Dict, Union
from Adarsh.bot import work_loads
from pyrogram import Client, utils, raw
from .file_properties import get_file_ids
from pyrogram.session import Session, Auth
from pyrogram.errors import AuthBytesInvalid
from Adarsh.server.exceptions import FIleNotFound
from pyrogram.file_id import FileId, FileType, ThumbnailSource


async def chunk_size(length):
    """Pick a power-of-two chunk size (4 KiB .. 1 MiB) suited to *length* bytes."""
    exponent = math.ceil(math.log2(length / 1024))
    clamped = min(max(exponent, 2), 10)  # keep within 2**2 KiB .. 2**10 KiB
    return (1 << clamped) * 1024


async def offset_fix(offset, chunksize):
    """Align *offset* down to the nearest multiple of *chunksize*."""
    return (offset // chunksize) * chunksize


class ByteStreamer:
    def __init__(self, client: Client):
        """A custom class that holds the cache of a specific client and class functions.

        attributes:
            client: the client that the cache is for.
            cached_file_ids: a dict of cached file properties keyed by message ID.
        functions:
            get_file_properties: cached lookup of a message's FileId, generating on a miss.
            generate_file_properties: returns the properties for a media of a specific message.
            generate_media_session: returns the media session for the DC that contains the media file.
            yield_file: yield a file from telegram servers for streaming.
        This is a modified version of the <https://github.com/eyaadh/megadlbot_oss/blob/master/mega/telegram/utils/custom_download.py>
        Thanks to Eyaadh <https://github.com/eyaadh>
        """
        self.clean_timer = 30 * 60  # cache-purge interval in seconds (30 minutes)
        self.client: Client = client
        self.cached_file_ids: Dict[int, FileId] = {}
        # Background task that periodically empties the FileId cache.
        asyncio.create_task(self.clean_cache())

    async def get_file_properties(self, id: int) -> FileId:
        """
        Returns the properties of a media of a specific message in a FileId class.
        If the properties are cached, the cached result is returned;
        otherwise they are generated from the message ID and cached.
        """
        if id not in self.cached_file_ids:
            await self.generate_file_properties(id)
            logging.debug(f"Cached file properties for message with ID {id}")
        return self.cached_file_ids[id]

    async def generate_file_properties(self, id: int) -> FileId:
        """
        Generates the properties of a media file on a specific message.
        Returns the properties in a FileId class; raises FIleNotFound when
        the message does not exist in BIN_CHANNEL.
        """
        file_id = await get_file_ids(self.client, Var.BIN_CHANNEL, id)
        logging.debug(f"Generated file ID and Unique ID for message with ID {id}")
        if not file_id:
            logging.debug(f"Message with ID {id} not found")
            raise FIleNotFound
        self.cached_file_ids[id] = file_id
        logging.debug(f"Cached media message with ID {id}")
        return self.cached_file_ids[id]

    async def generate_media_session(self, client: Client, file_id: FileId) -> Session:
        """
        Generates the media session for the DC that contains the media file.
        This is required for getting the bytes from Telegram servers.
        Sessions are cached per DC in ``client.media_sessions``.
        """
        media_session = client.media_sessions.get(file_id.dc_id, None)

        if media_session is None:
            if file_id.dc_id != await client.storage.dc_id():
                # Media lives on a foreign DC: create a fresh auth key there
                # and import our authorization into it.
                media_session = Session(
                    client,
                    file_id.dc_id,
                    await Auth(
                        client, file_id.dc_id, await client.storage.test_mode()
                    ).create(),
                    await client.storage.test_mode(),
                    is_media=True,
                )
                await media_session.start()

                # ImportAuthorization can fail transiently; retry up to 6 times.
                for _ in range(6):
                    exported_auth = await client.invoke(
                        raw.functions.auth.ExportAuthorization(dc_id=file_id.dc_id)
                    )

                    try:
                        await media_session.send(
                            raw.functions.auth.ImportAuthorization(
                                id=exported_auth.id, bytes=exported_auth.bytes
                            )
                        )
                        break
                    except AuthBytesInvalid:
                        logging.debug(
                            f"Invalid authorization bytes for DC {file_id.dc_id}"
                        )
                        continue
                else:
                    # All retries exhausted — give up on this session.
                    await media_session.stop()
                    raise AuthBytesInvalid
            else:
                # Same DC as the client: reuse the existing auth key.
                media_session = Session(
                    client,
                    file_id.dc_id,
                    await client.storage.auth_key(),
                    await client.storage.test_mode(),
                    is_media=True,
                )
                await media_session.start()
            logging.debug(f"Created media session for DC {file_id.dc_id}")
            client.media_sessions[file_id.dc_id] = media_session
        else:
            logging.debug(f"Using cached media session for DC {file_id.dc_id}")
        return media_session

    @staticmethod
    async def get_location(file_id: FileId) -> Union[raw.types.InputPhotoFileLocation,
                                                     raw.types.InputDocumentFileLocation,
                                                     raw.types.InputPeerPhotoFileLocation,]:
        """
        Returns the raw Telegram file location for the media file,
        chosen by the media's FileType.
        """
        file_type = file_id.file_type

        if file_type == FileType.CHAT_PHOTO:
            if file_id.chat_id > 0:
                peer = raw.types.InputPeerUser(
                    user_id=file_id.chat_id, access_hash=file_id.chat_access_hash
                )
            else:
                if file_id.chat_access_hash == 0:
                    peer = raw.types.InputPeerChat(chat_id=-file_id.chat_id)
                else:
                    peer = raw.types.InputPeerChannel(
                        channel_id=utils.get_channel_id(file_id.chat_id),
                        access_hash=file_id.chat_access_hash,
                    )

            location = raw.types.InputPeerPhotoFileLocation(
                peer=peer,
                volume_id=file_id.volume_id,
                local_id=file_id.local_id,
                big=file_id.thumbnail_source == ThumbnailSource.CHAT_PHOTO_BIG,
            )
        elif file_type == FileType.PHOTO:
            location = raw.types.InputPhotoFileLocation(
                id=file_id.media_id,
                access_hash=file_id.access_hash,
                file_reference=file_id.file_reference,
                thumb_size=file_id.thumbnail_size,
            )
        else:
            location = raw.types.InputDocumentFileLocation(
                id=file_id.media_id,
                access_hash=file_id.access_hash,
                file_reference=file_id.file_reference,
                thumb_size=file_id.thumbnail_size,
            )
        return location

    async def yield_file(
        self,
        file_id: FileId,
        index: int,
        offset: int,
        first_part_cut: int,
        last_part_cut: int,
        part_count: int,
        chunk_size: int,
    ) -> Union[str, None]:
        """
        Custom generator that yields the bytes of the media file.

        *index* identifies which client is serving this request so its
        entry in ``work_loads`` can be incremented for the duration.
        Modded from <https://github.com/eyaadh/megadlbot_oss/blob/master/mega/telegram/utils/custom_download.py#L20>
        Thanks to Eyaadh <https://github.com/eyaadh>
        """
        client = self.client
        work_loads[index] += 1
        logging.debug(f"Starting to yielding file with client {index}.")
        media_session = await self.generate_media_session(client, file_id)

        current_part = 1

        location = await self.get_location(file_id)

        try:
            r = await media_session.send(
                raw.functions.upload.GetFile(
                    location=location, offset=offset, limit=chunk_size
                ),
            )
            if isinstance(r, raw.types.upload.File):
                while current_part <= part_count:
                    chunk = r.bytes
                    if not chunk:
                        break
                    offset += chunk_size
                    if part_count == 1:
                        # Single-part request: trim both ends of the chunk.
                        yield chunk[first_part_cut:last_part_cut]
                        break
                    if current_part == 1:
                        yield chunk[first_part_cut:]
                    if 1 < current_part <= part_count:
                        # NOTE(review): the final part is yielded whole, not
                        # trimmed to last_part_cut — confirm this matches the
                        # range handling expected by the HTTP layer.
                        yield chunk

                    r = await media_session.send(
                        raw.functions.upload.GetFile(
                            location=location, offset=offset, limit=chunk_size
                        ),
                    )

                    current_part += 1
        except (TimeoutError, AttributeError):
            pass
        finally:
            # Fix: the original logged the literal text "{current_part}"
            # because the f-prefix was missing.
            logging.debug(f"Finished yielding file with {current_part} parts.")
            work_loads[index] -= 1

    async def clean_cache(self) -> None:
        """
        Function to clean the cache to reduce memory usage.
        Runs forever, clearing cached FileIds every ``clean_timer`` seconds.
        """
        while True:
            await asyncio.sleep(self.clean_timer)
            self.cached_file_ids.clear()
            logging.debug("Cleaned the cache")
43 changes: 43 additions & 0 deletions Adarsh/utils/database.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
#(c) Adarsh-Goel
import datetime
import motor.motor_asyncio


class Database:
    """Thin async wrapper around the bot's MongoDB ``users`` collection."""

    def __init__(self, uri, database_name):
        self._client = motor.motor_asyncio.AsyncIOMotorClient(uri)
        self.db = self._client[database_name]
        self.col = self.db.users

    def new_user(self, id):
        """Build the document stored for a newly seen user."""
        return {
            "id": id,
            "join_date": datetime.date.today().isoformat(),
        }

    async def add_user(self, id):
        """Insert a fresh user document for *id*."""
        await self.col.insert_one(self.new_user(id))

    async def add_user_pass(self, id, ag_pass):
        """Insert the user, then attach their password under ``ag_p``."""
        await self.add_user(int(id))
        await self.col.update_one({'id': int(id)}, {'$set': {'ag_p': ag_pass}})

    async def get_user_pass(self, id):
        """Return the stored password for *id*, or None when unknown."""
        doc = await self.col.find_one({'id': int(id)})
        return doc.get("ag_p", None) if doc else None

    async def is_user_exist(self, id):
        """Whether a document for *id* exists."""
        return bool(await self.col.find_one({'id': int(id)}))

    async def total_users_count(self):
        """Number of stored user documents."""
        return await self.col.count_documents({})

    async def get_all_users(self):
        """Cursor over every stored user document."""
        return self.col.find({})

    async def delete_user(self, user_id):
        """Remove every document whose id matches *user_id*."""
        await self.col.delete_many({'id': int(user_id)})
60 changes: 60 additions & 0 deletions Adarsh/utils/file_properties.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
from pyrogram import Client
from typing import Any, Optional
from pyrogram.types import Message
from pyrogram.file_id import FileId
from pyrogram.raw.types.messages import Messages
from Adarsh.server.exceptions import FIleNotFound


async def parse_file_id(message: "Message") -> Optional[FileId]:
    """Decode the FileId of *message*'s media, or None when it has no media."""
    media = get_media_from_message(message)
    return FileId.decode(media.file_id) if media else None

async def parse_file_unique_id(message: "Messages") -> Optional[str]:
    """Return the media's file_unique_id, or None when the message has no media."""
    media = get_media_from_message(message)
    return media.file_unique_id if media else None

async def get_file_ids(client: Client, chat_id: int, id: int) -> Optional[FileId]:
    """Fetch message *id* from *chat_id* and return its FileId enriched with
    file_size, mime_type, file_name and unique_id attributes.

    Raises:
        FIleNotFound: when the message is empty or carries no media.
    """
    message = await client.get_messages(chat_id, id)
    if message.empty:
        raise FIleNotFound
    media = get_media_from_message(message)
    if not media:
        # No downloadable media on the message: report "not found" instead of
        # crashing with AttributeError on setattr(None, ...) below.
        raise FIleNotFound
    file_unique_id = await parse_file_unique_id(message)
    file_id = await parse_file_id(message)
    setattr(file_id, "file_size", getattr(media, "file_size", 0))
    setattr(file_id, "mime_type", getattr(media, "mime_type", ""))
    setattr(file_id, "file_name", getattr(media, "file_name", ""))
    setattr(file_id, "unique_id", file_unique_id)
    return file_id

def get_media_from_message(message: "Message") -> Any:
    """Return the first media attribute present on *message*, else None.

    Media kinds are probed in a fixed priority order (audio first,
    video_note last), matching Telegram's one-media-per-message model.
    """
    kinds = (
        "audio",
        "document",
        "photo",
        "sticker",
        "animation",
        "video",
        "voice",
        "video_note",
    )
    return next(
        (media for media in (getattr(message, kind, None) for kind in kinds) if media),
        None,
    )


def get_hash(media_msg: Message) -> str:
    """First six characters of the media's unique file id ('' when absent)."""
    unique_id = getattr(get_media_from_message(media_msg), "file_unique_id", "")
    return unique_id[:6]

def get_name(media_msg: Message) -> str:
    """Return the media's file name, or '' when it has none."""
    media = get_media_from_message(media_msg)
    name = getattr(media, "file_name", "")
    return name or ""

def get_media_file_size(m):
    """Size in bytes of the message's media (0 when unknown or absent)."""
    return getattr(get_media_from_message(m), "file_size", 0)
4 changes: 4 additions & 0 deletions Adarsh/utils/file_size.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# (c) adarsh-goel
def human_size(bytes, units=(' bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB')):
    """Return a human-readable string representation of *bytes*.

    Recursively shifts by 10 bits per unit step (1 KB == 1024 bytes).
    ``units`` is a tuple (not a list) to avoid the mutable-default-argument
    pitfall; when the largest unit is reached the value is printed as-is,
    where the original would raise IndexError on values past EB.
    """
    value = int(bytes)
    if value < 1024 or len(units) == 1:
        return str(bytes) + units[0]
    return human_size(value >> 10, units[1:])
Loading

0 comments on commit c52baea

Please sign in to comment.