
Commit 8550bd8: random fix
DeekshithSH committed May 9, 2024
1 parent aaec51d

Showing 2 changed files with 20 additions and 65 deletions.
34 changes: 9 additions & 25 deletions WebStreamer/server/stream_routes.py
@@ -74,36 +74,20 @@ async def media_streamer(request: web.Request, db_id: str):

     file_size = file_id.file_size
 
-    if range_header:
-        from_bytes, until_bytes = range_header.replace("bytes=", "").split("-")
-        from_bytes = int(from_bytes)
-        until_bytes = int(until_bytes) if until_bytes else file_size - 1
-    else:
-        from_bytes = request.http_range.start or 0
-        until_bytes = (request.http_range.stop or file_size) - 1
-
-    logging.debug(f"from_bytes: {from_bytes} until_bytes: {until_bytes}")
-    if from_bytes <10 and until_bytes >200:
-        await db.increment_dl_count(file_id.org_id)
-
-    if (until_bytes > file_size) or (from_bytes < 0) or (until_bytes < from_bytes):
+    try:
+        offset = request.http_range.start or 0
+        limit = request.http_range.stop or file_size
+        if (limit > file_size) or (offset < 0) or (limit < offset):
+            raise ValueError("range not in acceptable format")
+    except ValueError:
         return web.Response(
             status=416,
             body="416: Range not satisfiable",
             headers={"Content-Range": f"bytes */{file_size}"},
         )
 
-    chunk_size = 1024 * 1024
-    until_bytes = min(until_bytes, file_size - 1)
-
-    offset = from_bytes - (from_bytes % chunk_size)
-    first_part_cut = from_bytes - offset
-    last_part_cut = until_bytes % chunk_size + 1
-
-    req_length = until_bytes - from_bytes + 1
-    part_count = math.ceil(until_bytes / chunk_size) - math.floor(offset / chunk_size)
     body = tg_connect.yield_file(
-        file_id, offset, first_part_cut, last_part_cut, part_count, chunk_size, multi_clients
+        file_id, offset, limit, multi_clients
     )
 
     mime_type = file_id.mime_type
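
To make the new range handling concrete: aiohttp exposes the parsed Range header as request.http_range, a slice whose start is the inclusive first byte and whose stop is exclusive (one past the last byte), and accessing it raises ValueError for a malformed header; HTTP's Content-Range end byte, by contrast, is inclusive. The following standalone sketch is not part of this commit; ranged_handler, FILE_SIZE and PAYLOAD are made-up names used only to illustrate the pattern.

# Illustrative aiohttp handler built on request.http_range (hypothetical, not repo code).
from aiohttp import web

FILE_SIZE = 10 * 1024 * 1024  # pretend the served object is 10 MiB
PAYLOAD = b"\0" * FILE_SIZE   # stand-in for the real file bytes

async def ranged_handler(request: web.Request) -> web.Response:
    try:
        # .start is the inclusive first byte, .stop is one past the last byte;
        # a malformed Range header makes http_range raise ValueError.
        offset = request.http_range.start or 0
        limit = request.http_range.stop or FILE_SIZE
        if (limit > FILE_SIZE) or (offset < 0) or (limit < offset):
            raise ValueError("range not in acceptable format")
    except ValueError:
        return web.Response(
            status=416,
            body="416: Range not satisfiable",
            headers={"Content-Range": f"bytes */{FILE_SIZE}"},
        )

    return web.Response(
        status=206,
        body=PAYLOAD[offset:limit],
        headers={
            # HTTP's Content-Range end is inclusive, hence limit - 1 here.
            "Content-Range": f"bytes {offset}-{limit - 1}/{FILE_SIZE}",
            "Accept-Ranges": "bytes",
        },
    )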
@@ -121,8 +105,8 @@ async def media_streamer(request: web.Request, db_id: str):
         body=body,
         headers={
             "Content-Type": f"{mime_type}",
-            "Content-Range": f"bytes {from_bytes}-{until_bytes}/{file_size}",
-            "Content-Length": str(req_length),
+            "Content-Range": f"bytes {offset}-{limit}/{file_size}",
+            "Content-Length": str(limit - offset),
             "Content-Disposition": f'{disposition}; filename="{file_name}"',
             "Accept-Ranges": "bytes",
         },
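
On the response side, the route hands web.Response an async generator (the tg_connect.yield_file(...) call above) as the body; aiohttp wraps any async iterable in a streaming payload whose total size it cannot know up front, which is why Content-Length is set by hand in the headers rather than derived from the body. A small self-contained sketch of that pattern, with a made-up stream_chunks generator standing in for the Telegram stream:

# Illustrative streaming response (hypothetical names, not repo code).
from aiohttp import web

async def stream_chunks():
    # pretend these pieces come from a remote source, one chunk at a time
    for piece in (b"hello ", b"byte ", b"ranges"):
        yield piece

async def handler(request: web.Request) -> web.Response:
    # With no Content-Length supplied, aiohttp falls back to chunked transfer
    # encoding for the async-iterable body.
    return web.Response(body=stream_chunks())

app = web.Application()
app.add_routes([web.get("/stream", handler)])

if __name__ == "__main__":
    web.run_app(app)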
51 changes: 11 additions & 40 deletions WebStreamer/utils/custom_dl.py
@@ -3,8 +3,9 @@
 import asyncio
 import logging
 # import heapq
+import math
 from typing import Dict, Union
-from WebStreamer.bot import work_loads
+from WebStreamer.bot import StreamBot, work_loads
 from pyrogram import Client, utils, raw
 from .file_properties import get_file_ids
 from pyrogram.session import Session, Auth
@@ -164,10 +165,7 @@ async def yield_file(
         self,
         file_id: FileId,
         offset: int,
-        first_part_cut: int,
-        last_part_cut: int,
-        part_count: int,
-        chunk_size: int,
+        limit: int,
         multi_clients
     ) -> Union[str, None]:
         """
@@ -178,47 +176,20 @@
         client = multi_clients[file_id.index]
         work_loads[file_id.index] += 1
         logging.debug(f"Starting to yielding file with client {file_id.index}.")
-        media_session = await self.generate_media_session(client, file_id)
+        try:
+            if offset < 0:
+                if file_id.file_size == 0:
+                    raise ValueError("Negative offsets are not supported for file ids, pass a Message object instead")
 
-        current_part = 1
+                chunks = math.ceil(file_id.file_size / 1024 / 1024)
+                offset += chunks
 
-        location = await self.get_location(file_id)
+            async for chunk in client.get_file(file_id, file_id.file_size, limit, offset):
+                yield chunk
 
-        try:
-            r = await media_session.invoke(
-                raw.functions.upload.GetFile(
-                    location=location, offset=offset, limit=chunk_size
-                ),
-            )
-            if isinstance(r, raw.types.upload.File):
-                while True:
-                    chunk = r.bytes
-                    if not chunk:
-                        break
-                    elif part_count == 1:
-                        yield chunk[first_part_cut:last_part_cut]
-                    elif current_part == 1:
-                        yield chunk[first_part_cut:]
-                    elif current_part == part_count:
-                        yield chunk[:last_part_cut]
-                    else:
-                        yield chunk
-
-                    current_part += 1
-                    offset += chunk_size
-
-                    if current_part > part_count:
-                        break
-
-                    r = await media_session.invoke(
-                        raw.functions.upload.GetFile(
-                            location=location, offset=offset, limit=chunk_size
-                        ),
-                    )
         except (TimeoutError, AttributeError):
             pass
         finally:
-            logging.debug(f"Finished yielding file with {current_part} parts.")
             work_loads[file_id.index] -= 1
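
The rewritten yield_file above hands chunking over to pyrogram by calling the client's lower-level get_file helper with the offset and limit received from the route. The documented public wrapper around that helper is Client.stream_media, whose offset and limit count roughly 1 MiB chunks rather than bytes. A minimal sketch of streaming part of a file that way, outside this codebase, with placeholder credentials and chat/message ids:

# Illustrative pyrogram streaming example (placeholder session, ids and values).
import asyncio
from pyrogram import Client

async def main():
    async with Client("my_session", api_id=12345, api_hash="0123456789abcdef") as app:
        message = await app.get_messages(chat_id=-1001234567890, message_ids=1)
        # stream_media counts in ~1 MiB chunks: skip the first chunk,
        # then yield at most three chunks of the media in this message.
        async for chunk in app.stream_media(message, offset=1, limit=3):
            print(len(chunk))  # each chunk is a bytes object

asyncio.run(main())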


