
Commit b8139f9

Kludex and frostming authored
Consider FileResponse.chunk_size when handling multiple ranges (#2703)
* Take the `FileResponse.chunk_size` into consideration on multiple ranges
* Update starlette/responses.py
* Update starlette/responses.py
* Update starlette/responses.py

Co-authored-by: Frost Ming <[email protected]>
1 parent 4fbf766 commit b8139f9

File tree

4 files changed: +66 -14 lines

pyproject.toml
starlette/responses.py
tests/middleware/test_base.py
tests/test_responses.py

pyproject.toml

Lines changed: 1 addition & 0 deletions

@@ -103,4 +103,5 @@ exclude_lines = [
     "pragma: nocover",
     "if typing.TYPE_CHECKING:",
     "@typing.overload",
+    "raise NotImplementedError",
 ]
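
This new exclusion is why the `# pragma: no cover` comments disappear from the tests below: any line matching `raise NotImplementedError` is now excluded from coverage reporting project-wide. A minimal illustration, assuming a project that uses this `exclude_lines` setting (the function name here is hypothetical):

# Hypothetical stub: with "raise NotImplementedError" in coverage's exclude_lines,
# this line no longer needs an inline "# pragma: no cover" marker.
def unsupported_operation() -> None:
    raise NotImplementedError("Should not be called!")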

starlette/responses.py

Lines changed: 6 additions & 10 deletions

@@ -374,13 +374,7 @@ async def _handle_simple(self, send: Send, send_header_only: bool) -> None:
                 while more_body:
                     chunk = await file.read(self.chunk_size)
                     more_body = len(chunk) == self.chunk_size
-                    await send(
-                        {
-                            "type": "http.response.body",
-                            "body": chunk,
-                            "more_body": more_body,
-                        }
-                    )
+                    await send({"type": "http.response.body", "body": chunk, "more_body": more_body})
 
     async def _handle_single_range(
         self, send: Send, start: int, end: int, file_size: int, send_header_only: bool
@@ -419,10 +413,12 @@ async def _handle_multiple_ranges(
         else:
             async with await anyio.open_file(self.path, mode="rb") as file:
                 for start, end in ranges:
-                    await file.seek(start)
-                    chunk = await file.read(min(self.chunk_size, end - start))
                     await send({"type": "http.response.body", "body": header_generator(start, end), "more_body": True})
-                    await send({"type": "http.response.body", "body": chunk, "more_body": True})
+                    await file.seek(start)
+                    while start < end:
+                        chunk = await file.read(min(self.chunk_size, end - start))
+                        start += len(chunk)
+                        await send({"type": "http.response.body", "body": chunk, "more_body": True})
                     await send({"type": "http.response.body", "body": b"\n", "more_body": True})
                 await send(
                     {
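
The key change is the new `while` loop: the old code read each range with a single `file.read(min(self.chunk_size, end - start))`, so a range longer than `chunk_size` only had its first `chunk_size` bytes sent, while the new loop keeps reading until the whole range has been streamed, one `chunk_size`-sized piece at a time. A standalone sketch of the same pattern (not Starlette's own code; the path, range, and chunk size below are illustrative):

import anyio


async def read_range_chunked(path: str, start: int, end: int, chunk_size: int) -> list[bytes]:
    # Collect the [start, end) byte range of the file in chunk_size-sized pieces,
    # mirroring the loop added to FileResponse._handle_multiple_ranges.
    chunks: list[bytes] = []
    async with await anyio.open_file(path, mode="rb") as file:
        await file.seek(start)
        while start < end:
            # Never read past the end of the requested range.
            chunk = await file.read(min(chunk_size, end - start))
            if not chunk:
                break  # the range runs past EOF; stop instead of looping forever
            start += len(chunk)
            chunks.append(chunk)
    return chunks


# Example: the first 16 bytes of README.md in 10-byte pieces -> two chunks.
print(anyio.run(read_range_chunked, "README.md", 0, 16, 10))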

tests/middleware/test_base.py

Lines changed: 3 additions & 3 deletions

@@ -289,7 +289,7 @@ async def passthrough(
     }
 
     async def receive() -> Message:
-        raise NotImplementedError("Should not be called!")  # pragma: no cover
+        raise NotImplementedError("Should not be called!")
 
     async def send(message: Message) -> None:
         if message["type"] == "http.response.body":
@@ -330,7 +330,7 @@ async def passthrough(request: Request, call_next: RequestResponseEndpoint) -> R
     }
 
     async def receive() -> Message:
-        raise NotImplementedError("Should not be called!")  # pragma: no cover
+        raise NotImplementedError("Should not be called!")
 
     async def send(message: Message) -> None:
         if message["type"] == "http.response.body":
@@ -403,7 +403,7 @@ async def passthrough(
     }
 
     async def receive() -> Message:
-        raise NotImplementedError("Should not be called!")  # pragma: no cover
+        raise NotImplementedError("Should not be called!")
 
     async def send(message: Message) -> None:
         if message["type"] == "http.response.body":

tests/test_responses.py

Lines changed: 56 additions & 1 deletion

@@ -4,7 +4,7 @@
 import time
 from http.cookies import SimpleCookie
 from pathlib import Path
-from typing import AsyncIterator, Iterator
+from typing import Any, AsyncIterator, Iterator
 
 import anyio
 import pytest
@@ -682,3 +682,58 @@ def test_file_response_insert_ranges(file_response_client: TestClient) -> None:
         "",
         f"--{boundary}--",
     ]
+
+
+@pytest.mark.anyio
+async def test_file_response_multi_small_chunk_size(readme_file: Path) -> None:
+    class SmallChunkSizeFileResponse(FileResponse):
+        chunk_size = 10
+
+    app = SmallChunkSizeFileResponse(path=str(readme_file))
+
+    received_chunks: list[bytes] = []
+    start_message: dict[str, Any] = {}
+
+    async def receive() -> Message:
+        raise NotImplementedError("Should not be called!")
+
+    async def send(message: Message) -> None:
+        if message["type"] == "http.response.start":
+            start_message.update(message)
+        elif message["type"] == "http.response.body":
+            received_chunks.append(message["body"])
+
+    await app({"type": "http", "method": "get", "headers": [(b"range", b"bytes=0-15,20-35,35-50")]}, receive, send)
+    assert start_message["status"] == 206
+
+    headers = Headers(raw=start_message["headers"])
+    assert headers.get("content-type") == "text/plain; charset=utf-8"
+    assert headers.get("accept-ranges") == "bytes"
+    assert "content-length" in headers
+    assert "last-modified" in headers
+    assert "etag" in headers
+    assert headers["content-range"].startswith("multipart/byteranges; boundary=")
+    boundary = headers["content-range"].split("boundary=")[1]
+
+    assert received_chunks == [
+        # Send the part headers.
+        f"--{boundary}\nContent-Type: text/plain; charset=utf-8\nContent-Range: bytes 0-15/526\n\n".encode(),
+        # Send the first chunk (10 bytes).
+        b"# B\xc3\xa1iZ\xc3\xa9\n",
+        # Send the second chunk (6 bytes).
+        b"\nPower",
+        # Send the new line to separate the parts.
+        b"\n",
+        # Send the part headers. We merge the ranges 20-35 and 35-50 into a single part.
+        f"--{boundary}\nContent-Type: text/plain; charset=utf-8\nContent-Range: bytes 20-50/526\n\n".encode(),
+        # Send the first chunk (10 bytes).
+        b"and exquis",
+        # Send the second chunk (10 bytes).
+        b"ite WSGI/A",
+        # Send the third chunk (10 bytes).
+        b"SGI framew",
+        # Send the last chunk (1 byte).
+        b"o",
+        b"\n",
+        f"\n--{boundary}--\n".encode(),
+    ]
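
As a rough usage sketch (not part of this change), this is how the new behaviour looks from the client side; the app layout, route, and file path are assumptions, while the Range header and the 206 multipart response mirror the test above:

from starlette.applications import Starlette
from starlette.responses import FileResponse
from starlette.routing import Route
from starlette.testclient import TestClient


async def readme(request):
    # Serve a local file; the path is illustrative and must exist on disk.
    return FileResponse("README.md")


app = Starlette(routes=[Route("/readme", readme)])
client = TestClient(app)

# Ask for three byte ranges in one request; the overlapping ranges 20-35 and
# 35-50 are merged into a single multipart part, and each part is streamed in
# chunk_size pieces (invisible to the client, which just sees the full body).
response = client.get("/readme", headers={"Range": "bytes=0-15,20-35,35-50"})
assert response.status_code == 206
print(response.headers["content-range"])  # multipart/byteranges; boundary=...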
