apply max_form_memory_size another level up in the parser
davidism committed Oct 25, 2024
1 parent 2fc6d4f, commit 5e78c41
Showing 4 changed files with 38 additions and 1 deletion.

CHANGES.rst (6 additions, 0 deletions)
@@ -1,3 +1,9 @@
+0.19.7
+------
+
+* Security: Fix how ``max_form_memory_size`` is applied when parsing large
+  non-file fields. https://github.com/advisories/GHSA-q34m-jh98-gwm2
+
 0.19.6 2024-05-19
 -----------------

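The changelog entry refers to GHSA-q34m-jh98-gwm2: max_form_memory_size is meant to cap how much non-file field data is buffered in memory, and this commit applies that limit one level up, in the parse loop itself, so a single field's bytes are counted cumulatively across data events. As a rough illustration (not part of the commit; the helper name and sizes are made up), this is the shape of payload the limit is meant to reject once the fix is in place:

def build_large_field_body(boundary: bytes = b"bound", size: int = 10 * 1024 * 1024) -> bytes:
    # One multipart *field* (no filename, so it is buffered in memory rather
    # than streamed to disk) whose value is far larger than any reasonable
    # max_form_memory_size. With the fix, parsing such a body raises
    # RequestEntityTooLarge instead of buffering the whole value.
    head = b"--%s\r\nContent-Disposition: form-data; name=a\r\n\r\n" % boundary
    tail = b"\r\n--%s--\r\n" % boundary
    return head + b"a" * size + tail
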

pyproject.toml (1 addition, 1 deletion)
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "Quart"
-version = "0.19.6"
+version = "0.19.7.dev"
 description = "A Python ASGI web microframework with the same API as Flask"
 authors = ["pgjones <philip.graham.jones@googlemail.com>"]
 classifiers = [

src/quart/formparser.py (10 additions, 0 deletions)
@@ -15,6 +15,7 @@
 from urllib.parse import parse_qsl

 from werkzeug.datastructures import Headers, MultiDict
+from werkzeug.exceptions import RequestEntityTooLarge
 from werkzeug.formparser import default_stream_factory
 from werkzeug.http import parse_options_header
 from werkzeug.sansio.multipart import Data, Epilogue, Field, File, MultipartDecoder, NeedData
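
The only change in this hunk is the new import: RequestEntityTooLarge is werkzeug's HTTPException for status 413, which the parse loop below now raises when a field outgrows the limit. A one-line sanity check of that status-code assumption:

from werkzeug.exceptions import RequestEntityTooLarge

# Raising this from the form parser surfaces to the application as a
# 413 "Request Entity Too Large" error response.
assert RequestEntityTooLarge.code == 413
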
@@ -173,19 +174,28 @@ async def parse(
         files = []

         current_part: Field | File
+        field_size: int | None = None
         async for data in body:
             parser.receive_data(data)
             event = parser.next_event()
             while not isinstance(event, (Epilogue, NeedData)):
                 if isinstance(event, Field):
                     current_part = event
+                    field_size = 0
                     container = []
                     _write = container.append
                 elif isinstance(event, File):
                     current_part = event
+                    field_size = None
                     container = self.start_file_streaming(event, content_length)
                     _write = container.write
                 elif isinstance(event, Data):
+                    if field_size is not None:
+                        field_size += len(event.data)
+
+                        if field_size > self.max_form_memory_size:
+                            raise RequestEntityTooLarge()
+
                     _write(event.data)
                     if not event.more_data:
                         if isinstance(current_part, Field):
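
Summing up the hunk above: the parser keeps a running field_size for the current non-file field, adds the length of each Data event, and raises RequestEntityTooLarge as soon as the total passes max_form_memory_size; field_size is reset to None for File parts, which go through start_file_streaming instead of being buffered. A standalone sketch of that accounting pattern (accumulate_field is illustrative, not Quart API):

from __future__ import annotations

from werkzeug.exceptions import RequestEntityTooLarge


def accumulate_field(chunks: list[bytes], max_form_memory_size: int) -> bytes:
    # Count a field's bytes across data events, mirroring the check the parser
    # now performs, and refuse before buffering the chunk that goes over.
    field_size = 0
    container: list[bytes] = []
    for chunk in chunks:
        field_size += len(chunk)
        if field_size > max_form_memory_size:
            raise RequestEntityTooLarge()
        container.append(chunk)
    return b"".join(container)


# accumulate_field([b"aaaaa"] * 2, max_form_memory_size=10) returns 10 bytes;
# accumulate_field([b"aaaaa"] * 3, max_form_memory_size=10) raises on the third chunk.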

tests/test_formparser.py (21 additions, 0 deletions)
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+import pytest
+from werkzeug.exceptions import RequestEntityTooLarge
+
+from quart.formparser import MultiPartParser
+from quart.wrappers.request import Body
+
+
+async def test_multipart_max_form_memory_size() -> None:
+    """max_form_memory_size is tracked across multiple data events."""
+    data = b"--bound\r\nContent-Disposition: form-field; name=a\r\n\r\n"
+    data += b"a" * 15 + b"\r\n--bound--"
+    body = Body(None, None)
+    body.set_result(data)
+    # The buffer size is less than the max size, so multiple data events will be
+    # returned. The field size is greater than the max.
+    parser = MultiPartParser(max_form_memory_size=10, buffer_size=5)
+
+    with pytest.raises(RequestEntityTooLarge):
+        await parser.parse(body, b"bound", 0)
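
For contrast with the failing case in the test, here is a hedged sketch of the passing case: the same body shape with a field that stays under the limit should parse to completion, with the decoded value available in the returned fields mapping (assuming parse returns a (fields, files) pair of MultiDicts, as the parser code suggests). The test relies on the project's async pytest setup; run standalone, the coroutine needs an event loop, e.g. asyncio.run:

import asyncio

from quart.formparser import MultiPartParser
from quart.wrappers.request import Body


async def parse_small_field() -> None:
    # Same shape as the test, but the field value (5 bytes) stays under
    # max_form_memory_size (10 bytes), so no RequestEntityTooLarge is raised.
    data = b"--bound\r\nContent-Disposition: form-field; name=a\r\n\r\n"
    data += b"a" * 5 + b"\r\n--bound--"
    body = Body(None, None)
    body.set_result(data)
    parser = MultiPartParser(max_form_memory_size=10, buffer_size=5)
    fields, files = await parser.parse(body, b"bound", 0)
    print(fields.get("a"))  # expected: "aaaaa"


if __name__ == "__main__":
    asyncio.run(parse_small_field())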
