@@ -104,16 +104,15 @@ async def test_write_large_payload_deflate_compression_data_in_eof(
     assert transport.write.called  # type: ignore[attr-defined]
     chunks = [c[1][0] for c in list(transport.write.mock_calls)]  # type: ignore[attr-defined]
     transport.write.reset_mock()  # type: ignore[attr-defined]
-    assert not transport.writelines.called  # type: ignore[attr-defined]

     # This payload compresses to 20447 bytes
     payload = b"".join(
         [bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)]
     )
     await msg.write_eof(payload)
-    assert not transport.write.called  # type: ignore[attr-defined]
-    assert transport.writelines.called  # type: ignore[attr-defined]
-    chunks.extend(transport.writelines.mock_calls[0][1][0])  # type: ignore[attr-defined]
+    chunks.extend([c[1][0] for c in list(transport.write.mock_calls)])  # type: ignore[attr-defined]
+
+    assert all(chunks)
     content = b"".join(chunks)
     assert zlib.decompress(content) == (b"data" * 4096) + payload
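For reference on the indexing used in these assertions: each entry of a Mock's mock_calls is a (name, args, kwargs) triple, so c[1][0] is the first positional argument passed to transport.write in that call. A minimal standalone sketch with unittest.mock (not tied to aiohttp's transport, shown only to illustrate the idiom):

# Standalone sketch: how c[1][0] pulls the written bytes out of each
# recorded call on a plain unittest.mock.Mock.
from unittest import mock

transport = mock.Mock()
transport.write(b"hello")
transport.write(b"world")

# mock_calls entries index as (name, args, kwargs), so [1][0] is the
# first positional argument of each call.
chunks = [c[1][0] for c in transport.write.mock_calls]
assert chunks == [b"hello", b"world"]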
@@ -180,7 +179,7 @@ async def test_write_payload_deflate_compression_chunked(
     await msg.write(b"data")
     await msg.write_eof()

-    chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)]  # type: ignore[attr-defined]
+    chunks = [c[1][0] for c in list(transport.write.mock_calls)]  # type: ignore[attr-defined]
     assert all(chunks)
     content = b"".join(chunks)
     assert content == expected
@@ -216,7 +215,7 @@ async def test_write_payload_deflate_compression_chunked_data_in_eof(
     await msg.write(b"data")
     await msg.write_eof(b"end")

-    chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)]  # type: ignore[attr-defined]
+    chunks = [c[1][0] for c in list(transport.write.mock_calls)]  # type: ignore[attr-defined]
     assert all(chunks)
     content = b"".join(chunks)
     assert content == expected
@@ -235,16 +234,16 @@ async def test_write_large_payload_deflate_compression_chunked_data_in_eof(
     # This payload compresses to 1111 bytes
     payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)])
     await msg.write_eof(payload)
-    assert not transport.write.called  # type: ignore[attr-defined]

-    chunks = []
-    for write_lines_call in transport.writelines.mock_calls:  # type: ignore[attr-defined]
-        chunked_payload = list(write_lines_call[1][0])[1:]
-        chunked_payload.pop()
-        chunks.extend(chunked_payload)
+    compressed = []
+    chunks = [c[1][0] for c in list(transport.write.mock_calls)]  # type: ignore[attr-defined]
+    chunked_body = b"".join(chunks)
+    split_body = chunked_body.split(b"\r\n")
+    while split_body:
+        if split_body.pop(0):
+            compressed.append(split_body.pop(0))

-    assert all(chunks)
-    content = b"".join(chunks)
+    content = b"".join(compressed)
     assert zlib.decompress(content) == (b"data" * 4096) + payload
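For context on the parsing loop added above: with chunked transfer encoding the body alternates a hex size line and a data line, each terminated by \r\n, and ends with a zero-size chunk, so splitting on \r\n and popping the lines in pairs recovers just the compressed data pieces. A minimal standalone sketch over a hand-built chunked body (the body literal is an assumption for illustration, not output captured from aiohttp):

# Standalone sketch of the pair-wise pop used in the test above.
# The chunked body literal is hand-built for illustration.
chunked_body = b"4\r\ndata\r\n3\r\nend\r\n0\r\n\r\n"

compressed = []
split_body = chunked_body.split(b"\r\n")
while split_body:
    if split_body.pop(0):  # size line; the final b"0" only yields an empty piece
        compressed.append(split_body.pop(0))

assert b"".join(compressed) == b"dataend"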