File CVE-2024-41671.patch of Package python-Twisted.34943 (Project SUSE:SLE-15-SP4:Update)
Index: Twisted-19.10.0/src/twisted/web/http.py
===================================================================
--- Twisted-19.10.0.orig/src/twisted/web/http.py
+++ Twisted-19.10.0/src/twisted/web/http.py
@@ -1904,6 +1904,9 @@ class _ChunkedTransferDecoder(object):
         self.finishCallback = finishCallback
         self._buffer = bytearray()
         self._start = 0
+        self._trailerHeaders = []
+        self._maxTrailerHeadersSize = 2**16
+        self._receivedTrailerHeadersSize = 0
 
     def _dataReceived_CHUNK_LENGTH(self):
@@ -1964,19 +1967,43 @@ class _ChunkedTransferDecoder(object):
 
     def _dataReceived_TRAILER(self):
-        if len(self._buffer) < 2:
+        eolIndex = self._buffer.find(b"\r\n", self._start)
+
+        if eolIndex == -1:
+            # Still no end of network line marker found.
+            #
+            # Check if we've run up against the trailer size limit: if the next
+            # read contains the terminating CRLF then we'll have this many bytes
+            # of trailers (including the CRLFs).
+            minTrailerSize = (
+                self._receivedTrailerHeadersSize
+                + len(self._buffer)
+                + (1 if self._buffer.endswith(b"\r") else 2)
+            )
+            if minTrailerSize > self._maxTrailerHeadersSize:
+                raise _MalformedChunkedDataError("Trailer headers data is too long.")
+            # Continue processing more data.
             return False
 
-        if not self._buffer.startswith(b"\r\n"):
-            raise _MalformedChunkedDataError("Chunk did not end with CRLF")
+        if eolIndex > 0:
+            # A trailer header was detected.
+            self._trailerHeaders.append(self._buffer[0:eolIndex])
+            del self._buffer[0 : eolIndex + 2]
+            self._start = 0
+            self._receivedTrailerHeadersSize += eolIndex + 2
+            if self._receivedTrailerHeadersSize > self._maxTrailerHeadersSize:
+                raise _MalformedChunkedDataError("Trailer headers data is too long.")
+            return True
+
+        # eolIndex in this part of code is equal to 0
 
         data = memoryview(self._buffer)[2:].tobytes()
+        del self._buffer[:]
         self.state = "FINISHED"
         self.finishCallback(data)
         return False
 
-
     def _dataReceived_BODY(self):
         if len(self._buffer) >= self.length:
             chunk = memoryview(self._buffer)[: self.length].tobytes()
@@ -2287,8 +2314,8 @@ class HTTPChannel(basic.LineReceiver, po
 
     def _finishRequestBody(self, data):
-        self.allContentReceived()
         self._dataBuffer.append(data)
+        self.allContentReceived()
 
     def _maybeChooseTransferDecoder(self, header, data):
Index: Twisted-19.10.0/src/twisted/web/test/test_http.py
===================================================================
--- Twisted-19.10.0.orig/src/twisted/web/test/test_http.py
+++ Twisted-19.10.0/src/twisted/web/test/test_http.py
@@ -590,29 +590,41 @@ class PipeliningBodyTests(unittest.TestC
         b"POST / HTTP/1.1\r\n"
         b"Content-Length: 10\r\n"
         b"\r\n"
-        b"0123456789POST / HTTP/1.1\r\n"
-        b"Content-Length: 10\r\n"
-        b"\r\n"
         b"0123456789"
+        # Chunk encoded request.
+        b"POST / HTTP/1.1\r\n"
+        b"Transfer-Encoding: chunked\r\n"
+        b"\r\n"
+        b"a\r\n"
+        b"0123456789\r\n"
+        b"0\r\n"
+        b"\r\n"
     )
 
     expectedResponses = [
-        (b"HTTP/1.1 200 OK",
-         b"Request: /",
-         b"Command: POST",
-         b"Version: HTTP/1.1",
-         b"Content-Length: 21",
-         b"'''\n10\n0123456789'''\n"),
-        (b"HTTP/1.1 200 OK",
-         b"Request: /",
-         b"Command: POST",
-         b"Version: HTTP/1.1",
-         b"Content-Length: 21",
-         b"'''\n10\n0123456789'''\n")]
-
-    def test_noPipelining(self):
-        """
-        Test that pipelined requests get buffered, not processed in parallel.
+        (
+            b"HTTP/1.1 200 OK",
+            b"Request: /",
+            b"Command: POST",
+            b"Version: HTTP/1.1",
+            b"Content-Length: 21",
+            b"'''\n10\n0123456789'''\n",
+        ),
+        (
+            b"HTTP/1.1 200 OK",
+            b"Request: /",
+            b"Command: POST",
+            b"Version: HTTP/1.1",
+            b"Content-Length: 23",
+            b"'''\nNone\n0123456789'''\n",
+        ),
+    ]
+
+    def test_stepwiseTinyTube(self):
+        """
+        Imitate a slow connection that delivers one byte at a time.
+        The request handler (L{DelayedHTTPHandler}) is puppeted to
+        step through the handling of each request.
         """
         b = StringTransport()
         a = http.HTTPChannel()
@@ -621,10 +633,9 @@ class PipeliningBodyTests(unittest.TestC
         # one byte at a time, to stress it.
         for byte in iterbytes(self.requests):
            a.dataReceived(byte)
-        value = b.value()
 
         # So far only one request should have been dispatched.
-        self.assertEqual(value, b'')
+        self.assertEqual(b.value(), b"")
         self.assertEqual(1, len(a.requests))
 
         # Now, process each request one at a time.
@@ -633,9 +644,95 @@ class PipeliningBodyTests(unittest.TestC
             request = a.requests[0].original
             request.delayedProcess()
 
-        value = b.value()
-        self.assertResponseEquals(value, self.expectedResponses)
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_stepwiseDumpTruck(self):
+        """
+        Imitate a fast connection where several pipelined
+        requests arrive in a single read. The request handler
+        (L{DelayedHTTPHandler}) is puppeted to step through the
+        handling of each request.
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DelayedHTTPHandlerProxy
+        a.makeConnection(b)
+
+        a.dataReceived(self.requests)
+
+        # So far only one request should have been dispatched.
+        self.assertEqual(b.value(), b"")
+        self.assertEqual(1, len(a.requests))
+
+        # Now, process each request one at a time.
+        while a.requests:
+            self.assertEqual(1, len(a.requests))
+            request = a.requests[0].original
+            request.delayedProcess()
+
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_immediateTinyTube(self):
+        """
+        Imitate a slow connection that delivers one byte at a time.
+
+        (L{DummyHTTPHandler}) immediately responds, but no more
+        than one
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandlerProxy  # "sync"
+        a.makeConnection(b)
+
+        # one byte at a time, to stress it.
+        for byte in iterbytes(self.requests):
+            a.dataReceived(byte)
+            # There is never more than one request dispatched at a time:
+            self.assertLessEqual(len(a.requests), 1)
+
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_immediateDumpTruck(self):
+        """
+        Imitate a fast connection where several pipelined
+        requests arrive in a single read. The request handler
+        (L{DummyHTTPHandler}) immediately responds.
+
+        This doesn't check the at-most-one pending request
+        invariant but exercises otherwise uncovered code paths.
+        See GHSA-c8m8-j448-xjx7.
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandlerProxy
+        a.makeConnection(b)
+
+        # All bytes at once to ensure there's stuff to buffer.
+        a.dataReceived(self.requests)
+
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_immediateABiggerTruck(self):
+        """
+        Imitate a fast connection where a so many pipelined
+        requests arrive in a single read that backpressure is indicated.
+        The request handler (L{DummyHTTPHandler}) immediately responds.
+
+        This doesn't check the at-most-one pending request
+        invariant but exercises otherwise uncovered code paths.
+        See GHSA-c8m8-j448-xjx7.
+
+        @see: L{http.HTTPChannel._optimisticEagerReadSize}
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandlerProxy
+        a.makeConnection(b)
+
+        overLimitCount = a._optimisticEagerReadSize // len(self.requests) * 10
+        a.dataReceived(self.requests * overLimitCount)
+        self.assertResponseEquals(b.value(), self.expectedResponses * overLimitCount)
 
     def test_pipeliningReadLimit(self):
         """
@@ -1414,6 +1511,83 @@ class ChunkedTransferEncodingTests(unitt
             http._MalformedChunkedDataError, p.dataReceived, b"0x3\r\nabc\r\n"
         )
 
+    def test_trailerHeaders(self):
+        """
+        L{_ChunkedTransferDecoder.dataReceived} decodes chunked-encoded data
+        and ignores trailer headers which come after the terminating zero-length
+        chunk.
+        """
+        L = []
+        finished = []
+        p = http._ChunkedTransferDecoder(L.append, finished.append)
+        p.dataReceived(b"3\r\nabc\r\n5\r\n12345\r\n")
+        p.dataReceived(
+            b"a\r\n0123456789\r\n0\r\nServer-Timing: total;dur=123.4\r\nExpires: Wed, 21 Oct 2015 07:28:00 GMT\r\n\r\n"
+        )
+        self.assertEqual(L, [b"abc", b"12345", b"0123456789"])
+        self.assertEqual(finished, [b""])
+        self.assertEqual(
+            p._trailerHeaders,
+            [
+                b"Server-Timing: total;dur=123.4",
+                b"Expires: Wed, 21 Oct 2015 07:28:00 GMT",
+            ],
+        )
+
+    def test_shortTrailerHeader(self):
+        """
+        L{_ChunkedTransferDecoder.dataReceived} decodes chunks of input with
+        tailer header broken up and delivered in multiple calls.
+        """
+        L = []
+        finished = []
+        p = http._ChunkedTransferDecoder(L.append, finished.append)
+        for s in iterbytes(
+            b"3\r\nabc\r\n5\r\n12345\r\n0\r\nServer-Timing: total;dur=123.4\r\n\r\n"
+        ):
+            p.dataReceived(s)
+        self.assertEqual(L, [b"a", b"b", b"c", b"1", b"2", b"3", b"4", b"5"])
+        self.assertEqual(finished, [b""])
+        self.assertEqual(p._trailerHeaders, [b"Server-Timing: total;dur=123.4"])
+
+    def test_tooLongTrailerHeader(self):
+        r"""
+        L{_ChunkedTransferDecoder.dataReceived} raises
+        L{_MalformedChunkedDataError} when the trailing headers data is too long.
+        """
+        p = http._ChunkedTransferDecoder(
+            lambda b: None,
+            lambda b: None,  # pragma: nocov
+        )
+        p._maxTrailerHeadersSize = 10
+        self.assertRaises(
+            http._MalformedChunkedDataError,
+            p.dataReceived,
+            b"3\r\nabc\r\n0\r\nTotal-Trailer: header;greater-then=10\r\n\r\n",
+        )
+
+    def test_unfinishedTrailerHeader(self):
+        r"""
+        L{_ChunkedTransferDecoder.dataReceived} raises
+        L{_MalformedChunkedDataError} when the trailing headers data is too long
+        and doesn't have final CRLF characters.
+        """
+        p = http._ChunkedTransferDecoder(
+            lambda b: None,
+            lambda b: None,  # pragma: nocov
+        )
+        p._maxTrailerHeadersSize = 10
+        # 9 bytes are received so far, in 2 packets.
+        # For now, all is ok.
+        p.dataReceived(b"3\r\nabc\r\n0\r\n01234567")
+        p.dataReceived(b"\r")
+        # Once the 10th byte is received, the processing fails.
+        self.assertRaises(
+            http._MalformedChunkedDataError,
+            p.dataReceived,
+            b"A",
+        )
+
 
 class ChunkingTests(unittest.TestCase, ResponseTestMixin):
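For orientation, here is a minimal sketch (not part of the patch or the package) of the behaviour the http.py hunks above introduce, assuming the patched Twisted from this package is importable as twisted.web.http. It only uses names that appear in the diff (_ChunkedTransferDecoder, _trailerHeaders, _maxTrailerHeadersSize, _MalformedChunkedDataError); the trailer header values are illustrative.

    # Sketch: the patched decoder collects trailer headers after the
    # terminating zero-length chunk instead of leaving them in the buffer,
    # and caps their total size at _maxTrailerHeadersSize (2**16 by default).
    from twisted.web import http

    chunks = []
    finished = []
    decoder = http._ChunkedTransferDecoder(chunks.append, finished.append)

    # A chunked body, one trailer header, then the terminating CRLF.
    decoder.dataReceived(b"3\r\nabc\r\n0\r\nX-Trailer: ok\r\n\r\n")
    print(chunks)                   # [b'abc']
    print(finished)                 # [b'']
    print(decoder._trailerHeaders)  # [bytearray(b'X-Trailer: ok')]

    # Trailer data beyond the configured limit is rejected, as exercised by
    # the new test_tooLongTrailerHeader / test_unfinishedTrailerHeader tests.
    strict = http._ChunkedTransferDecoder(lambda data: None, lambda rest: None)
    strict._maxTrailerHeadersSize = 10
    try:
        strict.dataReceived(b"3\r\nabc\r\n0\r\nX-Long-Trailer: aaaaaaaaaaaa\r\n\r\n")
    except http._MalformedChunkedDataError as error:
        print("rejected:", error)   # rejected: Trailer headers data is too long.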