gentoo-overlay/dev-python/twisted/files/twisted-20.3.0-py38-cgi.patch

From 62ab0203c59c1f9788c53dfad4a212774094d05c Mon Sep 17 00:00:00 2001
From: Craig Rodrigues <rodrigc@FreeBSD.org>
Date: Mon, 13 Apr 2020 01:22:23 -0700
Subject: [PATCH 2/2] Merge 9801-rodrigc-cgi: Change import of cgi.parse_qs to
urllib.parse.parse_qs
Author: rodrigc
Reviewer: hawkowl
Fixes: ticket:9801
---
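[Note, not part of the upstream commit: cgi.parse_qs had been deprecated since Python 2.6 and was removed outright in Python 3.8, while urllib.parse.parse_qs takes the same arguments and also accepts bytes, so the call sites in the tests below can switch over directly. A minimal sketch of the substitution, not taken from the Twisted sources:

    # Before (fails on Python 3.8+):  from cgi import parse_qs
    from urllib.parse import parse_qs   # accepts str or bytes query strings

    parse_qs(b"a=b&b=c")   # {b'a': [b'b'], b'b': [b'c']}
]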
src/twisted/web/client.py | 17 ++++-----
src/twisted/web/http.py | 49 ++++++++++++-------------
src/twisted/web/newsfragments/9801.misc | 0
src/twisted/web/test/test_http.py | 41 +++------------------
src/twisted/web/test/test_webclient.py | 5 +--
5 files changed, 38 insertions(+), 74 deletions(-)
create mode 100644 src/twisted/web/newsfragments/9801.misc
diff --git a/src/twisted/web/client.py b/src/twisted/web/client.py
index 7e4642ef3..8209f5a5e 100644
--- a/src/twisted/web/client.py
+++ b/src/twisted/web/client.py
@@ -12,15 +12,8 @@ import os
import collections
import warnings
-try:
- from urlparse import urlunparse, urljoin, urldefrag
-except ImportError:
- from urllib.parse import urljoin, urldefrag
- from urllib.parse import urlunparse as _urlunparse
-
- def urlunparse(parts):
- result = _urlunparse(tuple([p.decode("charmap") for p in parts]))
- return result.encode("charmap")
+from urllib.parse import urljoin, urldefrag
+from urllib.parse import urlunparse as _urlunparse
import zlib
from functools import wraps
@@ -51,6 +44,12 @@ from twisted.web._newclient import _ensureValidURI, _ensureValidMethod
+def urlunparse(parts):
+ result = _urlunparse(tuple([p.decode("charmap") for p in parts]))
+ return result.encode("charmap")
+
+
+
class PartialDownloadError(error.Error):
"""
Page was only partially downloaded, we got disconnected in middle.
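[Note: the client.py hunk above only drops the Python 2 urlparse fallback; the bytes-aware urlunparse wrapper is kept and merely moves to module level. Roughly, assuming the six bytes components Twisted already passes around:

    from urllib.parse import urlunparse as _urlunparse

    def urlunparse(parts):
        # Decode each bytes part to str for the stdlib helper, then encode
        # the joined URL back so callers keep working purely with bytes.
        result = _urlunparse(tuple(p.decode("charmap") for p in parts))
        return result.encode("charmap")

    urlunparse((b"http", b"example.com", b"/index.html", b"", b"q=1", b""))
    # b'http://example.com/index.html?q=1'
]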
diff --git a/src/twisted/web/http.py b/src/twisted/web/http.py
index b7afa8b0d..94d0ae81f 100644
--- a/src/twisted/web/http.py
+++ b/src/twisted/web/http.py
@@ -66,27 +66,10 @@ import time
import calendar
import warnings
import os
-from io import BytesIO as StringIO
-
-try:
- from urlparse import (
- ParseResult as ParseResultBytes, urlparse as _urlparse)
- from urllib import unquote
- from cgi import parse_header as _parseHeader
-except ImportError:
- from urllib.parse import (
- ParseResultBytes, urlparse as _urlparse, unquote_to_bytes as unquote)
-
- def _parseHeader(line):
- # cgi.parse_header requires a str
- key, pdict = cgi.parse_header(line.decode('charmap'))
-
- # We want the key as bytes, and cgi.parse_multipart (which consumes
- # pdict) expects a dict of str keys but bytes values
- key = key.encode('charmap')
- pdict = {x:y.encode('charmap') for x, y in pdict.items()}
- return (key, pdict)
+from io import BytesIO
+from urllib.parse import (
+ ParseResultBytes, urlparse as _urlparse, unquote_to_bytes as unquote)
from zope.interface import Attribute, Interface, implementer, provider
@@ -163,6 +146,20 @@ monthname = [None,
weekdayname_lower = [name.lower() for name in weekdayname]
monthname_lower = [name and name.lower() for name in monthname]
+
+
+def _parseHeader(line):
+ # cgi.parse_header requires a str
+ key, pdict = cgi.parse_header(line.decode('charmap'))
+
+ # We want the key as bytes, and cgi.parse_multipart (which consumes
+ # pdict) expects a dict of str keys but bytes values
+ key = key.encode('charmap')
+ pdict = {x: y.encode('charmap') for x, y in pdict.items()}
+ return (key, pdict)
+
+
+
def urlparse(url):
"""
Parse an URL into six components.
@@ -486,13 +483,15 @@ class _IDeprecatedHTTPChannelToRequestInterface(Interface):
class StringTransport:
"""
- I am a StringIO wrapper that conforms for the transport API. I support
+ I am a BytesIO wrapper that conforms for the transport API. I support
the `writeSequence' method.
"""
def __init__(self):
- self.s = StringIO()
+ self.s = BytesIO()
+
def writeSequence(self, seq):
self.s.write(b''.join(seq))
+
def __getattr__(self, attr):
return getattr(self.__dict__['s'], attr)
@@ -513,7 +512,7 @@ class HTTPClient(basic.LineReceiver):
@type firstLine: C{bool}
@ivar __buffer: The buffer that stores the response to the HTTP request.
- @type __buffer: A C{StringIO} object.
+ @type __buffer: A C{BytesIO} object.
@ivar _header: Part or all of an HTTP request header.
@type _header: C{bytes}
@@ -579,7 +578,7 @@ class HTTPClient(basic.LineReceiver):
if self._header != b"":
# Only extract headers if there are any
self.extractHeader(self._header)
- self.__buffer = StringIO()
+ self.__buffer = BytesIO()
self.handleEndHeaders()
self.setRawMode()
return
@@ -665,7 +664,7 @@ def _getContentFile(length):
Get a writeable file-like object to which request content can be written.
"""
if length is not None and length < 100000:
- return StringIO()
+ return BytesIO()
return tempfile.TemporaryFile()
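[Note: the relocated _parseHeader helper above still leans on cgi.parse_header, which, unlike cgi.parse_qs, survives in Python 3.8; it translates between the bytes on the wire and the str interface the cgi module expects. A rough illustration of what it returns for a multipart Content-Type value, not part of the patch itself:

    import cgi

    def _parseHeader(line):
        # cgi.parse_header wants str; header lines arrive as bytes.
        key, pdict = cgi.parse_header(line.decode('charmap'))
        # bytes key back out; bytes values for cgi.parse_multipart's pdict.
        return key.encode('charmap'), {k: v.encode('charmap') for k, v in pdict.items()}

    _parseHeader(b'multipart/form-data; boundary=AaB03x')
    # (b'multipart/form-data', {'boundary': b'AaB03x'})
]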
diff --git a/src/twisted/web/newsfragments/9801.misc b/src/twisted/web/newsfragments/9801.misc
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/twisted/web/test/test_http.py b/src/twisted/web/test/test_http.py
index a3067f732..4189b307c 100644
--- a/src/twisted/web/test/test_http.py
+++ b/src/twisted/web/test/test_http.py
@@ -9,15 +9,11 @@ from __future__ import absolute_import, division
import base64
import calendar
-import cgi
import random
import hamcrest
-try:
- from urlparse import urlparse, urlunsplit, clear_cache
-except ImportError:
- from urllib.parse import urlparse, urlunsplit, clear_cache
+from urllib.parse import urlparse, urlunsplit, clear_cache, parse_qs
from io import BytesIO
from itertools import cycle
@@ -28,7 +24,7 @@ from zope.interface import (
)
from zope.interface.verify import verifyObject
-from twisted.python.compat import (_PY3, iterbytes, long, networkString,
+from twisted.python.compat import (iterbytes, long, networkString,
unicode, intToBytes)
from twisted.python.components import proxyForInterface
from twisted.python.failure import Failure
@@ -2019,33 +2015,6 @@ Content-Type: application/x-www-form-urlencoded
self.assertEqual(content, [networkString(query)])
- def test_missingContentDisposition(self):
- """
- If the C{Content-Disposition} header is missing, the request is denied
- as a bad request.
- """
- req = b'''\
-POST / HTTP/1.0
-Content-Type: multipart/form-data; boundary=AaB03x
-Content-Length: 103
-
---AaB03x
-Content-Type: text/plain
-Content-Transfer-Encoding: quoted-printable
-
-abasdfg
---AaB03x--
-'''
- channel = self.runRequest(req, http.Request, success=False)
- self.assertEqual(
- channel.transport.value(),
- b"HTTP/1.1 400 Bad Request\r\n\r\n")
-
- if _PY3:
- test_missingContentDisposition.skip = (
- "cgi.parse_multipart is much more error-tolerant on Python 3.")
-
-
def test_multipartProcessingFailure(self):
"""
When the multipart processing fails the client gets a 400 Bad Request.
@@ -2373,15 +2342,15 @@ ok
class QueryArgumentsTests(unittest.TestCase):
def testParseqs(self):
self.assertEqual(
- cgi.parse_qs(b"a=b&d=c;+=f"),
+ parse_qs(b"a=b&d=c;+=f"),
http.parse_qs(b"a=b&d=c;+=f"))
self.assertRaises(
ValueError, http.parse_qs, b"blah", strict_parsing=True)
self.assertEqual(
- cgi.parse_qs(b"a=&b=c", keep_blank_values=1),
+ parse_qs(b"a=&b=c", keep_blank_values=1),
http.parse_qs(b"a=&b=c", keep_blank_values=1))
self.assertEqual(
- cgi.parse_qs(b"a=&b=c"),
+ parse_qs(b"a=&b=c"),
http.parse_qs(b"a=&b=c"))
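[Note: with cgi gone from the test module, the assertions above compare Twisted's bytes-oriented http.parse_qs against urllib.parse.parse_qs directly. A small sketch of the stdlib behaviour those assertions rely on, on the Python 3.8 line this patch targets:

    from urllib.parse import parse_qs

    parse_qs(b"a=&b=c")                        # {b'b': [b'c']}, blank value dropped
    parse_qs(b"a=&b=c", keep_blank_values=1)   # {b'a': [b''], b'b': [b'c']}

    # strict_parsing turns malformed fields into errors, matching the
    # ValueError the test expects from http.parse_qs.
    parse_qs(b"blah", strict_parsing=True)     # raises ValueError
]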
diff --git a/src/twisted/web/test/test_webclient.py b/src/twisted/web/test/test_webclient.py
index 680e02780..672594993 100644
--- a/src/twisted/web/test/test_webclient.py
+++ b/src/twisted/web/test/test_webclient.py
@@ -11,10 +11,7 @@ import io
import os
from errno import ENOSPC
-try:
- from urlparse import urlparse, urljoin
-except ImportError:
- from urllib.parse import urlparse, urljoin
+from urllib.parse import urlparse, urljoin
from twisted.python.compat import networkString, nativeString, intToBytes
from twisted.trial import unittest, util
--
2.26.2