Skip to content

Commit 01aa64e

Browse files
Merge branch 'main' into typealiasmod
2 parents 98cc5fa + febcc6c commit 01aa64e

14 files changed

Lines changed: 257 additions & 98 deletions

File tree

.github/CODEOWNERS

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ configure* @erlend-aasland @corona10
2222
**/*hamt* @1st1
2323
Objects/set* @rhettinger
2424
Objects/dict* @methane @markshannon
25+
Objects/typevarobject.c @JelleZijlstra
2526
Objects/type* @markshannon
2627
Objects/codeobject.c @markshannon
2728
Objects/frameobject.c @markshannon
@@ -33,6 +34,7 @@ Python/flowgraph.c @markshannon @iritkatriel
3334
Python/ast_opt.c @isidentical
3435
Lib/test/test_patma.py @brandtbucher
3536
Lib/test/test_peepholer.py @brandtbucher
37+
Lib/test/test_type_*.py @JelleZijlstra
3638

3739
# Exceptions
3840
Lib/traceback.py @iritkatriel

Doc/library/http.client.rst

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -394,6 +394,17 @@ HTTPConnection Objects
394394
one will be automatically generated and transmitted if not provided in
395395
the headers argument.
396396

397+
398+
.. method:: HTTPConnection.get_proxy_response_headers()
399+
400+
Returns a dictionary with the headers of the response received from
401+
the proxy server to the CONNECT request.
402+
403+
If the CONNECT request was not sent, the method returns an empty dictionary.
404+
405+
.. versionadded:: 3.12
406+
407+
397408
.. method:: HTTPConnection.connect()
398409

399410
Connect to the server specified when the object was created. By default,

Doc/library/logging.config.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ in :mod:`logging` itself) and defining handlers which are declared either in
111111
they or their ancestors are explicitly named
112112
in the logging configuration.
113113

114-
:param encoding: The encoding used to open file when *fname* is filename.
114+
:param encoding: The encoding used to open file when *fname* is filename.
115115

116116
.. versionchanged:: 3.4
117117
An instance of a subclass of :class:`~configparser.RawConfigParser` is

Lib/http/client.py

Lines changed: 29 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -221,8 +221,9 @@ def _read_headers(fp):
221221
break
222222
return headers
223223

224-
def parse_headers(fp, _class=HTTPMessage):
225-
"""Parses only RFC2822 headers from a file pointer.
224+
def _parse_header_lines(header_lines, _class=HTTPMessage):
225+
"""
226+
Parses only RFC2822 headers from header lines.
226227
227228
email Parser wants to see strings rather than bytes.
228229
But a TextIOWrapper around self.rfile would buffer too many bytes
@@ -231,10 +232,15 @@ def parse_headers(fp, _class=HTTPMessage):
231232
to parse.
232233
233234
"""
234-
headers = _read_headers(fp)
235-
hstring = b''.join(headers).decode('iso-8859-1')
235+
hstring = b''.join(header_lines).decode('iso-8859-1')
236236
return email.parser.Parser(_class=_class).parsestr(hstring)
237237

238+
def parse_headers(fp, _class=HTTPMessage):
239+
"""Parses only RFC2822 headers from a file pointer."""
240+
241+
headers = _read_headers(fp)
242+
return _parse_header_lines(headers, _class)
243+
238244

239245
class HTTPResponse(io.BufferedIOBase):
240246

@@ -858,7 +864,7 @@ def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
858864
self._tunnel_host = None
859865
self._tunnel_port = None
860866
self._tunnel_headers = {}
861-
self._proxy_response_headers = None
867+
self._raw_proxy_headers = None
862868

863869
(self.host, self.port) = self._get_hostport(host, port)
864870

@@ -945,11 +951,11 @@ def _tunnel(self):
945951
try:
946952
(version, code, message) = response._read_status()
947953

948-
self._proxy_response_headers = parse_headers(response.fp)
954+
self._raw_proxy_headers = _read_headers(response.fp)
949955

950956
if self.debuglevel > 0:
951-
for hdr, val in self._proxy_response_headers.items():
952-
print("header:", hdr + ":", val)
957+
for header in self._raw_proxy_headers:
958+
print('header:', header.decode())
953959

954960
if code != http.HTTPStatus.OK:
955961
self.close()
@@ -958,6 +964,21 @@ def _tunnel(self):
958964
finally:
959965
response.close()
960966

967+
def get_proxy_response_headers(self):
968+
"""
969+
Returns a dictionary with the headers of the response
970+
received from the proxy server to the CONNECT request
971+
sent to set the tunnel.
972+
973+
If the CONNECT request was not sent, the method returns
974+
an empty dictionary.
975+
"""
976+
return (
977+
_parse_header_lines(self._raw_proxy_headers)
978+
if self._raw_proxy_headers is not None
979+
else {}
980+
)
981+
961982
def connect(self):
962983
"""Connect to the host and port specified in __init__."""
963984
sys.audit("http.client.connect", self, self.host, self.port)

Lib/test/test_httplib.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2401,7 +2401,7 @@ def test_proxy_response_headers(self):
24012401
self.conn.set_tunnel('destination.com')
24022402

24032403
self.conn.request('PUT', '/', '')
2404-
headers = self.conn._proxy_response_headers
2404+
headers = self.conn.get_proxy_response_headers()
24052405
self.assertIn(expected_header, headers.items())
24062406

24072407
def test_tunnel_leak(self):

Lib/test/test_zipfile/test_core.py

Lines changed: 153 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1080,6 +1080,159 @@ def test_generated_valid_zip64_extra(self):
10801080
self.assertEqual(zinfo.header_offset, expected_header_offset)
10811081
self.assertEqual(zf.read(zinfo), expected_content)
10821082

1083+
def test_force_zip64(self):
1084+
"""Test that forcing zip64 extensions correctly notes this in the zip file"""
1085+
1086+
# GH-103861 describes an issue where forcing a small file to use zip64
1087+
# extensions would add a zip64 extra record, but not change the data
1088+
# sizes to 0xFFFFFFFF to indicate to the extractor that the zip64
1089+
# record should be read. Additionally, it would not set the required
1090+
# version to indicate that zip64 extensions are required to extract it.
1091+
# This test replicates the situation and reads the raw data to specifically ensure:
1092+
# - The required extract version is always >= ZIP64_VERSION
1093+
# - The compressed and uncompressed size in the file headers are both
1094+
# 0xFFFFFFFF (ie. point to zip64 record)
1095+
# - The zip64 record is provided and has the correct sizes in it
1096+
# Other aspects of the zip are checked as well, but verifying the above is the main goal.
1097+
# Because this is hard to verify by parsing the data as a zip, the raw
1098+
# bytes are checked to ensure that they line up with the zip spec.
1099+
# The spec for this can be found at: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
1100+
# The relevant sections for this test are:
1101+
# - 4.3.7 for local file header
1102+
# - 4.5.3 for zip64 extra field
1103+
1104+
data = io.BytesIO()
1105+
with zipfile.ZipFile(data, mode="w", allowZip64=True) as zf:
1106+
with zf.open("text.txt", mode="w", force_zip64=True) as zi:
1107+
zi.write(b"_")
1108+
1109+
zipdata = data.getvalue()
1110+
1111+
# pull out and check zip information
1112+
(
1113+
header, vers, os, flags, comp, csize, usize, fn_len,
1114+
ex_total_len, filename, ex_id, ex_len, ex_usize, ex_csize, cd_sig
1115+
) = struct.unpack("<4sBBHH8xIIHH8shhQQx4s", zipdata[:63])
1116+
1117+
self.assertEqual(header, b"PK\x03\x04") # local file header
1118+
self.assertGreaterEqual(vers, zipfile.ZIP64_VERSION) # requires zip64 to extract
1119+
self.assertEqual(os, 0) # compatible with MS-DOS
1120+
self.assertEqual(flags, 0) # no flags
1121+
self.assertEqual(comp, 0) # compression method = stored
1122+
self.assertEqual(csize, 0xFFFFFFFF) # sizes are in zip64 extra
1123+
self.assertEqual(usize, 0xFFFFFFFF)
1124+
self.assertEqual(fn_len, 8) # filename len
1125+
self.assertEqual(ex_total_len, 20) # size of extra records
1126+
self.assertEqual(ex_id, 1) # Zip64 extra record
1127+
self.assertEqual(ex_len, 16) # 16 bytes of data
1128+
self.assertEqual(ex_usize, 1) # uncompressed size
1129+
self.assertEqual(ex_csize, 1) # compressed size
1130+
self.assertEqual(cd_sig, b"PK\x01\x02") # ensure the central directory header is next
1131+
1132+
z = zipfile.ZipFile(io.BytesIO(zipdata))
1133+
zinfos = z.infolist()
1134+
self.assertEqual(len(zinfos), 1)
1135+
self.assertGreaterEqual(zinfos[0].extract_version, zipfile.ZIP64_VERSION) # requires zip64 to extract
1136+
1137+
def test_unseekable_zip_unknown_filesize(self):
1138+
"""Test that creating a zip with/without seeking will raise a RuntimeError if zip64 was required but not used"""
1139+
1140+
def make_zip(fp):
1141+
with zipfile.ZipFile(fp, mode="w", allowZip64=True) as zf:
1142+
with zf.open("text.txt", mode="w", force_zip64=False) as zi:
1143+
zi.write(b"_" * (zipfile.ZIP64_LIMIT + 1))
1144+
1145+
self.assertRaises(RuntimeError, make_zip, io.BytesIO())
1146+
self.assertRaises(RuntimeError, make_zip, Unseekable(io.BytesIO()))
1147+
1148+
def test_zip64_required_not_allowed_fail(self):
1149+
"""Test that trying to add a large file to a zip that doesn't allow zip64 extensions fails on add"""
1150+
def make_zip(fp):
1151+
with zipfile.ZipFile(fp, mode="w", allowZip64=False) as zf:
1152+
# pretend zipfile.ZipInfo.from_file was used to get the name and filesize
1153+
info = zipfile.ZipInfo("text.txt")
1154+
info.file_size = zipfile.ZIP64_LIMIT + 1
1155+
zf.open(info, mode="w")
1156+
1157+
self.assertRaises(zipfile.LargeZipFile, make_zip, io.BytesIO())
1158+
self.assertRaises(zipfile.LargeZipFile, make_zip, Unseekable(io.BytesIO()))
1159+
1160+
def test_unseekable_zip_known_filesize(self):
1161+
"""Test that creating a zip without seeking will use zip64 extensions if the file size is provided up-front"""
1162+
1163+
# This test ensures that the zip will use a zip64 data descriptor (same
1164+
# as a regular data descriptor except the sizes are 8 bytes instead of
1165+
# 4) record to communicate the size of a file if the zip is being
1166+
# written to an unseekable stream.
1167+
# Because this sort of thing is hard to verify by parsing the data back
1168+
# in as a zip, this test looks at the raw bytes created to ensure that
1169+
# the correct data has been generated.
1170+
# The spec for this can be found at: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
1171+
# The relevant sections for this test are:
1172+
# - 4.3.7 for local file header
1173+
# - 4.3.9 for the data descriptor
1174+
# - 4.5.3 for zip64 extra field
1175+
1176+
file_size = zipfile.ZIP64_LIMIT + 1
1177+
1178+
def make_zip(fp):
1179+
with zipfile.ZipFile(fp, mode="w", allowZip64=True) as zf:
1180+
# pretend zipfile.ZipInfo.from_file was used to get the name and filesize
1181+
info = zipfile.ZipInfo("text.txt")
1182+
info.file_size = file_size
1183+
with zf.open(info, mode="w", force_zip64=False) as zi:
1184+
zi.write(b"_" * file_size)
1185+
return fp
1186+
1187+
# check seekable file information
1188+
seekable_data = make_zip(io.BytesIO()).getvalue()
1189+
(
1190+
header, vers, os, flags, comp, csize, usize, fn_len,
1191+
ex_total_len, filename, ex_id, ex_len, ex_usize, ex_csize,
1192+
cd_sig
1193+
) = struct.unpack("<4sBBHH8xIIHH8shhQQ{}x4s".format(file_size), seekable_data[:62 + file_size])
1194+
1195+
self.assertEqual(header, b"PK\x03\x04") # local file header
1196+
self.assertGreaterEqual(vers, zipfile.ZIP64_VERSION) # requires zip64 to extract
1197+
self.assertEqual(os, 0) # compatible with MS-DOS
1198+
self.assertEqual(flags, 0) # no flags set
1199+
self.assertEqual(comp, 0) # compression method = stored
1200+
self.assertEqual(csize, 0xFFFFFFFF) # sizes are in zip64 extra
1201+
self.assertEqual(usize, 0xFFFFFFFF)
1202+
self.assertEqual(fn_len, 8) # filename len
1203+
self.assertEqual(ex_total_len, 20) # size of extra records
1204+
self.assertEqual(ex_id, 1) # Zip64 extra record
1205+
self.assertEqual(ex_len, 16) # 16 bytes of data
1206+
self.assertEqual(ex_usize, file_size) # uncompressed size
1207+
self.assertEqual(ex_csize, file_size) # compressed size
1208+
self.assertEqual(cd_sig, b"PK\x01\x02") # ensure the central directory header is next
1209+
1210+
# check unseekable file information
1211+
unseekable_data = make_zip(Unseekable(io.BytesIO())).fp.getvalue()
1212+
(
1213+
header, vers, os, flags, comp, csize, usize, fn_len,
1214+
ex_total_len, filename, ex_id, ex_len, ex_usize, ex_csize,
1215+
dd_header, dd_usize, dd_csize, cd_sig
1216+
) = struct.unpack("<4sBBHH8xIIHH8shhQQ{}x4s4xQQ4s".format(file_size), unseekable_data[:86 + file_size])
1217+
1218+
self.assertEqual(header, b"PK\x03\x04") # local file header
1219+
self.assertGreaterEqual(vers, zipfile.ZIP64_VERSION) # requires zip64 to extract
1220+
self.assertEqual(os, 0) # compatible with MS-DOS
1221+
self.assertEqual("{:b}".format(flags), "1000") # streaming flag set
1222+
self.assertEqual(comp, 0) # compression method = stored
1223+
self.assertEqual(csize, 0xFFFFFFFF) # sizes are in zip64 extra
1224+
self.assertEqual(usize, 0xFFFFFFFF)
1225+
self.assertEqual(fn_len, 8) # filename len
1226+
self.assertEqual(ex_total_len, 20) # size of extra records
1227+
self.assertEqual(ex_id, 1) # Zip64 extra record
1228+
self.assertEqual(ex_len, 16) # 16 bytes of data
1229+
self.assertEqual(ex_usize, 0) # uncompressed size - 0 to defer to data descriptor
1230+
self.assertEqual(ex_csize, 0) # compressed size - 0 to defer to data descriptor
1231+
self.assertEqual(dd_header, b"PK\07\x08") # data descriptor
1232+
self.assertEqual(dd_usize, file_size) # file size (8 bytes because zip64)
1233+
self.assertEqual(dd_csize, file_size) # compressed size (8 bytes because zip64)
1234+
self.assertEqual(cd_sig, b"PK\x01\x02") # ensure the central directory header is next
1235+
10831236

10841237
@requires_zlib()
10851238
class DeflateTestZip64InSmallFiles(AbstractTestZip64InSmallFiles,

Lib/zipfile/__init__.py

Lines changed: 17 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -442,7 +442,12 @@ def __repr__(self):
442442
return ''.join(result)
443443

444444
def FileHeader(self, zip64=None):
445-
"""Return the per-file header as a bytes object."""
445+
"""Return the per-file header as a bytes object.
446+
447+
When the optional zip64 arg is None rather than a bool, we will
448+
decide based upon the file_size and compress_size, if known,
449+
False otherwise.
450+
"""
446451
dt = self.date_time
447452
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
448453
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
@@ -458,16 +463,13 @@ def FileHeader(self, zip64=None):
458463

459464
min_version = 0
460465
if zip64 is None:
466+
# We always explicitly pass zip64 within this module.... This
467+
# remains for anyone using ZipInfo.FileHeader as a public API.
461468
zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
462469
if zip64:
463470
fmt = '<HHQQ'
464471
extra = extra + struct.pack(fmt,
465472
1, struct.calcsize(fmt)-4, file_size, compress_size)
466-
if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
467-
if not zip64:
468-
raise LargeZipFile("Filesize would require ZIP64 extensions")
469-
# File is larger than what fits into a 4 byte integer,
470-
# fall back to the ZIP64 extension
471473
file_size = 0xffffffff
472474
compress_size = 0xffffffff
473475
min_version = ZIP64_VERSION
@@ -1219,6 +1221,12 @@ def close(self):
12191221
self._zinfo.CRC = self._crc
12201222
self._zinfo.file_size = self._file_size
12211223

1224+
if not self._zip64:
1225+
if self._file_size > ZIP64_LIMIT:
1226+
raise RuntimeError("File size too large, try using force_zip64")
1227+
if self._compress_size > ZIP64_LIMIT:
1228+
raise RuntimeError("Compressed size too large, try using force_zip64")
1229+
12221230
# Write updated header info
12231231
if self._zinfo.flag_bits & _MASK_USE_DATA_DESCRIPTOR:
12241232
# Write CRC and file sizes after the file data
@@ -1227,13 +1235,6 @@ def close(self):
12271235
self._zinfo.compress_size, self._zinfo.file_size))
12281236
self._zipfile.start_dir = self._fileobj.tell()
12291237
else:
1230-
if not self._zip64:
1231-
if self._file_size > ZIP64_LIMIT:
1232-
raise RuntimeError(
1233-
'File size too large, try using force_zip64')
1234-
if self._compress_size > ZIP64_LIMIT:
1235-
raise RuntimeError(
1236-
'Compressed size too large, try using force_zip64')
12371238
# Seek backwards and write file header (which will now include
12381239
# correct CRC and file sizes)
12391240

@@ -1672,8 +1673,9 @@ def _open_to_write(self, zinfo, force_zip64=False):
16721673
zinfo.external_attr = 0o600 << 16 # permissions: ?rw-------
16731674

16741675
# Compressed size can be larger than uncompressed size
1675-
zip64 = self._allowZip64 and \
1676-
(force_zip64 or zinfo.file_size * 1.05 > ZIP64_LIMIT)
1676+
zip64 = force_zip64 or (zinfo.file_size * 1.05 > ZIP64_LIMIT)
1677+
if not self._allowZip64 and zip64:
1678+
raise LargeZipFile("Filesize would require ZIP64 extensions")
16771679

16781680
if self._seekable:
16791681
self.fp.seek(self.start_dir)

0 commit comments

Comments
 (0)