Skip to content

Commit f28eb86

Browse files
Merge python#23
23: warn for tokenize() r=ltratt a=nanjekyejoannah The behavior of `tokenize()` has changed in Python 3: it now behaves like the `tokenize.generate_tokens()` function did on Python 2. Co-authored-by: Joannah Nanjekye <jnanjeky@unb.ca>
2 parents 1f12b38 + ab79d5c commit f28eb86

2 files changed

Lines changed: 14 additions & 0 deletions

File tree

Lib/test/test_py3kwarn.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -331,6 +331,17 @@ def test_file_open(self):
331331
with check_py3k_warnings() as w:
332332
self.assertWarning(f.read(), w, expected)
333333

334+
def test_tokenize(self):
335+
import tokenize
336+
import io
337+
expected = "tokenize() changed in 3.x: use generate_tokens() instead."
338+
def helper_tok():
339+
for tok in tokenize.tokenize(io.BytesIO('1 + 2').readline):
340+
print tok
341+
with check_py3k_warnings() as w:
342+
self.assertWarning(helper_tok(), w, expected)
343+
344+
334345
def test_file(self):
335346
expected = ("The builtin 'file()'/'open()' function is not supported in 3.x, "
336347
"use the 'io.open()' function instead with the encoding keyword argument")

Lib/tokenize.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@
2929
from itertools import chain
3030
import string, re
3131
from token import *
32+
import warnings
3233

3334
import token
3435
__all__ = [x for x in dir(token) if not x.startswith("_")]
@@ -166,6 +167,8 @@ def tokenize(readline, tokeneater=printtoken):
166167
called once for each token, with five arguments, corresponding to the
167168
tuples generated by generate_tokens().
168169
"""
170+
warnings.warnpy3k_with_fix("tokenize() changed in 3.x", "use generate_tokens() instead.",
171+
stacklevel=2)
169172
try:
170173
tokenize_loop(readline, tokeneater)
171174
except StopTokenizing:

0 commit comments

Comments
 (0)