[Python-checkins] r76264 - python/branches/py3k/Lib/tokenize.py
benjamin.peterson
python-checkins at python.org
Sat Nov 14 17:27:26 CET 2009
Author: benjamin.peterson
Date: Sat Nov 14 17:27:26 2009
New Revision: 76264
Log:
simplify by using itertools.chain()
Modified:
python/branches/py3k/Lib/tokenize.py
Modified: python/branches/py3k/Lib/tokenize.py
==============================================================================
--- python/branches/py3k/Lib/tokenize.py (original)
+++ python/branches/py3k/Lib/tokenize.py Sat Nov 14 17:27:26 2009
@@ -377,17 +377,12 @@
     The first token sequence will always be an ENCODING token
     which tells you which encoding was used to decode the bytes stream.
     """
+    # This import is here to avoid problems when the itertools module is not
+    # built yet and tokenize is imported.
+    from itertools import chain
     encoding, consumed = detect_encoding(readline)
-    def readline_generator(consumed):
-        for line in consumed:
-            yield line
-        while True:
-            try:
-                yield readline()
-            except StopIteration:
-                return
-    chained = readline_generator(consumed)
-    return _tokenize(chained.__next__, encoding)
+    rl_iter = iter(readline, "")
+    return _tokenize(chain(consumed, rl_iter).__next__, encoding)
 
 
 def _tokenize(readline, encoding):
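
For readers unfamiliar with the idiom, here is a minimal standalone sketch of the pattern the new code relies on; the stream, variable names, and sample source below are illustrative assumptions, not the tokenize.py code itself. Two-argument iter() turns a readline-style callable into an iterator that stops once the sentinel value is returned, and itertools.chain() puts the lines already consumed during encoding detection back in front of the remaining ones.

# Sketch only: hypothetical byte stream standing in for the file being tokenized.
import io
from itertools import chain

stream = io.BytesIO(b"# coding: utf-8\nx = 1\ny = 2\n")
readline = stream.readline

# Pretend encoding detection already consumed the first line.
consumed = [readline()]        # [b'# coding: utf-8\n']

# Call readline() repeatedly until it returns b"" (EOF for a bytes stream).
rest = iter(readline, b"")

# Downstream code now sees the stream from the beginning again.
for line in chain(consumed, rest):
    print(line)

Running the sketch prints the three source lines in their original order, which is exactly what _tokenize needs from the chained __next__ callable: the consumed lines first, then everything readline() still has to offer.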