Searched defs:generate_tokens (Results 1 - 3 of 3) sorted by relevance

/external/python/cpython2/Lib/lib2to3/pgen2/
tokenize.py
6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
171 tuples generated by generate_tokens().
180 for token_info in generate_tokens(readline):
338 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
341 t2 = [tok[:2] for tok in generate_tokens(readline)]
347 def generate_tokens(readline): function
349 The generate_tokens() generator requires one argument, readline, which
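The pgen2 copy documents the same interface as the stdlib module: generate_tokens() takes a readline callable and yields 5-tuples of (type, string, start, end, logical line). A minimal sketch of driving it (Python 2; the source string and the printing are illustrative, not taken from the results above):

    from StringIO import StringIO
    from lib2to3.pgen2 import tokenize, token

    source = "x = 1 + 2\n"   # hypothetical input
    for tok_type, tok_string, start, end, line in tokenize.generate_tokens(
            StringIO(source).readline):
        # tok_name maps the numeric token type to its symbolic name
        print token.tok_name[tok_type], repr(tok_string), start, end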
/external/python/cpython2/Lib/test/
test_tokenize.py
2 from tokenize import (untokenize, generate_tokens, NUMBER, NAME, OP, namespace
21 for type, token, start, end, line in generate_tokens(f.readline):
63 for tok in generate_tokens(StringIO(indent_error_file).readline):
520 g = generate_tokens(StringIO(s).readline) # tokenize the string
600 token_list = list(generate_tokens(f.readline))
605 tokens2 = [tok[:2] for tok in generate_tokens(readline)]
703 tokens = generate_tokens(StringIO(code).readline)
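The test lines above (e.g. 520, 600, 605) exercise the roundtrip between generate_tokens() and untokenize(). A rough sketch of that pattern (Python 2; the sample code string is made up):

    from StringIO import StringIO
    from tokenize import generate_tokens, untokenize

    code = "if x == 1:\n    print x\n"   # hypothetical snippet
    t1 = [tok[:2] for tok in generate_tokens(StringIO(code).readline)]
    rebuilt = untokenize(t1)             # 2-tuple mode: whitespace may change
    t2 = [tok[:2] for tok in generate_tokens(StringIO(rebuilt).readline)]
    assert t1 == t2                      # token types and strings round-trip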
/external/python/cpython2/Lib/
tokenize.py
3 generate_tokens(readline) is a generator that breaks a stream of
35 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
167 tuples generated by generate_tokens().
176 for token_info in generate_tokens(readline):
278 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
281 t2 = [tok[:2] for tok in generate_tokens(readline)]
287 def generate_tokens(readline): function
289 The generate_tokens() generator requires one argument, readline, which
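Both copies of tokenize.py note that generate_tokens() requires a single readline argument. Any zero-argument callable that returns one line per call and eventually an empty string will do; a sketch of that protocol (Python 2; the input lines are illustrative):

    from tokenize import generate_tokens, NEWLINE, ENDMARKER

    lines = iter(["total = 0\n", "total += 1\n"])
    readline = lambda: next(lines, "")   # returns "" once the input is exhausted
    for tok_type, tok_string, _, _, _ in generate_tokens(readline):
        if tok_type in (NEWLINE, ENDMARKER):
            continue
        print tok_string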

Completed in 173 milliseconds