Changeset 391 for python/trunk/Lib/test/test_tokenize.py
- Timestamp: Mar 19, 2014, 11:31:01 PM
- Location: python/trunk
- Files: 2 edited
python/trunk
- Property svn:mergeinfo set to:
  /python/vendor/Python-2.7.6 merged eligible
  /python/vendor/current merged eligible
python/trunk/Lib/test/test_tokenize.py
--- Lib/test/test_tokenize.py (r2)
+++ Lib/test/test_tokenize.py (r391)
@@ -57,5 +57,5 @@
 ...
 
-There are some standard formattig practises that are easy to get right.
+There are some standard formatting practices that are easy to get right.
 
 >>> roundtrip("if x == 1:\\n"
@@ -279,3 +279,28 @@
 STRING     'UR"ABC"'     (1, 34) (1, 41)
 
+>>> dump_tokens("b'abc' + B'abc'")
+STRING     "b'abc'"      (1, 0) (1, 6)
+OP         '+'           (1, 7) (1, 8)
+STRING     "B'abc'"      (1, 9) (1, 15)
+>>> dump_tokens('b"abc" + B"abc"')
+STRING     'b"abc"'      (1, 0) (1, 6)
+OP         '+'           (1, 7) (1, 8)
+STRING     'B"abc"'      (1, 9) (1, 15)
+>>> dump_tokens("br'abc' + bR'abc' + Br'abc' + BR'abc'")
+STRING     "br'abc'"     (1, 0) (1, 7)
+OP         '+'           (1, 8) (1, 9)
+STRING     "bR'abc'"     (1, 10) (1, 17)
+OP         '+'           (1, 18) (1, 19)
+STRING     "Br'abc'"     (1, 20) (1, 27)
+OP         '+'           (1, 28) (1, 29)
+STRING     "BR'abc'"     (1, 30) (1, 37)
+>>> dump_tokens('br"abc" + bR"abc" + Br"abc" + BR"abc"')
+STRING     'br"abc"'     (1, 0) (1, 7)
+OP         '+'           (1, 8) (1, 9)
+STRING     'bR"abc"'     (1, 10) (1, 17)
+OP         '+'           (1, 18) (1, 19)
+STRING     'Br"abc"'     (1, 20) (1, 27)
+OP         '+'           (1, 28) (1, 29)
+STRING     'BR"abc"'     (1, 30) (1, 37)
+
 Operators
@@ -494,5 +519,5 @@
 
 Test roundtrip on random python modules.
-pass the '-ucompiler' option to process the full directory.
+pass the '-ucpu' option to process the full directory.
 
 >>>
@@ -500,5 +525,5 @@
 >>> testfiles = glob.glob(os.path.join(tempdir, "test*.py"))
 
->>> if not test_support.is_resource_enabled("compiler"):
+>>> if not test_support.is_resource_enabled("cpu"):
 ...     testfiles = random.sample(testfiles, 10)
 ...
@@ -509,9 +534,30 @@
 ...     else: True
 True
+
+Evil tabs
+>>> dump_tokens("def f():\\n\\tif x\\n        \\tpass")
+NAME       'def'         (1, 0) (1, 3)
+NAME       'f'           (1, 4) (1, 5)
+OP         '('           (1, 5) (1, 6)
+OP         ')'           (1, 6) (1, 7)
+OP         ':'           (1, 7) (1, 8)
+NEWLINE    '\\n'          (1, 8) (1, 9)
+INDENT     '\\t'          (2, 0) (2, 1)
+NAME       'if'          (2, 1) (2, 3)
+NAME       'x'           (2, 4) (2, 5)
+NEWLINE    '\\n'          (2, 5) (2, 6)
+INDENT     '        \\t'  (3, 0) (3, 9)
+NAME       'pass'        (3, 9) (3, 13)
+DEDENT     ''            (4, 0) (4, 0)
+DEDENT     ''            (4, 0) (4, 0)
+
+Pathological whitespace (http://bugs.python.org/issue16152)
+>>> dump_tokens("@          ")
+OP         '@'           (1, 0) (1, 1)
 """
 
 
 from test import test_support
-from tokenize import (tokenize, untokenize, generate_tokens, NUMBER, NAME, OP,
+from tokenize import (untokenize, generate_tokens, NUMBER, NAME, OP,
                       STRING, ENDMARKER, tok_name)
 from StringIO import StringIO
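The token tables above are produced by the test module's dump_tokens() doctest helper. As a rough sketch of what such a helper does, written for Python 2 to match the file's StringIO import (the column widths and the helper name here are assumptions, not the test's exact code):

    from StringIO import StringIO
    from tokenize import generate_tokens, tok_name, ENDMARKER

    def dump_tokens_sketch(source):
        # Tokenize `source` and print one row per token:
        # TYPE       'value'       (start_row, start_col) (end_row, end_col)
        readline = StringIO(source).readline
        for toknum, tokval, start, end, _line in generate_tokens(readline):
            if toknum == ENDMARKER:
                break  # the tables above never show an ENDMARKER row
            print "%-10s %-13r %s %s" % (tok_name[toknum], tokval, start, end)

    dump_tokens_sketch("b'abc' + B'abc'")

Run against the first added doctest input, this prints STRING/OP rows shaped like the table in the diff.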
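The '-ucpu' hunk gates the expensive part of the module's roundtrip test, which checks that untokenize() output re-tokenizes to the same token stream. A minimal sketch of that roundtrip idea, under the same Python 2 assumptions (the helper name and the (type, string) comparison are illustrative, not the test's exact code):

    from StringIO import StringIO
    from tokenize import generate_tokens, untokenize

    def roundtrip_sketch(source):
        # First pass: keep full 5-tuples so untokenize() can restore spacing.
        tokens = list(generate_tokens(StringIO(source).readline))
        rebuilt = untokenize(tokens)
        # Re-tokenize the rebuilt text and compare (type, string) pairs only;
        # exact positions may legitimately differ after a roundtrip.
        rebuilt_tokens = list(generate_tokens(StringIO(rebuilt).readline))
        return [t[:2] for t in tokens] == [t[:2] for t in rebuilt_tokens]

    print roundtrip_sketch("if x == 1:\n    print x\n")  # expected: True

is_resource_enabled("cpu") comes from test.test_support and reports whether the corresponding -u resource flag was passed to the test runner, so the full test-file sweep only runs when explicitly requested.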