Ignore:
Timestamp:
Mar 19, 2014, 11:31:01 PM (11 years ago)
Author:
dmik
Message:

python: Merge vendor 2.7.6 to trunk.

Location:
python/trunk
Files:
68 edited
5 copied

Legend:

Unmodified
Added
Removed
  • python/trunk

  • python/trunk/Lib/lib2to3/Grammar.txt

    r2 r391  
    1 # Grammar for Python
     1# Grammar for 2to3. This grammar supports Python 2.x and 3.x.
    22
    33# Note:  Changing the grammar specified in this file will most likely
     
    129129       NAME | NUMBER | STRING+ | '.' '.' '.')
    130130listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
    131 testlist_gexp: test ( comp_for | (',' (test|star_expr))* [','] )
     131testlist_gexp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
    132132lambdef: 'lambda' [varargslist] ':' test
    133133trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
  • python/trunk/Lib/lib2to3/fixer_base.py

    r2 r391  
    2525    PATTERN = None  # Most subclasses should override with a string literal
    2626    pattern = None  # Compiled pattern, set by compile_pattern()
     27    pattern_tree = None # Tree representation of the pattern
    2728    options = None  # Options object passed to initializer
    2829    filename = None # The filename (set by set_filename)
     
    3637    _accept_type = None # [Advanced and not public] This tells RefactoringTool
    3738                        # which node type to accept when there's not a pattern.
     39
     40    keep_line_order = False # For the bottom matcher: match with the
     41                            # original line order
     42    BM_compatible = False # Compatibility with the bottom matching
     43                          # module; every fixer should set this
     44                          # manually
    3845
    3946    # Shortcut for access to Python grammar symbols
     
    5966        """
    6067        if self.PATTERN is not None:
    61             self.pattern = PatternCompiler().compile_pattern(self.PATTERN)
     68            PC = PatternCompiler()
     69            self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN,
     70                                                                 with_tree=True)
    6271
    6372    def set_filename(self, filename):
  • python/trunk/Lib/lib2to3/fixer_util.py

    r2 r391  
    11"""Utility functions, node construction macros, etc."""
    22# Author: Collin Winter
     3
     4from itertools import islice
    35
    46# Local imports
     
    1517def KeywordArg(keyword, value):
    1618    return Node(syms.argument,
    17                 [keyword, Leaf(token.EQUAL, u'='), value])
     19                [keyword, Leaf(token.EQUAL, u"="), value])
    1820
    1921def LParen():
     
    7779def Subscript(index_node):
    7880    """A numeric or string subscript"""
    79     return Node(syms.trailer, [Leaf(token.LBRACE, u'['),
     81    return Node(syms.trailer, [Leaf(token.LBRACE, u"["),
    8082                               index_node,
    81                                Leaf(token.RBRACE, u']')])
     83                               Leaf(token.RBRACE, u"]")])
    8284
    8385def String(string, prefix=None):
     
    121123        leaf.remove()
    122124
    123     children = [Leaf(token.NAME, u'from'),
     125    children = [Leaf(token.NAME, u"from"),
    124126                Leaf(token.NAME, package_name, prefix=u" "),
    125                 Leaf(token.NAME, u'import', prefix=u" "),
     127                Leaf(token.NAME, u"import", prefix=u" "),
    126128                Node(syms.import_as_names, name_leafs)]
    127129    imp = Node(syms.import_from, children)
     
    164166
    165167consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum",
    166                        "min", "max"])
     168                       "min", "max", "enumerate"])
    167169
    168170def attr_chain(obj, attr):
     
    191193power<
    192194    ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' |
    193       'any' | 'all' | (any* trailer< '.' 'join' >) )
     195      'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) )
    194196    trailer< '(' node=any ')' >
    195197    any*
     
    198200p2 = """
    199201power<
    200     'sorted'
     202    ( 'sorted' | 'enumerate' )
    201203    trailer< '(' arglist<node=any any*> ')' >
    202204    any*
     
    206208def in_special_context(node):
    207209    """ Returns true if node is in an environment where all that is required
    208         of it is being itterable (ie, it doesn't matter if it returns a list
    209         or an itterator).
     210        of it is being iterable (ie, it doesn't matter if it returns a list
     211        or an iterator).
    210212        See test_map_nochange in test_fixers.py for some examples and tests.
    211213        """
    212214    global p0, p1, p2, pats_built
    213215    if not pats_built:
     216        p0 = patcomp.compile_pattern(p0)
    214217        p1 = patcomp.compile_pattern(p1)
    215         p0 = patcomp.compile_pattern(p0)
    216218        p2 = patcomp.compile_pattern(p2)
    217219        pats_built = True
     
    246248    return True
    247249
     250def find_indentation(node):
     251    """Find the indentation of *node*."""
     252    while node is not None:
     253        if node.type == syms.suite and len(node.children) > 2:
     254            indent = node.children[1]
     255            if indent.type == token.INDENT:
     256                return indent.value
     257        node = node.parent
     258    return u""
     259
    248260###########################################################
    249261### The following functions are to find bindings in a suite
     
    263275    # Scamper up to the top level namespace
    264276    while node.type != syms.file_input:
    265         assert node.parent, "Tree is insane! root found before "\
    266                            "file_input node was found."
    267277        node = node.parent
     278        if not node:
     279            raise ValueError("root found before file_input node was found.")
    268280    return node
    269281
     
    284296        if it was not imported. """
    285297    def is_import_stmt(node):
    286         return node.type == syms.simple_stmt and node.children and \
    287                is_import(node.children[0])
     298        return (node.type == syms.simple_stmt and node.children and
     299                is_import(node.children[0]))
    288300
    289301    root = find_root(node)
     
    308320    if insert_pos == 0:
    309321        for idx, node in enumerate(root.children):
    310             if node.type == syms.simple_stmt and node.children and \
    311                node.children[0].type == token.STRING:
     322            if (node.type == syms.simple_stmt and node.children and
     323               node.children[0].type == token.STRING):
    312324                insert_pos = idx + 1
    313325                break
     
    315327    if package is None:
    316328        import_ = Node(syms.import_name, [
    317             Leaf(token.NAME, u'import'),
    318             Leaf(token.NAME, name, prefix=u' ')
     329            Leaf(token.NAME, u"import"),
     330            Leaf(token.NAME, name, prefix=u" ")
    319331        ])
    320332    else:
    321         import_ = FromImport(package, [Leaf(token.NAME, name, prefix=u' ')])
     333        import_ = FromImport(package, [Leaf(token.NAME, name, prefix=u" ")])
    322334
    323335    children = [import_, Newline()]
     
    405417            return None
    406418        n = node.children[3]
    407         if package and _find(u'as', n):
     419        if package and _find(u"as", n):
    408420            # See test_from_import_as for explanation
    409421            return None
  • python/trunk/Lib/lib2to3/fixes/fix_apply.py

    r2 r391  
    1313
    1414class FixApply(fixer_base.BaseFix):
     15    BM_compatible = True
    1516
    1617    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_basestring.py

    r2 r391  
    77
    88class FixBasestring(fixer_base.BaseFix):
     9    BM_compatible = True
    910
    1011    PATTERN = "'basestring'"
  • python/trunk/Lib/lib2to3/fixes/fix_buffer.py

    r2 r391  
    1010
    1111class FixBuffer(fixer_base.BaseFix):
     12    BM_compatible = True
    1213
    1314    explicit = True # The user must ask for this fixer
  • python/trunk/Lib/lib2to3/fixes/fix_callable.py

    r2 r391  
    1212
    1313class FixCallable(fixer_base.BaseFix):
     14    BM_compatible = True
     15
     16    order = "pre"
    1417
    1518    # Ignore callable(*args) or use of keywords.
  • python/trunk/Lib/lib2to3/fixes/fix_dict.py

    r2 r391  
    4141
    4242class FixDict(fixer_base.BaseFix):
     43    BM_compatible = True
     44
    4345    PATTERN = """
    4446    power< head=any+
  • python/trunk/Lib/lib2to3/fixes/fix_except.py

    r2 r391  
    3535
    3636class FixExcept(fixer_base.BaseFix):
     37    BM_compatible = True
    3738
    3839    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_exec.py

    r2 r391  
    1717
    1818class FixExec(fixer_base.BaseFix):
     19    BM_compatible = True
    1920
    2021    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_execfile.py

    r2 r391  
    1414
    1515class FixExecfile(fixer_base.BaseFix):
     16    BM_compatible = True
    1617
    1718    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_filter.py

    r2 r391  
    2020
    2121class FixFilter(fixer_base.ConditionalFix):
     22    BM_compatible = True
    2223
    2324    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_funcattrs.py

    r2 r391  
    88
    99class FixFuncattrs(fixer_base.BaseFix):
     10    BM_compatible = True
     11
    1012    PATTERN = """
    1113    power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
  • python/trunk/Lib/lib2to3/fixes/fix_future.py

    r2 r391  
    1010
    1111class FixFuture(fixer_base.BaseFix):
     12    BM_compatible = True
     13
    1214    PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""
    1315
  • python/trunk/Lib/lib2to3/fixes/fix_getcwdu.py

    r2 r391  
    99
    1010class FixGetcwdu(fixer_base.BaseFix):
     11    BM_compatible = True
    1112
    1213    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_has_key.py

    r2 r391  
    3838
    3939class FixHasKey(fixer_base.BaseFix):
     40    BM_compatible = True
    4041
    4142    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_idioms.py

    r2 r391  
    3636
    3737class FixIdioms(fixer_base.BaseFix):
    38 
    3938    explicit = True # The user must ask for this fixer
    4039
  • python/trunk/Lib/lib2to3/fixes/fix_import.py

    r2 r391  
    3737
    3838class FixImport(fixer_base.BaseFix):
     39    BM_compatible = True
    3940
    4041    PATTERN = """
     
    4445    """
    4546
     47    def start_tree(self, tree, name):
     48        super(FixImport, self).start_tree(tree, name)
     49        self.skip = "absolute_import" in tree.future_features
     50
    4651    def transform(self, node, results):
     52        if self.skip:
     53            return
    4754        imp = results['imp']
    4855
     
    7279                return
    7380
    74             new = FromImport('.', [imp])
     81            new = FromImport(u".", [imp])
    7582            new.prefix = node.prefix
    7683            return new
    7784
    7885    def probably_a_local_import(self, imp_name):
    79         imp_name = imp_name.split('.', 1)[0]
     86        if imp_name.startswith(u"."):
     87            # Relative imports are certainly not local imports.
     88            return False
     89        imp_name = imp_name.split(u".", 1)[0]
    8090        base_path = dirname(self.filename)
    8191        base_path = join(base_path, imp_name)
    8292        # If there is no __init__.py next to the file its not in a package
    8393        # so can't be a relative import.
    84         if not exists(join(dirname(base_path), '__init__.py')):
     94        if not exists(join(dirname(base_path), "__init__.py")):
    8595            return False
    86         for ext in ['.py', sep, '.pyc', '.so', '.sl', '.pyd']:
     96        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]:
    8797            if exists(base_path + ext):
    8898                return True
  • python/trunk/Lib/lib2to3/fixes/fix_imports.py

    r2 r391  
    8585class FixImports(fixer_base.BaseFix):
    8686
     87    BM_compatible = True
     88    keep_line_order = True
    8789    # This is overridden in fix_imports2.
    8890    mapping = MAPPING
  • python/trunk/Lib/lib2to3/fixes/fix_input.py

    r2 r391  
    1212
    1313class FixInput(fixer_base.BaseFix):
    14 
     14    BM_compatible = True
    1515    PATTERN = """
    1616              power< 'input' args=trailer< '(' [any] ')' > >
  • python/trunk/Lib/lib2to3/fixes/fix_intern.py

    r2 r391  
    1313
    1414class FixIntern(fixer_base.BaseFix):
     15    BM_compatible = True
     16    order = "pre"
    1517
    1618    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_isinstance.py

    r2 r391  
    1515
    1616class FixIsinstance(fixer_base.BaseFix):
    17 
     17    BM_compatible = True
    1818    PATTERN = """
    1919    power<
  • python/trunk/Lib/lib2to3/fixes/fix_itertools.py

    r2 r391  
    1313
    1414class FixItertools(fixer_base.BaseFix):
    15     it_funcs = "('imap'|'ifilter'|'izip'|'ifilterfalse')"
     15    BM_compatible = True
     16    it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')"
    1617    PATTERN = """
    1718              power< it='itertools'
     
    2829        prefix = None
    2930        func = results['func'][0]
    30         if 'it' in results and func.value != u'ifilterfalse':
     31        if ('it' in results and
     32            func.value not in (u'ifilterfalse', u'izip_longest')):
    3133            dot, it = (results['dot'], results['it'])
    3234            # Remove the 'itertools'
    3335            prefix = it.prefix
    3436            it.remove()
    35             # Replace the node wich contains ('.', 'function') with the
    36             # function (to be consistant with the second part of the pattern)
     37            # Replace the node which contains ('.', 'function') with the
     38            # function (to be consistent with the second part of the pattern)
    3739            dot.remove()
    3840            func.parent.replace(func)
  • python/trunk/Lib/lib2to3/fixes/fix_itertools_imports.py

    r2 r391  
    77
    88class FixItertoolsImports(fixer_base.BaseFix):
     9    BM_compatible = True
    910    PATTERN = """
    1011              import_from< 'from' 'itertools' 'import' imports=any >
     
    2122                member = child.value
    2223                name_node = child
     24            elif child.type == token.STAR:
     25                # Just leave the import as is.
     26                return
    2327            else:
    2428                assert child.type == syms.import_as_name
     
    2832                child.value = None
    2933                child.remove()
    30             elif member_name == u'ifilterfalse':
     34            elif member_name in (u'ifilterfalse', u'izip_longest'):
    3135                node.changed()
    32                 name_node.value = u'filterfalse'
     36                name_node.value = (u'filterfalse' if member_name[1] == u'f'
     37                                   else u'zip_longest')
    3338
    3439        # Make sure the import statement is still sane
     
    4146                remove_comma ^= True
    4247
    43         if children[-1].type == token.COMMA:
    44             children[-1].remove()
     48        while children and children[-1].type == token.COMMA:
     49            children.pop().remove()
    4550
    4651        # If there are no imports left, just get rid of the entire statement
    47         if not (imports.children or getattr(imports, 'value', None)) or \
    48                 imports.parent is None:
     52        if (not (imports.children or getattr(imports, 'value', None)) or
     53            imports.parent is None):
    4954            p = node.prefix
    5055            node = BlankLine()
  • python/trunk/Lib/lib2to3/fixes/fix_long.py

    r2 r391  
    1111
    1212class FixLong(fixer_base.BaseFix):
    13 
     13    BM_compatible = True
    1414    PATTERN = "'long'"
    1515
  • python/trunk/Lib/lib2to3/fixes/fix_map.py

    r2 r391  
    2727
    2828class FixMap(fixer_base.ConditionalFix):
     29    BM_compatible = True
    2930
    3031    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_metaclass.py

    r2 r391  
    4949    for node in cls_node.children:
    5050        if node.type == syms.suite:
    51             # already in the prefered format, do nothing
     51            # already in the preferred format, do nothing
    5252            return
    5353
     
    7272    """ if there is a semi-colon all the parts count as part of the same
    7373        simple_stmt.  We just want the __metaclass__ part so we move
    74         everything efter the semi-colon into its own simple_stmt node
     74        everything after the semi-colon into its own simple_stmt node
    7575    """
    7676    for semi_ind, node in enumerate(stmt_node.children):
     
    144144
    145145class FixMetaclass(fixer_base.BaseFix):
     146    BM_compatible = True
    146147
    147148    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_methodattrs.py

    r2 r391  
    1414
    1515class FixMethodattrs(fixer_base.BaseFix):
     16    BM_compatible = True
    1617    PATTERN = """
    1718    power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
  • python/trunk/Lib/lib2to3/fixes/fix_next.py

    r2 r391  
    1616
    1717class FixNext(fixer_base.BaseFix):
     18    BM_compatible = True
    1819    PATTERN = """
    1920    power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
  • python/trunk/Lib/lib2to3/fixes/fix_nonzero.py

    r2 r391  
    77
    88class FixNonzero(fixer_base.BaseFix):
     9    BM_compatible = True
    910    PATTERN = """
    1011    classdef< 'class' any+ ':'
  • python/trunk/Lib/lib2to3/fixes/fix_operator.py

    r2 r391  
    1 """Fixer for operator.{isCallable,sequenceIncludes}
     1"""Fixer for operator functions.
    22
    3 operator.isCallable(obj) -> hasattr(obj, '__call__')
     3operator.isCallable(obj)       -> hasattr(obj, '__call__')
    44operator.sequenceIncludes(obj) -> operator.contains(obj)
     5operator.isSequenceType(obj)   -> isinstance(obj, collections.Sequence)
     6operator.isMappingType(obj)    -> isinstance(obj, collections.Mapping)
     7operator.isNumberType(obj)     -> isinstance(obj, numbers.Number)
     8operator.repeat(obj, n)        -> operator.mul(obj, n)
     9operator.irepeat(obj, n)       -> operator.imul(obj, n)
    510"""
    611
    712# Local imports
    8 from .. import fixer_base
    9 from ..fixer_util import Call, Name, String
     13from lib2to3 import fixer_base
     14from lib2to3.fixer_util import Call, Name, String, touch_import
     15
     16
     17def invocation(s):
     18    def dec(f):
     19        f.invocation = s
     20        return f
     21    return dec
     22
    1023
    1124class FixOperator(fixer_base.BaseFix):
     25    BM_compatible = True
     26    order = "pre"
    1227
    13     methods = "method=('isCallable'|'sequenceIncludes')"
    14     func = "'(' func=any ')'"
     28    methods = """
     29              method=('isCallable'|'sequenceIncludes'
     30                     |'isSequenceType'|'isMappingType'|'isNumberType'
     31                     |'repeat'|'irepeat')
     32              """
     33    obj = "'(' obj=any ')'"
    1534    PATTERN = """
    1635              power< module='operator'
    17                 trailer< '.' {methods} > trailer< {func} > >
     36                trailer< '.' %(methods)s > trailer< %(obj)s > >
    1837              |
    19               power< {methods} trailer< {func} > >
    20               """.format(methods=methods, func=func)
     38              power< %(methods)s trailer< %(obj)s > >
     39              """ % dict(methods=methods, obj=obj)
    2140
    2241    def transform(self, node, results):
     42        method = self._check_method(node, results)
     43        if method is not None:
     44            return method(node, results)
     45
     46    @invocation("operator.contains(%s)")
     47    def _sequenceIncludes(self, node, results):
     48        return self._handle_rename(node, results, u"contains")
     49
     50    @invocation("hasattr(%s, '__call__')")
     51    def _isCallable(self, node, results):
     52        obj = results["obj"]
     53        args = [obj.clone(), String(u", "), String(u"'__call__'")]
     54        return Call(Name(u"hasattr"), args, prefix=node.prefix)
     55
     56    @invocation("operator.mul(%s)")
     57    def _repeat(self, node, results):
     58        return self._handle_rename(node, results, u"mul")
     59
     60    @invocation("operator.imul(%s)")
     61    def _irepeat(self, node, results):
     62        return self._handle_rename(node, results, u"imul")
     63
     64    @invocation("isinstance(%s, collections.Sequence)")
     65    def _isSequenceType(self, node, results):
     66        return self._handle_type2abc(node, results, u"collections", u"Sequence")
     67
     68    @invocation("isinstance(%s, collections.Mapping)")
     69    def _isMappingType(self, node, results):
     70        return self._handle_type2abc(node, results, u"collections", u"Mapping")
     71
     72    @invocation("isinstance(%s, numbers.Number)")
     73    def _isNumberType(self, node, results):
     74        return self._handle_type2abc(node, results, u"numbers", u"Number")
     75
     76    def _handle_rename(self, node, results, name):
    2377        method = results["method"][0]
     78        method.value = name
     79        method.changed()
    2480
    25         if method.value == u"sequenceIncludes":
    26             if "module" not in results:
    27                 # operator may not be in scope, so we can't make a change.
    28                 self.warning(node, "You should use operator.contains here.")
     81    def _handle_type2abc(self, node, results, module, abc):
     82        touch_import(None, module, node)
     83        obj = results["obj"]
     84        args = [obj.clone(), String(u", " + u".".join([module, abc]))]
     85        return Call(Name(u"isinstance"), args, prefix=node.prefix)
     86
     87    def _check_method(self, node, results):
     88        method = getattr(self, "_" + results["method"][0].value.encode("ascii"))
     89        if callable(method):
     90            if "module" in results:
     91                return method
    2992            else:
    30                 method.value = u"contains"
    31                 method.changed()
    32         elif method.value == u"isCallable":
    33             if "module" not in results:
    34                 self.warning(node,
    35                              "You should use hasattr(%s, '__call__') here." %
    36                              results["func"].value)
    37             else:
    38                 func = results["func"]
    39                 args = [func.clone(), String(u", "), String(u"'__call__'")]
    40                 return Call(Name(u"hasattr"), args, prefix=node.prefix)
     93                sub = (unicode(results["obj"]),)
     94                invocation_str = unicode(method.invocation) % sub
     95                self.warning(node, u"You should use '%s' here." % invocation_str)
     96        return None
  • python/trunk/Lib/lib2to3/fixes/fix_paren.py

    r2 r391  
    1111# XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2]
    1212class FixParen(fixer_base.BaseFix):
     13    BM_compatible = True
     14
    1315    PATTERN = """
    1416        atom< ('[' | '(')
  • python/trunk/Lib/lib2to3/fixes/fix_print.py

    r2 r391  
    2828
    2929class FixPrint(fixer_base.BaseFix):
     30
     31    BM_compatible = True
    3032
    3133    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_raise.py

    r2 r391  
    55raise E, V    -> raise E(V)
    66raise E, V, T -> raise E(V).with_traceback(T)
     7raise E, None, T -> raise E.with_traceback(T)
    78
    89raise (((E, E'), E''), E'''), V -> raise E(V)
     
    3031class FixRaise(fixer_base.BaseFix):
    3132
     33    BM_compatible = True
    3234    PATTERN = """
    3335    raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
     
    3840
    3941        exc = results["exc"].clone()
    40         if exc.type is token.STRING:
    41             self.cannot_convert(node, "Python 3 does not support string exceptions")
     42        if exc.type == token.STRING:
     43            msg = "Python 3 does not support string exceptions"
     44            self.cannot_convert(node, msg)
    4245            return
    4346
     
    5356                # exc.children[1].children[0] is the first element of the tuple
    5457                exc = exc.children[1].children[0].clone()
    55             exc.prefix = " "
     58            exc.prefix = u" "
    5659
    5760        if "val" not in results:
     
    7275            tb.prefix = u""
    7376
    74             e = Call(exc, args)
     77            e = exc
     78            # If there's a traceback and None is passed as the value, then don't
     79            # add a call, since the user probably just wants to add a
     80            # traceback. See issue #9661.
     81            if val.type != token.NAME or val.value != u"None":
     82                e = Call(exc, args)
    7583            with_tb = Attr(e, Name(u'with_traceback')) + [ArgList([tb])]
    7684            new = pytree.Node(syms.simple_stmt, [Name(u"raise")] + with_tb)
  • python/trunk/Lib/lib2to3/fixes/fix_raw_input.py

    r2 r391  
    88class FixRawInput(fixer_base.BaseFix):
    99
     10    BM_compatible = True
    1011    PATTERN = """
    1112              power< name='raw_input' trailer< '(' [any] ')' > any* >
  • python/trunk/Lib/lib2to3/fixes/fix_reduce.py

    r2 r391  
    88"""
    99
    10 from .. import pytree
    11 from .. import fixer_base
    12 from ..fixer_util import Name, Attr, touch_import
     10from lib2to3 import fixer_base
     11from lib2to3.fixer_util import touch_import
    1312
    1413
    1514
    1615class FixReduce(fixer_base.BaseFix):
     16
     17    BM_compatible = True
     18    order = "pre"
    1719
    1820    PATTERN = """
  • python/trunk/Lib/lib2to3/fixes/fix_renames.py

    r2 r391  
    4141
    4242class FixRenames(fixer_base.BaseFix):
     43    BM_compatible = True
    4344    PATTERN = "|".join(build_pattern())
    4445
  • python/trunk/Lib/lib2to3/fixes/fix_repr.py

    r2 r391  
    1111class FixRepr(fixer_base.BaseFix):
    1212
     13    BM_compatible = True
    1314    PATTERN = """
    1415              atom < '`' expr=any '`' >
  • python/trunk/Lib/lib2to3/fixes/fix_set_literal.py

    r2 r391  
    1212class FixSetLiteral(fixer_base.BaseFix):
    1313
     14    BM_compatible = True
    1415    explicit = True
    1516
  • python/trunk/Lib/lib2to3/fixes/fix_standarderror.py

    r2 r391  
    1010
    1111class FixStandarderror(fixer_base.BaseFix):
    12 
     12    BM_compatible = True
    1313    PATTERN = """
    1414              'StandardError'
  • python/trunk/Lib/lib2to3/fixes/fix_sys_exc.py

    r2 r391  
    1414class FixSysExc(fixer_base.BaseFix):
    1515    # This order matches the ordering of sys.exc_info().
    16     exc_info = ["exc_type", "exc_value", "exc_traceback"]
     16    exc_info = [u"exc_type", u"exc_value", u"exc_traceback"]
     17    BM_compatible = True
    1718    PATTERN = """
    1819              power< 'sys' trailer< dot='.' attribute=(%s) > >
  • python/trunk/Lib/lib2to3/fixes/fix_throw.py

    r2 r391  
    1515
    1616class FixThrow(fixer_base.BaseFix):
    17 
     17    BM_compatible = True
    1818    PATTERN = """
    1919    power< any trailer< '.' 'throw' >
  • python/trunk/Lib/lib2to3/fixes/fix_tuple_params.py

    r2 r391  
    3030
    3131class FixTupleParams(fixer_base.BaseFix):
     32    run_order = 4 #use a lower order since lambda is part of other
     33                  #patterns
     34    BM_compatible = True
     35
    3236    PATTERN = """
    3337              funcdef< 'def' any parameters< '(' args=any ')' >
     
    5559        else:
    5660            start = 0
    57             indent = "; "
     61            indent = u"; "
    5862            end = pytree.Leaf(token.INDENT, u"")
    5963
     
    155159        d = {}
    156160    for i, obj in enumerate(param_list):
    157         trailer = [Subscript(Number(i))]
     161        trailer = [Subscript(Number(unicode(i)))]
    158162        if isinstance(obj, list):
    159163            map_to_index(obj, trailer, d=d)
  • python/trunk/Lib/lib2to3/fixes/fix_types.py

    r2 r391  
    5353
    5454class FixTypes(fixer_base.BaseFix):
    55 
     55    BM_compatible = True
    5656    PATTERN = '|'.join(_pats)
    5757
  • python/trunk/Lib/lib2to3/fixes/fix_unicode.py

    r2 r391  
    1 """Fixer that changes unicode to str, unichr to chr, and u"..." into "...".
     1r"""Fixer for unicode.
     2
     3* Changes unicode to str and unichr to chr.
     4
     5* If "...\u..." is not unicode literal change it into "...\\u...".
     6
     7* Change u"..." into "...".
    28
    39"""
    410
    5 import re
    611from ..pgen2 import token
    712from .. import fixer_base
    813
    914_mapping = {u"unichr" : u"chr", u"unicode" : u"str"}
    10 _literal_re = re.compile(ur"[uU][rR]?[\'\"]")
    1115
    1216class FixUnicode(fixer_base.BaseFix):
     17    BM_compatible = True
     18    PATTERN = "STRING | 'unicode' | 'unichr'"
    1319
    14     PATTERN = "STRING | 'unicode' | 'unichr'"
     20    def start_tree(self, tree, filename):
     21        super(FixUnicode, self).start_tree(tree, filename)
     22        self.unicode_literals = 'unicode_literals' in tree.future_features
    1523
    1624    def transform(self, node, results):
     
    2028            return new
    2129        elif node.type == token.STRING:
    22             if _literal_re.match(node.value):
    23                 new = node.clone()
    24                 new.value = new.value[1:]
    25                 return new
     30            val = node.value
     31            if not self.unicode_literals and val[0] in u'\'"' and u'\\' in val:
     32                val = ur'\\'.join([
     33                    v.replace(u'\\u', ur'\\u').replace(u'\\U', ur'\\U')
     34                    for v in val.split(ur'\\')
     35                ])
     36            if val[0] in u'uU':
     37                val = val[1:]
     38            if val == node.value:
     39                return node
     40            new = node.clone()
     41            new.value = val
     42            return new
  • python/trunk/Lib/lib2to3/fixes/fix_urllib.py

    r2 r391  
    66
    77# Local imports
    8 from .fix_imports import alternates, FixImports
    9 from .. import fixer_base
    10 from ..fixer_util import Name, Comma, FromImport, Newline, attr_chain
     8from lib2to3.fixes.fix_imports import alternates, FixImports
     9from lib2to3 import fixer_base
     10from lib2to3.fixer_util import (Name, Comma, FromImport, Newline,
     11                                find_indentation, Node, syms)
    1112
    12 MAPPING = {'urllib':  [
    13                 ('urllib.request',
    14                     ['URLOpener', 'FancyURLOpener', 'urlretrieve',
    15                      '_urlopener', 'urlopen', 'urlcleanup',
    16                      'pathname2url', 'url2pathname']),
    17                 ('urllib.parse',
    18                     ['quote', 'quote_plus', 'unquote', 'unquote_plus',
    19                      'urlencode', 'splitattr', 'splithost', 'splitnport',
    20                      'splitpasswd', 'splitport', 'splitquery', 'splittag',
    21                      'splittype', 'splituser', 'splitvalue', ]),
    22                 ('urllib.error',
    23                     ['ContentTooShortError'])],
    24            'urllib2' : [
    25                 ('urllib.request',
    26                     ['urlopen', 'install_opener', 'build_opener',
    27                      'Request', 'OpenerDirector', 'BaseHandler',
    28                      'HTTPDefaultErrorHandler', 'HTTPRedirectHandler',
    29                      'HTTPCookieProcessor', 'ProxyHandler',
    30                      'HTTPPasswordMgr',
    31                      'HTTPPasswordMgrWithDefaultRealm',
    32                      'AbstractBasicAuthHandler',
    33                      'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler',
    34                      'AbstractDigestAuthHandler',
    35                      'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler',
    36                      'HTTPHandler', 'HTTPSHandler', 'FileHandler',
    37                      'FTPHandler', 'CacheFTPHandler',
    38                      'UnknownHandler']),
    39                 ('urllib.error',
    40                     ['URLError', 'HTTPError']),
     13MAPPING = {"urllib":  [
     14                ("urllib.request",
     15                    ["URLopener", "FancyURLopener", "urlretrieve",
     16                     "_urlopener", "urlopen", "urlcleanup",
     17                     "pathname2url", "url2pathname"]),
     18                ("urllib.parse",
     19                    ["quote", "quote_plus", "unquote", "unquote_plus",
     20                     "urlencode", "splitattr", "splithost", "splitnport",
     21                     "splitpasswd", "splitport", "splitquery", "splittag",
     22                     "splittype", "splituser", "splitvalue", ]),
     23                ("urllib.error",
     24                    ["ContentTooShortError"])],
     25           "urllib2" : [
     26                ("urllib.request",
     27                    ["urlopen", "install_opener", "build_opener",
     28                     "Request", "OpenerDirector", "BaseHandler",
     29                     "HTTPDefaultErrorHandler", "HTTPRedirectHandler",
     30                     "HTTPCookieProcessor", "ProxyHandler",
     31                     "HTTPPasswordMgr",
     32                     "HTTPPasswordMgrWithDefaultRealm",
     33                     "AbstractBasicAuthHandler",
     34                     "HTTPBasicAuthHandler", "ProxyBasicAuthHandler",
     35                     "AbstractDigestAuthHandler",
     36                     "HTTPDigestAuthHandler", "ProxyDigestAuthHandler",
     37                     "HTTPHandler", "HTTPSHandler", "FileHandler",
     38                     "FTPHandler", "CacheFTPHandler",
     39                     "UnknownHandler"]),
     40                ("urllib.error",
     41                    ["URLError", "HTTPError"]),
    4142           ]
    4243}
     
    7980           replacements.
    8081        """
    81         import_mod = results.get('module')
     82        import_mod = results.get("module")
    8283        pref = import_mod.prefix
    8384
     
    9596           module.
    9697        """
    97         mod_member = results.get('mod_member')
     98        mod_member = results.get("mod_member")
    9899        pref = mod_member.prefix
    99         member = results.get('member')
     100        member = results.get("member")
    100101
    101102        # Simple case with only a single member being imported
     
    112113                mod_member.replace(Name(new_name, prefix=pref))
    113114            else:
    114                 self.cannot_convert(node,
    115                                     'This is an invalid module element')
     115                self.cannot_convert(node, "This is an invalid module element")
    116116
    117117        # Multiple members being imported
     
    120120            modules = []
    121121            mod_dict = {}
    122             members = results.get('members')
     122            members = results["members"]
    123123            for member in members:
    124                 member = member.value
    125124                # we only care about the actual members
    126                 if member != ',':
     125                if member.type == syms.import_as_name:
     126                    as_name = member.children[2].value
     127                    member_name = member.children[0].value
     128                else:
     129                    member_name = member.value
     130                    as_name = None
     131                if member_name != u",":
    127132                    for change in MAPPING[mod_member.value]:
    128                         if member in change[1]:
    129                             if change[0] in mod_dict:
    130                                 mod_dict[change[0]].append(member)
    131                             else:
    132                                 mod_dict[change[0]] = [member]
     133                        if member_name in change[1]:
     134                            if change[0] not in mod_dict:
    133135                                modules.append(change[0])
     136                            mod_dict.setdefault(change[0], []).append(member)
    134137
    135138            new_nodes = []
     139            indentation = find_indentation(node)
     140            first = True
     141            def handle_name(name, prefix):
     142                if name.type == syms.import_as_name:
     143                    kids = [Name(name.children[0].value, prefix=prefix),
     144                            name.children[1].clone(),
     145                            name.children[2].clone()]
     146                    return [Node(syms.import_as_name, kids)]
     147                return [Name(name.value, prefix=prefix)]
    136148            for module in modules:
    137149                elts = mod_dict[module]
    138150                names = []
    139151                for elt in elts[:-1]:
    140                     names.extend([Name(elt, prefix=pref), Comma()])
    141                 names.append(Name(elts[-1], prefix=pref))
    142                 new_nodes.append(FromImport(module, names))
     152                    names.extend(handle_name(elt, pref))
     153                    names.append(Comma())
     154                names.extend(handle_name(elts[-1], pref))
     155                new = FromImport(module, names)
     156                if not first or node.parent.prefix.endswith(indentation):
     157                    new.prefix = indentation
     158                new_nodes.append(new)
     159                first = False
    143160            if new_nodes:
    144161                nodes = []
     
    148165                node.replace(nodes)
    149166            else:
    150                 self.cannot_convert(node, 'All module elements are invalid')
     167                self.cannot_convert(node, "All module elements are invalid")
    151168
    152169    def transform_dot(self, node, results):
    153170        """Transform for calls to module members in code."""
    154         module_dot = results.get('bare_with_attr')
    155         member = results.get('member')
     171        module_dot = results.get("bare_with_attr")
     172        member = results.get("member")
    156173        new_name = None
    157174        if isinstance(member, list):
     
    165182                                    prefix=module_dot.prefix))
    166183        else:
    167             self.cannot_convert(node, 'This is an invalid module element')
     184            self.cannot_convert(node, "This is an invalid module element")
    168185
    169186    def transform(self, node, results):
    170         if results.get('module'):
     187        if results.get("module"):
    171188            self.transform_import(node, results)
    172         elif results.get('mod_member'):
     189        elif results.get("mod_member"):
    173190            self.transform_member(node, results)
    174         elif results.get('bare_with_attr'):
     191        elif results.get("bare_with_attr"):
    175192            self.transform_dot(node, results)
    176193        # Renaming and star imports are not supported for these modules.
    177         elif results.get('module_star'):
    178             self.cannot_convert(node, 'Cannot handle star imports.')
    179         elif results.get('module_as'):
    180             self.cannot_convert(node, 'This module is now multiple modules')
     194        elif results.get("module_star"):
     195            self.cannot_convert(node, "Cannot handle star imports.")
     196        elif results.get("module_as"):
     197            self.cannot_convert(node, "This module is now multiple modules")
  • python/trunk/Lib/lib2to3/fixes/fix_xrange.py

    r2 r391  
    1111
    1212class FixXrange(fixer_base.BaseFix):
    13 
     13    BM_compatible = True
    1414    PATTERN = """
    1515              power<
     
    1717              rest=any* >
    1818              """
     19
     20    def start_tree(self, tree, filename):
     21        super(FixXrange, self).start_tree(tree, filename)
     22        self.transformed_xranges = set()
     23
     24    def finish_tree(self, tree, filename):
     25        self.transformed_xranges = None
    1926
    2027    def transform(self, node, results):
     
    3037        name = results["name"]
    3138        name.replace(Name(u"range", prefix=name.prefix))
     39        # This prevents the new range call from being wrapped in a list later.
     40        self.transformed_xranges.add(id(node))
    3241
    3342    def transform_range(self, node, results):
    34         if not self.in_special_context(node):
     43        if (id(node) not in self.transformed_xranges and
     44            not self.in_special_context(node)):
    3545            range_call = Call(Name(u"range"), [results["args"].clone()])
    3646            # Encase the range call in list().
  • python/trunk/Lib/lib2to3/fixes/fix_xreadlines.py

    r2 r391  
    1010
    1111class FixXreadlines(fixer_base.BaseFix):
     12    BM_compatible = True
    1213    PATTERN = """
    1314    power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
  • python/trunk/Lib/lib2to3/fixes/fix_zip.py

    r2 r391  
    1414class FixZip(fixer_base.ConditionalFix):
    1515
     16    BM_compatible = True
    1617    PATTERN = """
    1718    power< 'zip' args=trailer< '(' [any] ')' >
  • python/trunk/Lib/lib2to3/main.py

    r2 r391  
    22Main program for 2to3.
    33"""
     4
     5from __future__ import with_statement
    46
    57import sys
     
    2426class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
    2527    """
     28    A refactoring tool that can avoid overwriting its input files.
    2629    Prints output to stdout.
     30
     31    Output files can optionally be written to a different directory and or
     32    have an extra file suffix appended to their name for use in situations
     33    where you do not want to replace the input files.
    2734    """
    2835
    29     def __init__(self, fixers, options, explicit, nobackups, show_diffs):
     36    def __init__(self, fixers, options, explicit, nobackups, show_diffs,
     37                 input_base_dir='', output_dir='', append_suffix=''):
     38        """
     39        Args:
     40            fixers: A list of fixers to import.
     41            options: A dict with RefactoringTool configuration.
     42            explicit: A list of fixers to run even if they are explicit.
     43            nobackups: If true no backup '.bak' files will be created for those
     44                files that are being refactored.
     45            show_diffs: Should diffs of the refactoring be printed to stdout?
     46            input_base_dir: The base directory for all input files.  This class
     47                will strip this path prefix off of filenames before substituting
     48                it with output_dir.  Only meaningful if output_dir is supplied.
     49                All files processed by refactor() must start with this path.
     50            output_dir: If supplied, all converted files will be written into
     51                this directory tree instead of input_base_dir.
     52            append_suffix: If supplied, all files output by this tool will have
     53                this appended to their filename.  Useful for changing .py to
     54                .py3 for example by passing append_suffix='3'.
     55        """
    3056        self.nobackups = nobackups
    3157        self.show_diffs = show_diffs
     58        if input_base_dir and not input_base_dir.endswith(os.sep):
     59            input_base_dir += os.sep
     60        self._input_base_dir = input_base_dir
     61        self._output_dir = output_dir
     62        self._append_suffix = append_suffix
    3263        super(StdoutRefactoringTool, self).__init__(fixers, options, explicit)
    3364
     
    3768
    3869    def write_file(self, new_text, filename, old_text, encoding):
     70        orig_filename = filename
     71        if self._output_dir:
     72            if filename.startswith(self._input_base_dir):
     73                filename = os.path.join(self._output_dir,
     74                                        filename[len(self._input_base_dir):])
     75            else:
     76                raise ValueError('filename %s does not start with the '
     77                                 'input_base_dir %s' % (
     78                                         filename, self._input_base_dir))
     79        if self._append_suffix:
     80            filename += self._append_suffix
     81        if orig_filename != filename:
     82            output_dir = os.path.dirname(filename)
     83            if not os.path.isdir(output_dir):
     84                os.makedirs(output_dir)
     85            self.log_message('Writing converted %s to %s.', orig_filename,
     86                             filename)
    3987        if not self.nobackups:
    4088            # Make backup
     
    54102        if not self.nobackups:
    55103            shutil.copymode(backup, filename)
     104        if orig_filename != filename:
     105            # Preserve the file mode in the new output directory.
     106            shutil.copymode(orig_filename, filename)
    56107
    57108    def print_output(self, old, new, filename, equal):
     
    63114                diff_lines = diff_texts(old, new, filename)
    64115                try:
    65                     for line in diff_lines:
    66                         print line
     116                    if self.output_lock is not None:
     117                        with self.output_lock:
     118                            for line in diff_lines:
     119                                print line
     120                            sys.stdout.flush()
     121                    else:
     122                        for line in diff_lines:
     123                            print line
    67124                except UnicodeEncodeError:
    68125                    warn("couldn't encode %s's diff for your terminal" %
     
    94151                      type="int", help="Run 2to3 concurrently")
    95152    parser.add_option("-x", "--nofix", action="append", default=[],
    96                       help="Prevent a fixer from being run.")
     153                      help="Prevent a transformation from being run")
    97154    parser.add_option("-l", "--list-fixes", action="store_true",
    98                       help="List available transformations (fixes/fix_*.py)")
     155                      help="List available transformations")
    99156    parser.add_option("-p", "--print-function", action="store_true",
    100157                      help="Modify the grammar so that print() is a function")
     
    106163                      help="Write back modified files")
    107164    parser.add_option("-n", "--nobackups", action="store_true", default=False,
    108                       help="Don't write backups for modified files.")
     165                      help="Don't write backups for modified files")
     166    parser.add_option("-o", "--output-dir", action="store", type="str",
     167                      default="", help="Put output files in this directory "
     168                      "instead of overwriting the input files.  Requires -n.")
     169    parser.add_option("-W", "--write-unchanged-files", action="store_true",
     170                      help="Also write files even if no changes were required"
     171                      " (useful with --output-dir); implies -w.")
     172    parser.add_option("--add-suffix", action="store", type="str", default="",
     173                      help="Append this string to all output filenames."
     174                      " Requires -n if non-empty.  "
     175                      "ex: --add-suffix='3' will generate .py3 files.")
    109176
    110177    # Parse command line arguments
     
    112179    flags = {}
    113180    options, args = parser.parse_args(args)
     181    if options.write_unchanged_files:
     182        flags["write_unchanged_files"] = True
     183        if not options.write:
     184            warn("--write-unchanged-files/-W implies -w.")
     185        options.write = True
     186    # If we allowed these, the original files would be renamed to backup names
     187    # but not replaced.
     188    if options.output_dir and not options.nobackups:
     189        parser.error("Can't use --output-dir/-o without -n.")
     190    if options.add_suffix and not options.nobackups:
     191        parser.error("Can't use --add-suffix without -n.")
     192
    114193    if not options.write and options.no_diffs:
    115194        warn("not writing files and not printing diffs; that's not very useful")
     
    137216    level = logging.DEBUG if options.verbose else logging.INFO
    138217    logging.basicConfig(format='%(name)s: %(message)s', level=level)
     218    logger = logging.getLogger('lib2to3.main')
    139219
    140220    # Initialize the refactoring tool
     
    153233        requested = avail_fixes.union(explicit)
    154234    fixer_names = requested.difference(unwanted_fixes)
    155     rt = StdoutRefactoringTool(sorted(fixer_names), flags, sorted(explicit),
    156                                options.nobackups, not options.no_diffs)
     235    input_base_dir = os.path.commonprefix(args)
     236    if (input_base_dir and not input_base_dir.endswith(os.sep)
     237        and not os.path.isdir(input_base_dir)):
     238        # One or more similar names were passed, their directory is the base.
     239        # os.path.commonprefix() is ignorant of path elements, this corrects
     240        # for that weird API.
     241        input_base_dir = os.path.dirname(input_base_dir)
     242    if options.output_dir:
     243        input_base_dir = input_base_dir.rstrip(os.sep)
     244        logger.info('Output in %r will mirror the input directory %r layout.',
     245                    options.output_dir, input_base_dir)
     246    rt = StdoutRefactoringTool(
     247            sorted(fixer_names), flags, sorted(explicit),
     248            options.nobackups, not options.no_diffs,
     249            input_base_dir=input_base_dir,
     250            output_dir=options.output_dir,
     251            append_suffix=options.add_suffix)
    157252
    158253    # Refactor all files and directories passed as arguments
  • python/trunk/Lib/lib2to3/patcomp.py

    r2 r391  
    1313# Python imports
    1414import os
     15import StringIO
    1516
    1617# Fairly local imports
     
    3334    """Tokenizes a string suppressing significant whitespace."""
    3435    skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
    35     tokens = tokenize.generate_tokens(driver.generate_lines(input).next)
     36    tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline)
    3637    for quintuple in tokens:
    3738        type, value, start, end, line_text = quintuple
     
    5354        self.driver = driver.Driver(self.grammar, convert=pattern_convert)
    5455
    55     def compile_pattern(self, input, debug=False):
     56    def compile_pattern(self, input, debug=False, with_tree=False):
    5657        """Compiles a pattern string to a nested pytree.*Pattern object."""
    5758        tokens = tokenize_wrapper(input)
     
    6061        except parse.ParseError as e:
    6162            raise PatternSyntaxError(str(e))
    62         return self.compile_node(root)
     63        if with_tree:
     64            return self.compile_node(root), root
     65        else:
     66            return self.compile_node(root)
    6367
    6468    def compile_node(self, node):
  • python/trunk/Lib/lib2to3/pgen2/conv.py

    r2 r391  
    5252
    5353    def parse_graminit_h(self, filename):
    54         """Parse the .h file writen by pgen.  (Internal)
     54        """Parse the .h file written by pgen.  (Internal)
    5555
    5656        This file is a sequence of #define statements defining the
     
    8383
    8484    def parse_graminit_c(self, filename):
    85         """Parse the .c file writen by pgen.  (Internal)
     85        """Parse the .c file written by pgen.  (Internal)
    8686
    8787        The file looks as follows.  The first two lines are always this:
  • python/trunk/Lib/lib2to3/pgen2/driver.py

    r2 r391  
    2020import os
    2121import logging
     22import StringIO
    2223import sys
    2324
     
    102103    def parse_string(self, text, debug=False):
    103104        """Parse a string and return the syntax tree."""
    104         tokens = tokenize.generate_tokens(generate_lines(text).next)
     105        tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
    105106        return self.parse_tokens(tokens, debug)
    106 
    107 
    108 def generate_lines(text):
    109     """Generator that behaves like readline without using StringIO."""
    110     for line in text.splitlines(True):
    111         yield line
    112     while True:
    113         yield ""
    114107
    115108
     
    146139        return True
    147140    return os.path.getmtime(a) >= os.path.getmtime(b)
     141
     142
     143def main(*args):
     144    """Main program, when run as a script: produce grammar pickle files.
     145
     146    Calls load_grammar for each argument, a path to a grammar text file.
     147    """
     148    if not args:
     149        args = sys.argv[1:]
     150    logging.basicConfig(level=logging.INFO, stream=sys.stdout,
     151                        format='%(message)s')
     152    for gt in args:
     153        load_grammar(gt, save=True, force=True)
     154    return True
     155
     156if __name__ == "__main__":
     157    sys.exit(int(not main()))
  • python/trunk/Lib/lib2to3/pgen2/grammar.py

    r2 r391  
    2121
    2222class Grammar(object):
    23     """Pgen parsing tables tables conversion class.
     23    """Pgen parsing tables conversion class.
    2424
    2525    Once initialized, this class supplies the grammar tables for the
     
    4646
    4747    states        -- a list of DFAs, where each DFA is a list of
    48                      states, each state is is a list of arcs, and each
     48                     states, each state is a list of arcs, and each
    4949                     arc is a (i, j) pair where i is a label and j is
    5050                     a state number.  The DFA number is the index into
  • python/trunk/Lib/lib2to3/pgen2/tokenize.py

    r2 r391  
    3838           "generate_tokens", "untokenize"]
    3939del token
     40
     41try:
     42    bytes
     43except NameError:
     44    # Support bytes type in Python <= 2.5, so 2to3 turns itself into
     45    # valid Python 3 code.
     46    bytes = str
    4047
    4148def group(*choices): return '(' + '|'.join(choices) + ')'
     
    230237            toks_append(tokval)
    231238
    232 cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
     239cookie_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)')
    233240
    234241def _get_normal_name(orig_enc):
     
    254261
    255262    It detects the encoding from the presence of a utf-8 bom or an encoding
    256     cookie as specified in pep-0263. If both a bom and a cookie are present,
    257     but disagree, a SyntaxError will be raised. If the encoding cookie is an
    258     invalid charset, raise a SyntaxError.
     263    cookie as specified in pep-0263. If both a bom and a cookie are present, but
     264    disagree, a SyntaxError will be raised. If the encoding cookie is an invalid
     265    charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
     266    'utf-8-sig' is returned.
    259267
    260268    If no encoding is specified, then the default of 'utf-8' will be returned.
     
    262270    bom_found = False
    263271    encoding = None
     272    default = 'utf-8'
    264273    def read_or_stop():
    265274        try:
    266275            return readline()
    267276        except StopIteration:
    268             return b''
     277            return bytes()
    269278
    270279    def find_cookie(line):
     
    273282        except UnicodeDecodeError:
    274283            return None
    275 
    276         matches = cookie_re.findall(line_string)
    277         if not matches:
     284        match = cookie_re.match(line_string)
     285        if not match:
    278286            return None
    279         encoding = _get_normal_name(matches[0])
     287        encoding = _get_normal_name(match.group(1))
    280288        try:
    281289            codec = lookup(encoding)
     
    288296                # This behaviour mimics the Python interpreter
    289297                raise SyntaxError('encoding problem: utf-8')
    290             else:
    291                 # Allow it to be properly encoded and decoded.
    292                 encoding = 'utf-8-sig'
     298            encoding += '-sig'
    293299        return encoding
    294300
     
    297303        bom_found = True
    298304        first = first[3:]
     305        default = 'utf-8-sig'
    299306    if not first:
    300         return 'utf-8', []
     307        return default, []
    301308
    302309    encoding = find_cookie(first)
     
    306313    second = read_or_stop()
    307314    if not second:
    308         return 'utf-8', [first]
     315        return default, [first]
    309316
    310317    encoding = find_cookie(second)
     
    312319        return encoding, [first, second]
    313320
    314     return 'utf-8', [first, second]
     321    return default, [first, second]
    315322
    316323def untokenize(iterable):
  • python/trunk/Lib/lib2to3/pygram.py

    r2 r391  
    1414# The grammar file
    1515_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt")
     16_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__),
     17                                     "PatternGrammar.txt")
    1618
    1719
     
    3436python_grammar_no_print_statement = python_grammar.copy()
    3537del python_grammar_no_print_statement.keywords["print"]
     38
     39pattern_grammar = driver.load_grammar(_PATTERN_GRAMMAR_FILE)
     40pattern_symbols = Symbols(pattern_grammar)
  • python/trunk/Lib/lib2to3/pytree.py

    r2 r391  
    1616import warnings
    1717from StringIO import StringIO
    18 
    1918
    2019HUGE = 0x7FFFFFFF  # maximum repeat count, default max
     
    3130    return _type_reprs.setdefault(type_num, type_num)
    3231
    33 
    3432class Base(object):
    3533
     
    4846    children = ()  # Tuple of subnodes
    4947    was_changed = False
     48    was_checked = False
    5049
    5150    def __new__(cls, *args, **kwds):
     
    214213                return self.parent.children[i-1]
    215214
     215    def leaves(self):
     216        for child in self.children:
     217            for x in child.leaves():
     218                yield x
     219
     220    def depth(self):
     221        if self.parent is None:
     222            return 0
     223        return 1 + self.parent.depth()
     224
    216225    def get_suffix(self):
    217226        """
     
    228237            return unicode(self).encode("ascii")
    229238
    230 
    231239class Node(Base):
    232240
    233241    """Concrete implementation for interior nodes."""
    234242
    235     def __init__(self, type, children, context=None, prefix=None):
     243    def __init__(self,type, children,
     244                 context=None,
     245                 prefix=None,
     246                 fixers_applied=None):
    236247        """
    237248        Initializer.
     
    250261        if prefix is not None:
    251262            self.prefix = prefix
     263        if fixers_applied:
     264            self.fixers_applied = fixers_applied[:]
     265        else:
     266            self.fixers_applied = None
    252267
    253268    def __repr__(self):
     
    274289    def clone(self):
    275290        """Return a cloned (deep) copy of self."""
    276         return Node(self.type, [ch.clone() for ch in self.children])
     291        return Node(self.type, [ch.clone() for ch in self.children],
     292                    fixers_applied=self.fixers_applied)
    277293
    278294    def post_order(self):
     
    287303        yield self
    288304        for child in self.children:
    289             for node in child.post_order():
     305            for node in child.pre_order():
    290306                yield node
    291307
    292     @property
    293     def prefix(self):
     308    def _prefix_getter(self):
    294309        """
    295310        The whitespace and comments preceding this node in the input.
     
    299314        return self.children[0].prefix
    300315
    301     @prefix.setter
    302     def prefix(self, prefix):
     316    def _prefix_setter(self, prefix):
    303317        if self.children:
    304318            self.children[0].prefix = prefix
     319
     320    prefix = property(_prefix_getter, _prefix_setter)
    305321
    306322    def set_child(self, i, child):
     
    342358    column = 0    # Column where this token starts in the input
    343359
    344     def __init__(self, type, value, context=None, prefix=None):
     360    def __init__(self, type, value,
     361                 context=None,
     362                 prefix=None,
     363                 fixers_applied=[]):
    345364        """
    346365        Initializer.
     
    356375        if prefix is not None:
    357376            self._prefix = prefix
     377        self.fixers_applied = fixers_applied[:]
    358378
    359379    def __repr__(self):
     
    381401        """Return a cloned (deep) copy of self."""
    382402        return Leaf(self.type, self.value,
    383                     (self.prefix, (self.lineno, self.column)))
     403                    (self.prefix, (self.lineno, self.column)),
     404                    fixers_applied=self.fixers_applied)
     405
     406    def leaves(self):
     407        yield self
    384408
    385409    def post_order(self):
     
    391415        yield self
    392416
    393     @property
    394     def prefix(self):
     417    def _prefix_getter(self):
    395418        """
    396419        The whitespace and comments preceding this token in the input.
     
    398421        return self._prefix
    399422
    400     @prefix.setter
    401     def prefix(self, prefix):
     423    def _prefix_setter(self, prefix):
    402424        self.changed()
    403425        self._prefix = prefix
    404426
     427    prefix = property(_prefix_getter, _prefix_setter)
    405428
    406429def convert(gr, raw_node):
     
    636659                     if absent, matches one node;
    637660                     if present, each subsequence is an alternative [*]
    638             min: optinal minumum number of times to match, default 0
    639             max: optional maximum number of times tro match, default HUGE
     661            min: optional minimum number of times to match, default 0
     662            max: optional maximum number of times to match, default HUGE
    640663            name: optional name assigned to this match
    641664
     
    721744            # The reason for this is that hitting the recursion limit usually
    722745            # results in some ugly messages about how RuntimeErrors are being
    723             # ignored.
    724             save_stderr = sys.stderr
    725             sys.stderr = StringIO()
     746            # ignored. We don't do this on non-CPython implementation because
     747            # they don't have this problem.
     748            if hasattr(sys, "getrefcount"):
     749                save_stderr = sys.stderr
     750                sys.stderr = StringIO()
    726751            try:
    727752                for count, r in self._recursive_matches(nodes, 0):
     
    737762                    yield count, r
    738763            finally:
    739                 sys.stderr = save_stderr
     764                if hasattr(sys, "getrefcount"):
     765                    sys.stderr = save_stderr
    740766
    741767    def _iterative_matches(self, nodes):
  • python/trunk/Lib/lib2to3/refactor.py

    r2 r391  
    88provides infrastructure to write your own refactoring tool.
    99"""
     10
     11from __future__ import with_statement
    1012
    1113__author__ = "Guido van Rossum <guido@python.org>"
     
    2325# Local imports
    2426from .pgen2 import driver, tokenize, token
     27from .fixer_util import find_root
    2528from . import pytree, pygram
     29from . import btm_utils as bu
     30from . import btm_matcher as bm
    2631
    2732
     
    123128
    124129
    125 def _detect_future_print(source):
     130def _detect_future_features(source):
    126131    have_docstring = False
    127132    gen = tokenize.generate_tokens(StringIO.StringIO(source).readline)
    128133    def advance():
    129         tok = next(gen)
     134        tok = gen.next()
    130135        return tok[0], tok[1]
    131136    ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT))
     137    features = set()
    132138    try:
    133139        while True:
     
    141147            elif tp == token.NAME and value == u"from":
    142148                tp, value = advance()
    143                 if tp != token.NAME and value != u"__future__":
     149                if tp != token.NAME or value != u"__future__":
    144150                    break
    145151                tp, value = advance()
    146                 if tp != token.NAME and value != u"import":
     152                if tp != token.NAME or value != u"import":
    147153                    break
    148154                tp, value = advance()
     
    150156                    tp, value = advance()
    151157                while tp == token.NAME:
    152                     if value == u"print_function":
    153                         return True
     158                    features.add(value)
    154159                    tp, value = advance()
    155                     if tp != token.OP and value != u",":
     160                    if tp != token.OP or value != u",":
    156161                        break
    157162                    tp, value = advance()
     
    160165    except StopIteration:
    161166        pass
    162     return False
     167    return frozenset(features)
    163168
    164169
     
    169174class RefactoringTool(object):
    170175
    171     _default_options = {"print_function" : False}
     176    _default_options = {"print_function" : False,
     177                        "write_unchanged_files" : False}
    172178
    173179    CLASS_PREFIX = "Fix" # The prefix for fixer classes
     
    191197        else:
    192198            self.grammar = pygram.python_grammar
     199        # When this is True, the refactor*() methods will call write_file() for
     200        # files processed even if they were not changed during refactoring. If
     201        # and only if the refactor method's write parameter was True.
     202        self.write_unchanged_files = self.options.get("write_unchanged_files")
    193203        self.errors = []
    194204        self.logger = logging.getLogger("RefactoringTool")
     
    200210        self.pre_order, self.post_order = self.get_fixers()
    201211
    202         self.pre_order_heads = _get_headnode_dict(self.pre_order)
    203         self.post_order_heads = _get_headnode_dict(self.post_order)
    204212
    205213        self.files = []  # List of files that were or should be modified
     214
     215        self.BM = bm.BottomMatcher()
     216        self.bmi_pre_order = [] # Bottom Matcher incompatible fixers
     217        self.bmi_post_order = []
     218
     219        for fixer in chain(self.post_order, self.pre_order):
     220            if fixer.BM_compatible:
     221                self.BM.add_fixer(fixer)
     222                # remove fixers that will be handled by the bottom-up
     223                # matcher
     224            elif fixer in self.pre_order:
     225                self.bmi_pre_order.append(fixer)
     226            elif fixer in self.post_order:
     227                self.bmi_post_order.append(fixer)
     228
     229        self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order)
     230        self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order)
     231
     232
    206233
    207234    def get_fixers(self):
     
    267294    def refactor(self, items, write=False, doctests_only=False):
    268295        """Refactor a list of files and directories."""
     296
    269297        for dir_or_file in items:
    270298            if os.path.isdir(dir_or_file):
     
    280308        Files and subdirectories starting with '.' are skipped.
    281309        """
     310        py_ext = os.extsep + "py"
    282311        for dirpath, dirnames, filenames in os.walk(dir_name):
    283312            self.log_debug("Descending into %s", dirpath)
     
    285314            filenames.sort()
    286315            for name in filenames:
    287                 if not name.startswith(".") and \
    288                         os.path.splitext(name)[1].endswith("py"):
     316                if (not name.startswith(".") and
     317                    os.path.splitext(name)[1] == py_ext):
    289318                    fullname = os.path.join(dirpath, name)
    290319                    self.refactor_file(fullname, write, doctests_only)
     
    298327        try:
    299328            f = open(filename, "rb")
    300         except IOError, err:
     329        except IOError as err:
    301330            self.log_error("Can't open %s: %s", filename, err)
    302331            return None, None
     
    318347            self.log_debug("Refactoring doctests in %s", filename)
    319348            output = self.refactor_docstring(input, filename)
    320             if output != input:
     349            if self.write_unchanged_files or output != input:
    321350                self.processed_file(output, filename, input, write, encoding)
    322351            else:
     
    324353        else:
    325354            tree = self.refactor_string(input, filename)
    326             if tree and tree.was_changed:
     355            if self.write_unchanged_files or (tree and tree.was_changed):
    327356                # The [:-1] is to take off the \n we added earlier
    328357                self.processed_file(unicode(tree)[:-1], filename,
     
    342371            there were errors during the parse.
    343372        """
    344         if _detect_future_print(data):
     373        features = _detect_future_features(data)
     374        if "print_function" in features:
    345375            self.driver.grammar = pygram.python_grammar_no_print_statement
    346376        try:
    347377            tree = self.driver.parse_string(data)
    348         except Exception, err:
     378        except Exception as err:
    349379            self.log_error("Can't parse %s: %s: %s",
    350380                           name, err.__class__.__name__, err)
     
    352382        finally:
    353383            self.driver.grammar = self.grammar
     384        tree.future_features = features
    354385        self.log_debug("Refactoring %s", name)
    355386        self.refactor_tree(tree, name)
     
    361392            self.log_debug("Refactoring doctests in stdin")
    362393            output = self.refactor_docstring(input, "<stdin>")
    363             if output != input:
     394            if self.write_unchanged_files or output != input:
    364395                self.processed_file(output, "<stdin>", input)
    365396            else:
     
    367398        else:
    368399            tree = self.refactor_string(input, "<stdin>")
    369             if tree and tree.was_changed:
     400            if self.write_unchanged_files or (tree and tree.was_changed):
    370401                self.processed_file(unicode(tree), "<stdin>", input)
    371402            else:
     
    374405    def refactor_tree(self, tree, name):
    375406        """Refactors a parse tree (modifying the tree in place).
     407
     408        For compatible patterns the bottom matcher module is
     409        used. Otherwise the tree is traversed node-to-node for
     410        matches.
    376411
    377412        Args:
     
    383418            True if the tree was modified, False otherwise.
    384419        """
     420
    385421        for fixer in chain(self.pre_order, self.post_order):
    386422            fixer.start_tree(tree, name)
    387423
    388         self.traverse_by(self.pre_order_heads, tree.pre_order())
    389         self.traverse_by(self.post_order_heads, tree.post_order())
     424        #use traditional matching for the incompatible fixers
     425        self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
     426        self.traverse_by(self.bmi_post_order_heads, tree.post_order())
     427
     428        # obtain a set of candidate nodes
     429        match_set = self.BM.run(tree.leaves())
     430
     431        while any(match_set.values()):
     432            for fixer in self.BM.fixers:
     433                if fixer in match_set and match_set[fixer]:
     434                    #sort by depth; apply fixers from bottom(of the AST) to top
     435                    match_set[fixer].sort(key=pytree.Base.depth, reverse=True)
     436
     437                    if fixer.keep_line_order:
     438                        #some fixers(eg fix_imports) must be applied
     439                        #with the original file's line order
     440                        match_set[fixer].sort(key=pytree.Base.get_lineno)
     441
     442                    for node in list(match_set[fixer]):
     443                        if node in match_set[fixer]:
     444                            match_set[fixer].remove(node)
     445
     446                        try:
     447                            find_root(node)
     448                        except ValueError:
     449                            # this node has been cut off from a
     450                            # previous transformation ; skip
     451                            continue
     452
     453                        if node.fixers_applied and fixer in node.fixers_applied:
     454                            # do not apply the same fixer again
     455                            continue
     456
     457                        results = fixer.match(node)
     458
     459                        if results:
     460                            new = fixer.transform(node, results)
     461                            if new is not None:
     462                                node.replace(new)
     463                                #new.fixers_applied.append(fixer)
     464                                for node in new.post_order():
     465                                    # do not apply the fixer again to
     466                                    # this or any subnode
     467                                    if not node.fixers_applied:
     468                                        node.fixers_applied = []
     469                                    node.fixers_applied.append(fixer)
     470
     471                                # update the original match set for
     472                                # the added code
     473                                new_matches = self.BM.run(new.leaves())
     474                                for fxr in new_matches:
     475                                    if not fxr in match_set:
     476                                        match_set[fxr]=[]
     477
     478                                    match_set[fxr].extend(new_matches[fxr])
    390479
    391480        for fixer in chain(self.pre_order, self.post_order):
     
    419508                       encoding=None):
    420509        """
    421         Called when a file has been refactored, and there are changes.
     510        Called when a file has been refactored and there may be changes.
    422511        """
    423512        self.files.append(filename)
     
    430519        if equal:
    431520            self.log_debug("No changes to %s", filename)
    432             return
     521            if not self.write_unchanged_files:
     522                return
    433523        if write:
    434524            self.write_file(new_text, filename, old_text, encoding)
     
    445535        try:
    446536            f = _open_with_encoding(filename, "w", encoding=encoding)
    447         except os.error, err:
     537        except os.error as err:
    448538            self.log_error("Can't create %s: %s", filename, err)
    449539            return
    450540        try:
    451541            f.write(_to_system_newlines(new_text))
    452         except os.error, err:
     542        except os.error as err:
    453543            self.log_error("Can't write %s: %s", filename, err)
    454544        finally:
     
    513603        try:
    514604            tree = self.parse_block(block, lineno, indent)
    515         except Exception, err:
    516             if self.log.isEnabledFor(logging.DEBUG):
     605        except Exception as err:
     606            if self.logger.isEnabledFor(logging.DEBUG):
    517607                for line in block:
    518608                    self.log_debug("Source: %s", line.rstrip(u"\n"))
     
    561651        in the parser diagnostics and embedded into the parse tree.
    562652        """
    563         return self.driver.parse_tokens(self.wrap_toks(block, lineno, indent))
     653        tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent))
     654        tree.future_features = frozenset()
     655        return tree
    564656
    565657    def wrap_toks(self, block, lineno, indent):
     
    606698        super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
    607699        self.queue = None
     700        self.output_lock = None
    608701
    609702    def refactor(self, items, write=False, doctests_only=False,
     
    619712            raise RuntimeError("already doing multiple processes")
    620713        self.queue = multiprocessing.JoinableQueue()
     714        self.output_lock = multiprocessing.Lock()
    621715        processes = [multiprocessing.Process(target=self._child)
    622716                     for i in xrange(num_processes)]
  • python/trunk/Lib/lib2to3/tests/data/bom.py

    r2 r391  
    11# coding: utf-8
    22print "BOM BOOM!"
    3 
  • python/trunk/Lib/lib2to3/tests/data/py2_test_grammar.py

    r2 r391  
    317317        x = 1; pass; del x
    318318        def foo():
    319             # verify statments that end with semi-colons
     319            # verify statements that end with semi-colons
    320320            x = 1; pass; del x;
    321321        foo()
  • python/trunk/Lib/lib2to3/tests/data/py3_test_grammar.py

    r2 r391  
    357357        x = 1; pass; del x
    358358        def foo():
    359             # verify statments that end with semi-colons
     359            # verify statements that end with semi-colons
    360360            x = 1; pass; del x;
    361361        foo()
  • python/trunk/Lib/lib2to3/tests/test_fixers.py

    r2 r391  
    869869        self.check(b, a)
    870870
     871    def test_None_value(self):
     872        b = """raise Exception(5), None, tb"""
     873        a = """raise Exception(5).with_traceback(tb)"""
     874        self.check(b, a)
     875
    871876    def test_tuple_value(self):
    872877        b = """raise Exception, (5, 6, 7)"""
     
    14011406        self.check(b, a)
    14021407
    1403     def test_14(self):
     1408    def test_28(self):
    14041409        b = "[i for i in d.viewkeys()]"
    14051410        a = "[i for i in d.keys()]"
    14061411        self.check(b, a)
    14071412
    1408     def test_15(self):
     1413    def test_29(self):
    14091414        b = "(i for i in d.viewkeys())"
    14101415        a = "(i for i in d.keys())"
    14111416        self.check(b, a)
    14121417
    1413     def test_17(self):
     1418    def test_30(self):
    14141419        b = "iter(d.viewkeys())"
    14151420        a = "iter(d.keys())"
    14161421        self.check(b, a)
    14171422
    1418     def test_18(self):
     1423    def test_31(self):
    14191424        b = "list(d.viewkeys())"
    14201425        a = "list(d.keys())"
    14211426        self.check(b, a)
    14221427
    1423     def test_19(self):
     1428    def test_32(self):
    14241429        b = "sorted(d.viewkeys())"
    14251430        a = "sorted(d.keys())"
     
    14971502        for call in fixer_util.consuming_calls:
    14981503            self.unchanged("a = %s(range(10))" % call)
     1504
     1505class Test_xrange_with_reduce(FixerTestCase):
     1506
     1507    def setUp(self):
     1508        super(Test_xrange_with_reduce, self).setUp(["xrange", "reduce"])
     1509
     1510    def test_double_transform(self):
     1511        b = """reduce(x, xrange(5))"""
     1512        a = """from functools import reduce
     1513reduce(x, range(5))"""
     1514        self.check(b, a)
    14991515
    15001516class Test_raw_input(FixerTestCase):
     
    18021818                    a = "from %s import %s as foo_bar" % (new, member)
    18031819                    self.check(b, a)
     1820                    b = "from %s import %s as blah, %s" % (old, member, member)
     1821                    a = "from %s import %s as blah, %s" % (new, member, member)
     1822                    self.check(b, a)
    18041823
    18051824    def test_star(self):
     
    18071826            s = "from %s import *" % old
    18081827            self.warns_unchanged(s, "Cannot handle star imports")
     1828
     1829    def test_indented(self):
     1830        b = """
     1831def foo():
     1832    from urllib import urlencode, urlopen
     1833"""
     1834        a = """
     1835def foo():
     1836    from urllib.parse import urlencode
     1837    from urllib.request import urlopen
     1838"""
     1839        self.check(b, a)
     1840
     1841        b = """
     1842def foo():
     1843    other()
     1844    from urllib import urlencode, urlopen
     1845"""
     1846        a = """
     1847def foo():
     1848    other()
     1849    from urllib.parse import urlencode
     1850    from urllib.request import urlopen
     1851"""
     1852        self.check(b, a)
     1853
     1854
    18091855
    18101856    def test_import_module_usage(self):
     
    27792825        self.check(b, a)
    27802826
     2827    def test_native_literal_escape_u(self):
     2828        b = """'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2829        a = """'\\\\\\\\u20ac\\\\U0001d121\\\\u20ac'"""
     2830        self.check(b, a)
     2831
     2832        b = """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2833        a = """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2834        self.check(b, a)
     2835
     2836    def test_bytes_literal_escape_u(self):
     2837        b = """b'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2838        a = """b'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2839        self.check(b, a)
     2840
     2841        b = """br'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2842        a = """br'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2843        self.check(b, a)
     2844
     2845    def test_unicode_literal_escape_u(self):
     2846        b = """u'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2847        a = """'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2848        self.check(b, a)
     2849
     2850        b = """ur'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2851        a = """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2852        self.check(b, a)
     2853
     2854    def test_native_unicode_literal_escape_u(self):
     2855        f = 'from __future__ import unicode_literals\n'
     2856        b = f + """'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2857        a = f + """'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2858        self.check(b, a)
     2859
     2860        b = f + """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2861        a = f + """r'\\\\\\u20ac\\U0001d121\\\\u20ac'"""
     2862        self.check(b, a)
     2863
    27812864class Test_callable(FixerTestCase):
    27822865    fixer = "callable"
     
    29363019        a = """sorted(filter(f, 'abc'), key=blah)[0]"""
    29373020        self.unchanged(a)
     3021        a = """enumerate(filter(f, 'abc'))"""
     3022        self.unchanged(a)
     3023        a = """enumerate(filter(f, 'abc'), start=1)"""
     3024        self.unchanged(a)
    29383025        a = """for i in filter(f, 'abc'): pass"""
    29393026        self.unchanged(a)
     
    30443131        a = """sorted(map(f, 'abc'), key=blah)[0]"""
    30453132        self.unchanged(a)
     3133        a = """enumerate(map(f, 'abc'))"""
     3134        self.unchanged(a)
     3135        a = """enumerate(map(f, 'abc'), start=1)"""
     3136        self.unchanged(a)
    30463137        a = """for i in map(f, 'abc'): pass"""
    30473138        self.unchanged(a)
     
    31063197        self.unchanged(a)
    31073198        a = """sorted(zip(a, b), key=blah)[0]"""
     3199        self.unchanged(a)
     3200        a = """enumerate(zip(a, b))"""
     3201        self.unchanged(a)
     3202        a = """enumerate(zip(a, b), start=1)"""
    31083203        self.unchanged(a)
    31093204        a = """for i in zip(a, b): pass"""
     
    35783673        self.checkall(b, a)
    35793674
    3580     def test_2(self):
     3675    def test_qualified(self):
    35813676        b = """itertools.ifilterfalse(a, b)"""
    35823677        a = """itertools.filterfalse(a, b)"""
    35833678        self.check(b, a)
    35843679
    3585     def test_4(self):
     3680        b = """itertools.izip_longest(a, b)"""
     3681        a = """itertools.zip_longest(a, b)"""
     3682        self.check(b, a)
     3683
     3684    def test_2(self):
    35863685        b = """ifilterfalse(a, b)"""
    35873686        a = """filterfalse(a, b)"""
     3687        self.check(b, a)
     3688
     3689        b = """izip_longest(a, b)"""
     3690        a = """zip_longest(a, b)"""
    35883691        self.check(b, a)
    35893692
     
    35983701        self.check(b, a)
    35993702
     3703        b = """    itertools.izip_longest(a, b)"""
     3704        a = """    itertools.zip_longest(a, b)"""
     3705        self.check(b, a)
     3706
    36003707    def test_run_order(self):
    36013708        self.assert_runs_after('map', 'zip', 'filter')
     3709
    36023710
    36033711class Test_itertools_imports(FixerTestCase):
     
    36133721        self.check(b, a)
    36143722
     3723        b = "from itertools import chain, imap, izip"
     3724        a = "from itertools import chain"
     3725        self.check(b, a)
     3726
    36153727    def test_comments(self):
    36163728        b = "#foo\nfrom itertools import imap, izip"
     
    36473759        self.unchanged(s)
    36483760
    3649     def test_ifilter(self):
    3650         b = "from itertools import ifilterfalse"
    3651         a = "from itertools import filterfalse"
    3652         self.check(b, a)
    3653 
    3654         b = "from itertools import imap, ifilterfalse, foo"
    3655         a = "from itertools import filterfalse, foo"
    3656         self.check(b, a)
    3657 
    3658         b = "from itertools import bar, ifilterfalse, foo"
    3659         a = "from itertools import bar, filterfalse, foo"
    3660         self.check(b, a)
     3761    def test_ifilter_and_zip_longest(self):
     3762        for name in "filterfalse", "zip_longest":
     3763            b = "from itertools import i%s" % (name,)
     3764            a = "from itertools import %s" % (name,)
     3765            self.check(b, a)
     3766
     3767            b = "from itertools import imap, i%s, foo" % (name,)
     3768            a = "from itertools import %s, foo" % (name,)
     3769            self.check(b, a)
     3770
     3771            b = "from itertools import bar, i%s, foo" % (name,)
     3772            a = "from itertools import bar, %s, foo" % (name,)
     3773            self.check(b, a)
     3774
     3775    def test_import_star(self):
     3776        s = "from itertools import *"
     3777        self.unchanged(s)
    36613778
    36623779
     
    36803797            return self.always_exists or (name in self.present_files)
    36813798
    3682         from ..fixes import fix_import
     3799        from lib2to3.fixes import fix_import
    36833800        fix_import.exists = fake_exists
    36843801
     
    37233840        self.unchanged(s)
    37243841
     3842    def test_with_absolute_import_enabled(self):
     3843        s = "from __future__ import absolute_import\nimport bar"
     3844        self.always_exists = False
     3845        self.present_files = set(["__init__.py", "bar.py"])
     3846        self.unchanged(s)
     3847
    37253848    def test_in_package(self):
    37263849        b = "import bar"
     
    37363859        self.present_files = set(["__init__.py", "bar" + os.path.sep])
    37373860        self.check(b, a)
     3861
     3862    def test_already_relative_import(self):
     3863        s = "from . import bar"
     3864        self.unchanged(s)
    37383865
    37393866    def test_comments_and_indent(self):
     
    42794406        self.check(b, a)
    42804407
     4408        b = "operator .sequenceIncludes(x, y)"
     4409        a = "operator .contains(x, y)"
     4410        self.check(b, a)
     4411
     4412        b = "operator.  sequenceIncludes(x, y)"
     4413        a = "operator.  contains(x, y)"
     4414        self.check(b, a)
     4415
     4416    def test_operator_isSequenceType(self):
     4417        b = "operator.isSequenceType(x)"
     4418        a = "import collections\nisinstance(x, collections.Sequence)"
     4419        self.check(b, a)
     4420
     4421    def test_operator_isMappingType(self):
     4422        b = "operator.isMappingType(x)"
     4423        a = "import collections\nisinstance(x, collections.Mapping)"
     4424        self.check(b, a)
     4425
     4426    def test_operator_isNumberType(self):
     4427        b = "operator.isNumberType(x)"
     4428        a = "import numbers\nisinstance(x, numbers.Number)"
     4429        self.check(b, a)
     4430
     4431    def test_operator_repeat(self):
     4432        b = "operator.repeat(x, n)"
     4433        a = "operator.mul(x, n)"
     4434        self.check(b, a)
     4435
     4436        b = "operator .repeat(x, n)"
     4437        a = "operator .mul(x, n)"
     4438        self.check(b, a)
     4439
     4440        b = "operator.  repeat(x, n)"
     4441        a = "operator.  mul(x, n)"
     4442        self.check(b, a)
     4443
     4444    def test_operator_irepeat(self):
     4445        b = "operator.irepeat(x, n)"
     4446        a = "operator.imul(x, n)"
     4447        self.check(b, a)
     4448
     4449        b = "operator .irepeat(x, n)"
     4450        a = "operator .imul(x, n)"
     4451        self.check(b, a)
     4452
     4453        b = "operator.  irepeat(x, n)"
     4454        a = "operator.  imul(x, n)"
     4455        self.check(b, a)
     4456
    42814457    def test_bare_isCallable(self):
    42824458        s = "isCallable(x)"
    4283         self.warns_unchanged(s, "You should use hasattr(x, '__call__') here.")
     4459        t = "You should use 'hasattr(x, '__call__')' here."
     4460        self.warns_unchanged(s, t)
    42844461
    42854462    def test_bare_sequenceIncludes(self):
    42864463        s = "sequenceIncludes(x, y)"
    4287         self.warns_unchanged(s, "You should use operator.contains here.")
     4464        t = "You should use 'operator.contains(x, y)' here."
     4465        self.warns_unchanged(s, t)
     4466
     4467    def test_bare_operator_isSequenceType(self):
     4468        s = "isSequenceType(z)"
     4469        t = "You should use 'isinstance(z, collections.Sequence)' here."
     4470        self.warns_unchanged(s, t)
     4471
     4472    def test_bare_operator_isMappingType(self):
     4473        s = "isMappingType(x)"
     4474        t = "You should use 'isinstance(x, collections.Mapping)' here."
     4475        self.warns_unchanged(s, t)
     4476
     4477    def test_bare_operator_isNumberType(self):
     4478        s = "isNumberType(y)"
     4479        t = "You should use 'isinstance(y, numbers.Number)' here."
     4480        self.warns_unchanged(s, t)
     4481
     4482    def test_bare_operator_repeat(self):
     4483        s = "repeat(x, n)"
     4484        t = "You should use 'operator.mul(x, n)' here."
     4485        self.warns_unchanged(s, t)
     4486
     4487    def test_bare_operator_irepeat(self):
     4488        s = "irepeat(y, 187)"
     4489        t = "You should use 'operator.imul(y, 187)' here."
     4490        self.warns_unchanged(s, t)
     4491
     4492
     4493class Test_exitfunc(FixerTestCase):
     4494
     4495    fixer = "exitfunc"
     4496
     4497    def test_simple(self):
     4498        b = """
     4499            import sys
     4500            sys.exitfunc = my_atexit
     4501            """
     4502        a = """
     4503            import sys
     4504            import atexit
     4505            atexit.register(my_atexit)
     4506            """
     4507        self.check(b, a)
     4508
     4509    def test_names_import(self):
     4510        b = """
     4511            import sys, crumbs
     4512            sys.exitfunc = my_func
     4513            """
     4514        a = """
     4515            import sys, crumbs, atexit
     4516            atexit.register(my_func)
     4517            """
     4518        self.check(b, a)
     4519
     4520    def test_complex_expression(self):
     4521        b = """
     4522            import sys
     4523            sys.exitfunc = do(d)/a()+complex(f=23, g=23)*expression
     4524            """
     4525        a = """
     4526            import sys
     4527            import atexit
     4528            atexit.register(do(d)/a()+complex(f=23, g=23)*expression)
     4529            """
     4530        self.check(b, a)
     4531
     4532    def test_comments(self):
     4533        b = """
     4534            import sys # Foo
     4535            sys.exitfunc = f # Blah
     4536            """
     4537        a = """
     4538            import sys
     4539            import atexit # Foo
     4540            atexit.register(f) # Blah
     4541            """
     4542        self.check(b, a)
     4543
     4544        b = """
     4545            import apples, sys, crumbs, larry # Pleasant comments
     4546            sys.exitfunc = func
     4547            """
     4548        a = """
     4549            import apples, sys, crumbs, larry, atexit # Pleasant comments
     4550            atexit.register(func)
     4551            """
     4552        self.check(b, a)
     4553
     4554    def test_in_a_function(self):
     4555        b = """
     4556            import sys
     4557            def f():
     4558                sys.exitfunc = func
     4559            """
     4560        a = """
     4561            import sys
     4562            import atexit
     4563            def f():
     4564                atexit.register(func)
     4565             """
     4566        self.check(b, a)
     4567
     4568    def test_no_sys_import(self):
     4569        b = """sys.exitfunc = f"""
     4570        a = """atexit.register(f)"""
     4571        msg = ("Can't find sys import; Please add an atexit import at the "
     4572            "top of your file.")
     4573        self.warns(b, a, msg)
     4574
     4575
     4576    def test_unchanged(self):
     4577        s = """f(sys.exitfunc)"""
     4578        self.unchanged(s)
  • python/trunk/Lib/lib2to3/tests/test_main.py

    r2 r391  
    33import codecs
    44import logging
     5import os
     6import re
     7import shutil
    58import StringIO
     9import sys
     10import tempfile
    611import unittest
    712
     
    914
    1015
     16TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
     17PY2_TEST_MODULE = os.path.join(TEST_DATA_DIR, "py2_test_grammar.py")
     18
     19
    1120class TestMain(unittest.TestCase):
     21
     22    if not hasattr(unittest.TestCase, 'assertNotRegex'):
     23        # This method was only introduced in 3.2.
     24        def assertNotRegex(self, text, regexp, msg=None):
     25            import re
     26            if not hasattr(regexp, 'search'):
     27                regexp = re.compile(regexp)
     28            if regexp.search(text):
     29                self.fail("regexp %s MATCHED text %r" % (regexp.pattern, text))
     30
     31    def setUp(self):
     32        self.temp_dir = None  # tearDown() will rmtree this directory if set.
    1233
    1334    def tearDown(self):
    1435        # Clean up logging configuration down by main.
    1536        del logging.root.handlers[:]
     37        if self.temp_dir:
     38            shutil.rmtree(self.temp_dir)
    1639
    1740    def run_2to3_capture(self, args, in_capture, out_capture, err_capture):
     
    4063        self.assertTrue("WARNING: couldn't encode <stdin>'s diff for "
    4164                        "your terminal" in err.getvalue())
     65
     66    def setup_test_source_trees(self):
     67        """Setup a test source tree and output destination tree."""
     68        self.temp_dir = tempfile.mkdtemp()  # tearDown() cleans this up.
     69        self.py2_src_dir = os.path.join(self.temp_dir, "python2_project")
     70        self.py3_dest_dir = os.path.join(self.temp_dir, "python3_project")
     71        os.mkdir(self.py2_src_dir)
     72        os.mkdir(self.py3_dest_dir)
     73        # Turn it into a package with a few files.
     74        self.setup_files = []
     75        open(os.path.join(self.py2_src_dir, "__init__.py"), "w").close()
     76        self.setup_files.append("__init__.py")
     77        shutil.copy(PY2_TEST_MODULE, self.py2_src_dir)
     78        self.setup_files.append(os.path.basename(PY2_TEST_MODULE))
     79        self.trivial_py2_file = os.path.join(self.py2_src_dir, "trivial.py")
     80        self.init_py2_file = os.path.join(self.py2_src_dir, "__init__.py")
     81        with open(self.trivial_py2_file, "w") as trivial:
     82            trivial.write("print 'I need a simple conversion.'")
     83        self.setup_files.append("trivial.py")
     84
     85    def test_filename_changing_on_output_single_dir(self):
     86        """2to3 a single directory with a new output dir and suffix."""
     87        self.setup_test_source_trees()
     88        out = StringIO.StringIO()
     89        err = StringIO.StringIO()
     90        suffix = "TEST"
     91        ret = self.run_2to3_capture(
     92                ["-n", "--add-suffix", suffix, "--write-unchanged-files",
     93                 "--no-diffs", "--output-dir",
     94                 self.py3_dest_dir, self.py2_src_dir],
     95                StringIO.StringIO(""), out, err)
     96        self.assertEqual(ret, 0)
     97        stderr = err.getvalue()
     98        self.assertIn(" implies -w.", stderr)
     99        self.assertIn(
     100                "Output in %r will mirror the input directory %r layout" % (
     101                        self.py3_dest_dir, self.py2_src_dir), stderr)
     102        self.assertEqual(set(name+suffix for name in self.setup_files),
     103                         set(os.listdir(self.py3_dest_dir)))
     104        for name in self.setup_files:
     105            self.assertIn("Writing converted %s to %s" % (
     106                    os.path.join(self.py2_src_dir, name),
     107                    os.path.join(self.py3_dest_dir, name+suffix)), stderr)
     108        sep = re.escape(os.sep)
     109        self.assertRegexpMatches(
     110                stderr, r"No changes to .*/__init__\.py".replace("/", sep))
     111        self.assertNotRegex(
     112                stderr, r"No changes to .*/trivial\.py".replace("/", sep))
     113
     114    def test_filename_changing_on_output_two_files(self):
     115        """2to3 two files in one directory with a new output dir."""
     116        self.setup_test_source_trees()
     117        err = StringIO.StringIO()
     118        py2_files = [self.trivial_py2_file, self.init_py2_file]
     119        expected_files = set(os.path.basename(name) for name in py2_files)
     120        ret = self.run_2to3_capture(
     121                ["-n", "-w", "--write-unchanged-files",
     122                 "--no-diffs", "--output-dir", self.py3_dest_dir] + py2_files,
     123                StringIO.StringIO(""), StringIO.StringIO(), err)
     124        self.assertEqual(ret, 0)
     125        stderr = err.getvalue()
     126        self.assertIn(
     127                "Output in %r will mirror the input directory %r layout" % (
     128                        self.py3_dest_dir, self.py2_src_dir), stderr)
     129        self.assertEqual(expected_files, set(os.listdir(self.py3_dest_dir)))
     130
     131    def test_filename_changing_on_output_single_file(self):
     132        """2to3 a single file with a new output dir."""
     133        self.setup_test_source_trees()
     134        err = StringIO.StringIO()
     135        ret = self.run_2to3_capture(
     136                ["-n", "-w", "--no-diffs", "--output-dir", self.py3_dest_dir,
     137                 self.trivial_py2_file],
     138                StringIO.StringIO(""), StringIO.StringIO(), err)
     139        self.assertEqual(ret, 0)
     140        stderr = err.getvalue()
     141        self.assertIn(
     142                "Output in %r will mirror the input directory %r layout" % (
     143                        self.py3_dest_dir, self.py2_src_dir), stderr)
     144        self.assertEqual(set([os.path.basename(self.trivial_py2_file)]),
     145                         set(os.listdir(self.py3_dest_dir)))
     146
     147
     148if __name__ == '__main__':
     149    unittest.main()
  • python/trunk/Lib/lib2to3/tests/test_parser.py

    r2 r391  
    77"""
    88
     9from __future__ import with_statement
     10
    911# Testing imports
    1012from . import support
     
    1315# Python imports
    1416import os
    15 import io
    1617import sys
    1718
     
    1920from lib2to3.pgen2 import tokenize
    2021from ..pgen2.parse import ParseError
     22from lib2to3.pygram import python_symbols as syms
     23
     24
     25class TestDriver(support.TestCase):
     26
     27    def test_formfeed(self):
     28        s = """print 1\n\x0Cprint 2\n"""
     29        t = driver.parse_string(s)
     30        self.assertEqual(t.children[0].children[0].type, syms.print_stmt)
     31        self.assertEqual(t.children[1].children[0].type, syms.print_stmt)
    2132
    2233
     
    6374
    6475
    65 # Adapated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef
     76# Adaptated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef
    6677class TestFunctionAnnotations(GrammarTest):
    6778    def test_1(self):
     
    157168            self.assertTrue(encoding is not None,
    158169                            "can't detect encoding for %s" % filepath)
    159             with io.open(filepath, "r", encoding=encoding) as fp:
     170            with open(filepath, "r") as fp:
    160171                source = fp.read()
     172                source = source.decode(encoding)
    161173            tree = driver.parse_string(source)
    162174            new = unicode(tree)
     
    204216
    205217def diff(fn, result, encoding):
    206     f = io.open("@", "w", encoding=encoding)
     218    f = open("@", "w")
    207219    try:
    208         f.write(result)
     220        f.write(result.encode(encoding))
    209221    finally:
    210222        f.close()
    211223    try:
    212         return os.system("diff -u %r @" % fn)
     224        fn = fn.replace('"', '\\"')
     225        return os.system('diff -u "%s" @' % fn)
    213226    finally:
    214227        os.remove("@")
  • python/trunk/Lib/lib2to3/tests/test_pytree.py

    r2 r391  
    1010"""
    1111
     12from __future__ import with_statement
     13
     14import sys
    1215import warnings
    1316
     
    2932    """Unit tests for nodes (Base, Leaf, Node)."""
    3033
    31     def test_deprecated_prefix_methods(self):
    32         l = pytree.Leaf(100, "foo")
    33         with warnings.catch_warnings(record=True) as w:
    34             warnings.simplefilter("always", DeprecationWarning)
    35             self.assertEqual(l.get_prefix(), "")
    36             l.set_prefix("hi")
    37         self.assertEqual(l.prefix, "hi")
    38         self.assertEqual(len(w), 2)
    39         for warning in w:
    40             self.assertTrue(warning.category is DeprecationWarning)
    41         self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
    42                              "use the prefix property")
    43         self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
    44                              "use the prefix property")
     34    if sys.version_info >= (2,6):
     35        # warnings.catch_warnings is new in 2.6.
     36        def test_deprecated_prefix_methods(self):
     37            l = pytree.Leaf(100, "foo")
     38            with warnings.catch_warnings(record=True) as w:
     39                warnings.simplefilter("always", DeprecationWarning)
     40                self.assertEqual(l.get_prefix(), "")
     41                l.set_prefix("hi")
     42            self.assertEqual(l.prefix, "hi")
     43            self.assertEqual(len(w), 2)
     44            for warning in w:
     45                self.assertTrue(warning.category is DeprecationWarning)
     46            self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
     47                                 "use the prefix property")
     48            self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
     49                                 "use the prefix property")
    4550
    4651    def test_instantiate_base(self):
     
    174179        self.assertTrue(isinstance(n1.children, list))
    175180
     181    def test_leaves(self):
     182        l1 = pytree.Leaf(100, "foo")
     183        l2 = pytree.Leaf(100, "bar")
     184        l3 = pytree.Leaf(100, "fooey")
     185        n2 = pytree.Node(1000, [l1, l2])
     186        n3 = pytree.Node(1000, [l3])
     187        n1 = pytree.Node(1000, [n2, n3])
     188
     189        self.assertEqual(list(n1.leaves()), [l1, l2, l3])
     190
     191    def test_depth(self):
     192        l1 = pytree.Leaf(100, "foo")
     193        l2 = pytree.Leaf(100, "bar")
     194        n2 = pytree.Node(1000, [l1, l2])
     195        n3 = pytree.Node(1000, [])
     196        n1 = pytree.Node(1000, [n2, n3])
     197
     198        self.assertEqual(l1.depth(), 2)
     199        self.assertEqual(n3.depth(), 1)
     200        self.assertEqual(n1.depth(), 0)
     201
    176202    def test_post_order(self):
    177203        l1 = pytree.Leaf(100, "foo")
    178204        l2 = pytree.Leaf(100, "bar")
    179         n1 = pytree.Node(1000, [l1, l2])
    180         self.assertEqual(list(n1.post_order()), [l1, l2, n1])
     205        l3 = pytree.Leaf(100, "fooey")
     206        c1 = pytree.Node(1000, [l1, l2])
     207        n1 = pytree.Node(1000, [c1, l3])
     208        self.assertEqual(list(n1.post_order()), [l1, l2, c1, l3, n1])
    181209
    182210    def test_pre_order(self):
    183211        l1 = pytree.Leaf(100, "foo")
    184212        l2 = pytree.Leaf(100, "bar")
    185         n1 = pytree.Node(1000, [l1, l2])
    186         self.assertEqual(list(n1.pre_order()), [n1, l1, l2])
     213        l3 = pytree.Leaf(100, "fooey")
     214        c1 = pytree.Node(1000, [l1, l2])
     215        n1 = pytree.Node(1000, [c1, l3])
     216        self.assertEqual(list(n1.pre_order()), [n1, c1, l1, l2, l3])
    187217
    188218    def test_changed(self):
  • python/trunk/Lib/lib2to3/tests/test_refactor.py

    r2 r391  
    22Unit tests for refactor.py.
    33"""
     4
     5from __future__ import with_statement
    46
    57import sys
     
    5254                        pygram.python_grammar_no_print_statement)
    5355
     56    def test_write_unchanged_files_option(self):
     57        rt = self.rt()
     58        self.assertFalse(rt.write_unchanged_files)
     59        rt = self.rt({"write_unchanged_files" : True})
     60        self.assertTrue(rt.write_unchanged_files)
     61
    5462    def test_fixer_loading_helpers(self):
    5563        contents = ["explicit", "first", "last", "parrot", "preorder"]
     
    6270                         ["myfixes.fix_" + name for name in contents])
    6371
    64     def test_detect_future_print(self):
    65         run = refactor._detect_future_print
    66         self.assertFalse(run(""))
    67         self.assertTrue(run("from __future__ import print_function"))
    68         self.assertFalse(run("from __future__ import generators"))
    69         self.assertFalse(run("from __future__ import generators, feature"))
    70         input = "from __future__ import generators, print_function"
    71         self.assertTrue(run(input))
    72         input ="from __future__ import print_function, generators"
    73         self.assertTrue(run(input))
    74         input = "from __future__ import (print_function,)"
    75         self.assertTrue(run(input))
    76         input = "from __future__ import (generators, print_function)"
    77         self.assertTrue(run(input))
    78         input = "from __future__ import (generators, nested_scopes)"
    79         self.assertFalse(run(input))
    80         input = """from __future__ import generators
     72    def test_detect_future_features(self):
     73        run = refactor._detect_future_features
     74        fs = frozenset
     75        empty = fs()
     76        self.assertEqual(run(""), empty)
     77        self.assertEqual(run("from __future__ import print_function"),
     78                         fs(("print_function",)))
     79        self.assertEqual(run("from __future__ import generators"),
     80                         fs(("generators",)))
     81        self.assertEqual(run("from __future__ import generators, feature"),
     82                         fs(("generators", "feature")))
     83        inp = "from __future__ import generators, print_function"
     84        self.assertEqual(run(inp), fs(("generators", "print_function")))
     85        inp ="from __future__ import print_function, generators"
     86        self.assertEqual(run(inp), fs(("print_function", "generators")))
     87        inp = "from __future__ import (print_function,)"
     88        self.assertEqual(run(inp), fs(("print_function",)))
     89        inp = "from __future__ import (generators, print_function)"
     90        self.assertEqual(run(inp), fs(("generators", "print_function")))
     91        inp = "from __future__ import (generators, nested_scopes)"
     92        self.assertEqual(run(inp), fs(("generators", "nested_scopes")))
     93        inp = """from __future__ import generators
    8194from __future__ import print_function"""
    82         self.assertTrue(run(input))
    83         self.assertFalse(run("from"))
    84         self.assertFalse(run("from 4"))
    85         self.assertFalse(run("from x"))
    86         self.assertFalse(run("from x 5"))
    87         self.assertFalse(run("from x im"))
    88         self.assertFalse(run("from x import"))
    89         self.assertFalse(run("from x import 4"))
    90         input = "'docstring'\nfrom __future__ import print_function"
    91         self.assertTrue(run(input))
    92         input = "'docstring'\n'somng'\nfrom __future__ import print_function"
    93         self.assertFalse(run(input))
    94         input = "# comment\nfrom __future__ import print_function"
    95         self.assertTrue(run(input))
    96         input = "# comment\n'doc'\nfrom __future__ import print_function"
    97         self.assertTrue(run(input))
    98         input = "class x: pass\nfrom __future__ import print_function"
    99         self.assertFalse(run(input))
     95        self.assertEqual(run(inp), fs(("generators", "print_function")))
     96        invalid = ("from",
     97                   "from 4",
     98                   "from x",
     99                   "from x 5",
     100                   "from x im",
     101                   "from x import",
     102                   "from x import 4",
     103                   )
     104        for inp in invalid:
     105            self.assertEqual(run(inp), empty)
     106        inp = "'docstring'\nfrom __future__ import print_function"
     107        self.assertEqual(run(inp), fs(("print_function",)))
     108        inp = "'docstring'\n'somng'\nfrom __future__ import print_function"
     109        self.assertEqual(run(inp), empty)
     110        inp = "# comment\nfrom __future__ import print_function"
     111        self.assertEqual(run(inp), fs(("print_function",)))
     112        inp = "# comment\n'doc'\nfrom __future__ import print_function"
     113        self.assertEqual(run(inp), fs(("print_function",)))
     114        inp = "class x: pass\nfrom __future__ import print_function"
     115        self.assertEqual(run(inp), empty)
    100116
    101117    def test_get_headnode_dict(self):
     
    167183        self.assertEqual(results, expected)
    168184
    169     def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS):
     185    def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS,
     186                               options=None, mock_log_debug=None,
     187                               actually_write=True):
     188        tmpdir = tempfile.mkdtemp(prefix="2to3-test_refactor")
     189        self.addCleanup(shutil.rmtree, tmpdir)
     190        # make a copy of the tested file that we can write to
     191        shutil.copy(test_file, tmpdir)
     192        test_file = os.path.join(tmpdir, os.path.basename(test_file))
     193        os.chmod(test_file, 0o644)
     194
    170195        def read_file():
    171196            with open(test_file, "rb") as fp:
    172197                return fp.read()
     198
    173199        old_contents = read_file()
    174         rt = self.rt(fixers=fixers)
     200        rt = self.rt(fixers=fixers, options=options)
     201        if mock_log_debug:
     202            rt.log_debug = mock_log_debug
    175203
    176204        rt.refactor_file(test_file)
    177205        self.assertEqual(old_contents, read_file())
    178206
    179         try:
    180             rt.refactor_file(test_file, True)
    181             new_contents = read_file()
    182             self.assertNotEqual(old_contents, new_contents)
    183         finally:
    184             with open(test_file, "wb") as fp:
    185                 fp.write(old_contents)
     207        if not actually_write:
     208            return
     209        rt.refactor_file(test_file, True)
     210        new_contents = read_file()
     211        self.assertNotEqual(old_contents, new_contents)
    186212        return new_contents
    187213
     
    189215        test_file = os.path.join(FIXER_DIR, "parrot_example.py")
    190216        self.check_file_refactoring(test_file, _DEFAULT_FIXERS)
     217
     218    def test_refactor_file_write_unchanged_file(self):
     219        test_file = os.path.join(FIXER_DIR, "parrot_example.py")
     220        debug_messages = []
     221        def recording_log_debug(msg, *args):
     222            debug_messages.append(msg % args)
     223        self.check_file_refactoring(test_file, fixers=(),
     224                                    options={"write_unchanged_files": True},
     225                                    mock_log_debug=recording_log_debug,
     226                                    actually_write=False)
     227        # Testing that it logged this message when write=False was passed is
     228        # sufficient to see that it did not bail early after "No changes".
     229        message_regex = r"Not writing changes to .*%s%s" % (
     230                os.sep, os.path.basename(test_file))
     231        for message in debug_messages:
     232            if "Not writing changes" in message:
     233                self.assertRegexpMatches(message, message_regex)
     234                break
     235        else:
     236            self.fail("%r not matched in %r" % (message_regex, debug_messages))
    191237
    192238    def test_refactor_dir(self):
     
    214260                ".dumb",
    215261                ".after.py",
     262                "notpy.npy",
    216263                "sappy"]
    217264        expected = ["hi.py"]
     
    224271        fn = os.path.join(TEST_DATA_DIR, "different_encoding.py")
    225272        self.check_file_refactoring(fn)
     273
     274    def test_false_file_encoding(self):
     275        fn = os.path.join(TEST_DATA_DIR, "false_encoding.py")
     276        data = self.check_file_refactoring(fn)
    226277
    227278    def test_bom(self):
  • python/trunk/Lib/lib2to3/tests/test_util.py

    r2 r391  
    569569    def test_from_import(self):
    570570        node = parse('bar()')
    571         fixer_util.touch_import("cgi", "escape", node)
    572         self.assertEqual(str(node), 'from cgi import escape\nbar()\n\n')
     571        fixer_util.touch_import("html", "escape", node)
     572        self.assertEqual(str(node), 'from html import escape\nbar()\n\n')
    573573
    574574    def test_name_import(self):
     
    576576        fixer_util.touch_import(None, "cgi", node)
    577577        self.assertEqual(str(node), 'import cgi\nbar()\n\n')
     578
     579class Test_find_indentation(support.TestCase):
     580
     581    def test_nothing(self):
     582        fi = fixer_util.find_indentation
     583        node = parse("node()")
     584        self.assertEqual(fi(node), u"")
     585        node = parse("")
     586        self.assertEqual(fi(node), u"")
     587
     588    def test_simple(self):
     589        fi = fixer_util.find_indentation
     590        node = parse("def f():\n    x()")
     591        self.assertEqual(fi(node), u"")
     592        self.assertEqual(fi(node.children[0].children[4].children[2]), u"    ")
     593        node = parse("def f():\n    x()\n    y()")
     594        self.assertEqual(fi(node.children[0].children[4].children[4]), u"    ")
Note: See TracChangeset for help on using the changeset viewer.