Timestamp: Mar 19, 2014, 11:31:01 PM
Author: dmik
Message: python: Merge vendor 2.7.6 to trunk.
Location: python/trunk
Files: 2 edited

  • python/trunk

  • python/trunk/Lib/lib2to3/refactor.py

--- python/trunk/Lib/lib2to3/refactor.py (r2)
+++ python/trunk/Lib/lib2to3/refactor.py (r391)
@@ -8,4 +8,6 @@
 provides infrastructure to write your own refactoring tool.
 """
+
+from __future__ import with_statement
 
 __author__ = "Guido van Rossum <guido@python.org>"
@@ -23,5 +25,8 @@
 # Local imports
 from .pgen2 import driver, tokenize, token
+from .fixer_util import find_root
 from . import pytree, pygram
+from . import btm_utils as bu
+from . import btm_matcher as bm
 
 
@@ -123,11 +128,12 @@
 
 
-def _detect_future_print(source):
+def _detect_future_features(source):
     have_docstring = False
     gen = tokenize.generate_tokens(StringIO.StringIO(source).readline)
     def advance():
-        tok = next(gen)
+        tok = gen.next()
         return tok[0], tok[1]
     ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT))
+    features = set()
     try:
         while True:
@@ -141,8 +147,8 @@
             elif tp == token.NAME and value == u"from":
                 tp, value = advance()
-                if tp != token.NAME and value != u"__future__":
+                if tp != token.NAME or value != u"__future__":
                     break
                 tp, value = advance()
-                if tp != token.NAME and value != u"import":
+                if tp != token.NAME or value != u"import":
                     break
                 tp, value = advance()
@@ -150,8 +156,7 @@
                     tp, value = advance()
                 while tp == token.NAME:
-                    if value == u"print_function":
-                        return True
+                    features.add(value)
                     tp, value = advance()
-                    if tp != token.OP and value != u",":
+                    if tp != token.OP or value != u",":
                         break
                     tp, value = advance()
@@ -160,5 +165,5 @@
     except StopIteration:
         pass
-    return False
+    return frozenset(features)
 
 
@@ -169,5 +174,6 @@
 class RefactoringTool(object):
 
-    _default_options = {"print_function" : False}
+    _default_options = {"print_function" : False,
+                        "write_unchanged_files" : False}
 
     CLASS_PREFIX = "Fix" # The prefix for fixer classes
@@ -191,4 +197,8 @@
         else:
             self.grammar = pygram.python_grammar
+        # When this is True, the refactor*() methods will call write_file() for
+        # files processed even if they were not changed during refactoring. If
+        # and only if the refactor method's write parameter was True.
+        self.write_unchanged_files = self.options.get("write_unchanged_files")
         self.errors = []
         self.logger = logging.getLogger("RefactoringTool")
@@ -200,8 +210,25 @@
         self.pre_order, self.post_order = self.get_fixers()
 
-        self.pre_order_heads = _get_headnode_dict(self.pre_order)
-        self.post_order_heads = _get_headnode_dict(self.post_order)
 
         self.files = []  # List of files that were or should be modified
+
+        self.BM = bm.BottomMatcher()
+        self.bmi_pre_order = [] # Bottom Matcher incompatible fixers
+        self.bmi_post_order = []
+
+        for fixer in chain(self.post_order, self.pre_order):
+            if fixer.BM_compatible:
+                self.BM.add_fixer(fixer)
+                # remove fixers that will be handled by the bottom-up
+                # matcher
+            elif fixer in self.pre_order:
+                self.bmi_pre_order.append(fixer)
+            elif fixer in self.post_order:
+                self.bmi_post_order.append(fixer)
+
+        self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order)
+        self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order)
+
+
 
     def get_fixers(self):
@@ -267,4 +294,5 @@
     def refactor(self, items, write=False, doctests_only=False):
         """Refactor a list of files and directories."""
+
         for dir_or_file in items:
             if os.path.isdir(dir_or_file):
@@ -280,4 +308,5 @@
         Files and subdirectories starting with '.' are skipped.
         """
+        py_ext = os.extsep + "py"
         for dirpath, dirnames, filenames in os.walk(dir_name):
             self.log_debug("Descending into %s", dirpath)
@@ -285,6 +314,6 @@
             filenames.sort()
             for name in filenames:
-                if not name.startswith(".") and \
-                        os.path.splitext(name)[1].endswith("py"):
+                if (not name.startswith(".") and
+                    os.path.splitext(name)[1] == py_ext):
                     fullname = os.path.join(dirpath, name)
                     self.refactor_file(fullname, write, doctests_only)
@@ -298,5 +327,5 @@
         try:
             f = open(filename, "rb")
-        except IOError, err:
+        except IOError as err:
             self.log_error("Can't open %s: %s", filename, err)
             return None, None
@@ -318,5 +347,5 @@
             self.log_debug("Refactoring doctests in %s", filename)
             output = self.refactor_docstring(input, filename)
-            if output != input:
+            if self.write_unchanged_files or output != input:
                 self.processed_file(output, filename, input, write, encoding)
             else:
@@ -324,5 +353,5 @@
         else:
             tree = self.refactor_string(input, filename)
-            if tree and tree.was_changed:
+            if self.write_unchanged_files or (tree and tree.was_changed):
                 # The [:-1] is to take off the \n we added earlier
                 self.processed_file(unicode(tree)[:-1], filename,
@@ -342,9 +371,10 @@
             there were errors during the parse.
         """
-        if _detect_future_print(data):
+        features = _detect_future_features(data)
+        if "print_function" in features:
             self.driver.grammar = pygram.python_grammar_no_print_statement
         try:
             tree = self.driver.parse_string(data)
-        except Exception, err:
+        except Exception as err:
             self.log_error("Can't parse %s: %s: %s",
                            name, err.__class__.__name__, err)
@@ -352,4 +382,5 @@
         finally:
             self.driver.grammar = self.grammar
+        tree.future_features = features
         self.log_debug("Refactoring %s", name)
         self.refactor_tree(tree, name)
@@ -361,5 +392,5 @@
             self.log_debug("Refactoring doctests in stdin")
             output = self.refactor_docstring(input, "<stdin>")
-            if output != input:
+            if self.write_unchanged_files or output != input:
                 self.processed_file(output, "<stdin>", input)
             else:
@@ -367,5 +398,5 @@
         else:
             tree = self.refactor_string(input, "<stdin>")
-            if tree and tree.was_changed:
+            if self.write_unchanged_files or (tree and tree.was_changed):
                 self.processed_file(unicode(tree), "<stdin>", input)
             else:
@@ -374,4 +405,8 @@
     def refactor_tree(self, tree, name):
         """Refactors a parse tree (modifying the tree in place).
+
+        For compatible patterns the bottom matcher module is
+        used. Otherwise the tree is traversed node-to-node for
+        matches.
 
         Args:
@@ -383,9 +418,63 @@
             True if the tree was modified, False otherwise.
         """
+
         for fixer in chain(self.pre_order, self.post_order):
             fixer.start_tree(tree, name)
 
-        self.traverse_by(self.pre_order_heads, tree.pre_order())
-        self.traverse_by(self.post_order_heads, tree.post_order())
+        #use traditional matching for the incompatible fixers
+        self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
+        self.traverse_by(self.bmi_post_order_heads, tree.post_order())
+
+        # obtain a set of candidate nodes
+        match_set = self.BM.run(tree.leaves())
+
+        while any(match_set.values()):
+            for fixer in self.BM.fixers:
+                if fixer in match_set and match_set[fixer]:
+                    #sort by depth; apply fixers from bottom(of the AST) to top
+                    match_set[fixer].sort(key=pytree.Base.depth, reverse=True)
+
+                    if fixer.keep_line_order:
+                        #some fixers(eg fix_imports) must be applied
+                        #with the original file's line order
+                        match_set[fixer].sort(key=pytree.Base.get_lineno)
+
+                    for node in list(match_set[fixer]):
+                        if node in match_set[fixer]:
+                            match_set[fixer].remove(node)
+
+                        try:
+                            find_root(node)
+                        except ValueError:
+                            # this node has been cut off from a
+                            # previous transformation ; skip
+                            continue
+
+                        if node.fixers_applied and fixer in node.fixers_applied:
+                            # do not apply the same fixer again
+                            continue
+
+                        results = fixer.match(node)
+
+                        if results:
+                            new = fixer.transform(node, results)
+                            if new is not None:
+                                node.replace(new)
+                                #new.fixers_applied.append(fixer)
+                                for node in new.post_order():
+                                    # do not apply the fixer again to
+                                    # this or any subnode
+                                    if not node.fixers_applied:
+                                        node.fixers_applied = []
+                                    node.fixers_applied.append(fixer)
+
+                                # update the original match set for
+                                # the added code
+                                new_matches = self.BM.run(new.leaves())
+                                for fxr in new_matches:
+                                    if not fxr in match_set:
+                                        match_set[fxr]=[]
+
+                                    match_set[fxr].extend(new_matches[fxr])
 
         for fixer in chain(self.pre_order, self.post_order):
@@ -419,5 +508,5 @@
                        encoding=None):
         """
-        Called when a file has been refactored, and there are changes.
+        Called when a file has been refactored and there may be changes.
         """
         self.files.append(filename)
@@ -430,5 +519,6 @@
         if equal:
             self.log_debug("No changes to %s", filename)
-            return
+            if not self.write_unchanged_files:
+                return
         if write:
             self.write_file(new_text, filename, old_text, encoding)
@@ -445,10 +535,10 @@
         try:
             f = _open_with_encoding(filename, "w", encoding=encoding)
-        except os.error, err:
+        except os.error as err:
            self.log_error("Can't create %s: %s", filename, err)
            return
         try:
             f.write(_to_system_newlines(new_text))
-        except os.error, err:
+        except os.error as err:
             self.log_error("Can't write %s: %s", filename, err)
         finally:
@@ -513,6 +603,6 @@
         try:
             tree = self.parse_block(block, lineno, indent)
-        except Exception, err:
-            if self.log.isEnabledFor(logging.DEBUG):
+        except Exception as err:
+            if self.logger.isEnabledFor(logging.DEBUG):
                 for line in block:
                     self.log_debug("Source: %s", line.rstrip(u"\n"))
@@ -561,5 +651,7 @@
         in the parser diagnostics and embedded into the parse tree.
         """
-        return self.driver.parse_tokens(self.wrap_toks(block, lineno, indent))
+        tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent))
+        tree.future_features = frozenset()
+        return tree
 
     def wrap_toks(self, block, lineno, indent):
@@ -606,4 +698,5 @@
         super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
         self.queue = None
+        self.output_lock = None
 
     def refactor(self, items, write=False, doctests_only=False,
@@ -619,4 +712,5 @@
             raise RuntimeError("already doing multiple processes")
         self.queue = multiprocessing.JoinableQueue()
+        self.output_lock = multiprocessing.Lock()
         processes = [multiprocessing.Process(target=self._child)
                      for i in xrange(num_processes)]
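
A minimal sketch of the behaviour introduced above, assuming the merged lib2to3 (Python 2.7.6, r391) is importable; _detect_future_features() is the internal helper shown in the diff, and the "<example>" name is arbitrary:

    # Sketch only (Python 2), not part of the changeset.
    import logging
    from lib2to3.refactor import RefactoringTool, _detect_future_features

    logging.basicConfig()  # the tool logs through the "RefactoringTool" logger

    source = u"from __future__ import print_function, unicode_literals\nprint('x')\n"

    # _detect_future_features() replaces the old boolean _detect_future_print():
    # it returns a frozenset of every name imported from __future__.
    features = _detect_future_features(source)
    assert features == frozenset(["print_function", "unicode_literals"])

    # refactor_string() now records the detected features on the parse tree and
    # still switches to the print_function-aware grammar when needed.
    rt = RefactoringTool(["lib2to3.fixes.fix_print"])
    tree = rt.refactor_string(source, "<example>")
    assert tree.future_features == features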
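
A similar sketch for the new "write_unchanged_files" option added to _default_options; the file name passed to refactor() is a made-up placeholder:

    # Sketch only (Python 2), not part of the changeset.
    from lib2to3.refactor import RefactoringTool, get_fixers_from_package

    fixers = get_fixers_from_package("lib2to3.fixes")

    # Default behaviour: files that come out of refactoring unchanged are only
    # logged ("No changes to ...") and skipped.
    quiet = RefactoringTool(fixers)

    # With the new option, processed_file()/write_file() are reached even for
    # unchanged files; they are still only written when write=True is passed.
    eager = RefactoringTool(fixers, options={"write_unchanged_files": True})
    eager.refactor(["example_module.py"], write=True)  # hypothetical path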