Commits

Anonymous committed 5eb7364

Added latest reindent.py file

  • Parent commits a571d04

Files changed (2)

 
 reindent: convert-utils
 ifeq ($(PYTHON), python3)
-	@$(PYTHON) utils/reindent3.py -r -B .
+	@$(PYTHON) utils/reindent3.py -r -n .
 else
-	@$(PYTHON) utils/reindent.py -r -B .
+	@$(PYTHON) utils/reindent.py -r -n .
 endif
 
 test: build
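
For reference, the updated recipe simply runs the bundled reindent script over the whole tree, recursively and without writing .bak files. A rough Python equivalent, assuming it is run from the repository root where the utils/ scripts live (the interpreter check mirrors the ifeq above):

    # Rough equivalent of the Makefile's reindent recipe: pick the script
    # matching the running interpreter, then reindent recursively ("-r")
    # without writing .bak backups ("-n").
    import subprocess
    import sys

    script = "utils/reindent3.py" if sys.version_info[0] >= 3 else "utils/reindent.py"
    subprocess.check_call([sys.executable, script, "-r", "-n", "."])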

File utils/reindent.py

 #! /usr/bin/env python
 
 # Released to the public domain, by Tim Peters, 03 October 2000.
-# -B option added by Georg Brandl, 2006.
 
 """reindent [-d][-r][-v] [ path ... ]
 
--d (--dryrun)  Dry run.  Analyze, but don't make any changes to files.
--r (--recurse) Recurse.  Search for all .py files in subdirectories too.
--B (--no-backup)         Don't write .bak backup files.
--v (--verbose) Verbose.  Print informative msgs; else only names of \
-changed files.
--h (--help)    Help.     Print this usage information and exit.
+-d (--dryrun)   Dry run.   Analyze, but don't make any changes to, files.
+-r (--recurse)  Recurse.   Search for all .py files in subdirectories too.
+-n (--nobackup) No backup. Does not make a ".bak" file before reindenting.
+-v (--verbose)  Verbose.   Print informative msgs; else no output.
+-h (--help)     Help.      Print this usage information and exit.
 
 Change Python (.py) files to use 4-space indents and no hard tab characters.
 Also trim excess spaces and tabs from ends of lines, and remove empty lines
 at the end of files.  Also ensure the last line ends with a newline.
 
 The hard part of reindenting is figuring out what to do with comment
 lines.  So long as the input files get a clean bill of health from
 tabnanny.py, reindent should do a good job.
+
+The backup file is a copy of the one that is being reindented. The ".bak"
+file is generated with shutil.copy(), but some corner cases regarding
+user/group and permissions could leave the backup file more readable than
+you'd prefer. You can always use the --nobackup option to prevent this.
 """
 
 __version__ = "1"
 
 import tokenize
-import os
+import os, shutil
 import sys
 
-verbose = 0
-recurse = 0
-dryrun  = 0
-no_backup = 0
-
 if sys.version_info >= (3, 0):
     def tokens(readline, tokeneater):
         for token in tokenize.tokenize(readline):
-            tokeneater(*token)
-
-    def b(s):
-        return s.encode('utf-8')
+            yield tokeneater(*token)
 else:
     tokens = tokenize.tokenize
-    b = str
+
+verbose    = 0
+recurse    = 0
+dryrun     = 0
+makebackup = True
 
 def usage(msg=None):
     if msg is not None:
 
 def main():
     import getopt
-    global verbose, recurse, dryrun, no_backup
-
+    global verbose, recurse, dryrun, makebackup
     try:
-        opts, args = getopt.getopt(sys.argv[1:], "drvhB",
-                                   ["dryrun", "recurse", "verbose", "help",
-                                    "no-backup"])
+        opts, args = getopt.getopt(sys.argv[1:], "drnvh",
+                        ["dryrun", "recurse", "nobackup", "verbose", "help"])
     except getopt.error, msg:
         usage(msg)
         return
             dryrun += 1
         elif o in ('-r', '--recurse'):
             recurse += 1
+        elif o in ('-n', '--nobackup'):
+            makebackup = False
         elif o in ('-v', '--verbose'):
             verbose += 1
-        elif o in ('-B', '--no-backup'):
-            no_backup += 1
         elif o in ('-h', '--help'):
             usage()
             return
         for name in names:
             fullname = os.path.join(file, name)
             if ((recurse and os.path.isdir(fullname) and
-                 not os.path.islink(fullname))
+                 not os.path.islink(fullname) and
+                 not os.path.split(fullname)[1].startswith("."))
                 or name.lower().endswith(".py")):
                 check(fullname)
         return
     if verbose:
         print "checking", file, "...",
     try:
-        f = open(file, 'rb')
+        f = open(file)
     except IOError, msg:
         errprint("%s: I/O Error: %s" % (file, str(msg)))
         return
             print "changed."
             if dryrun:
                 print "But this is a dry run, so leaving it alone."
-        else:
-            print "reindented", file, \
-                  (dryrun and "(dry run => not really)" or "")
         if not dryrun:
-            if not no_backup:
-                bak = file + ".bak"
-                if os.path.exists(bak):
-                    os.remove(bak)
-                os.rename(file, bak)
+            bak = file + ".bak"
+            if makebackup:
+                shutil.copyfile(file, bak)
                 if verbose:
-                    print "renamed", file, "to", bak
-            f = open(file, "wb")
+                    print "backed up", file, "to", bak
+            f = open(file, "w")
             r.write(f)
             f.close()
             if verbose:
                 print "wrote new", file
+        return True
     else:
         if verbose:
             print "unchanged."
+        return False
 
+def _rstrip(line, JUNK='\n \t'):
+    """Return line stripped of trailing spaces, tabs, newlines.
+
+    Note that line.rstrip() instead also strips sundry control characters,
+    but at least one known Emacs user expects to keep junk like that, not
+    mentioning Barry by name or anything <wink>.
+    """
+
+    i = len(line)
+    while i > 0 and line[i-1] in JUNK:
+        i -= 1
+    return line[:i]
 
 class Reindenter:
 
         # File lines, rstripped & tab-expanded.  Dummy at start is so
         # that we can use tokenize's 1-based line numbering easily.
         # Note that a line is all-blank iff it's "\n".
-        self.lines = [line.rstrip(b('\n \t')).expandtabs() + b("\n")
+        self.lines = [_rstrip(line).expandtabs() + "\n"
                       for line in self.raw]
         self.lines.insert(0, None)
         self.index = 1  # index into self.lines of next line
         tokens(self.getline, self.tokeneater)
         # Remove trailing empty lines.
         lines = self.lines
-        while lines and lines[-1] == b("\n"):
+        while lines and lines[-1] == "\n":
             lines.pop()
         # Sentinel.
         stats = self.stats
             else:
                 for line in lines[thisstmt:nextstmt]:
                     if diff > 0:
-                        if line == b("\n"):
+                        if line == "\n":
                             after.append(line)
                         else:
-                            after.append(b(" ") * diff + line)
+                            after.append(" " * diff + line)
                     else:
                         remove = min(getlspace(line), -diff)
                         after.append(line[remove:])
     # Line-getter for tokenize.
     def getline(self):
         if self.index >= len(self.lines):
-            line = b("")
+            line = ""
         else:
             line = self.lines[self.index]
             self.index += 1
         return line
 
     # Line-eater for tokenize.
-    def tokeneater(self, type, token, scell, end, line,
+    def tokeneater(self, type, token, (sline, scol), end, line,
                    INDENT=tokenize.INDENT,
                    DEDENT=tokenize.DEDENT,
                    NEWLINE=tokenize.NEWLINE,
                    COMMENT=tokenize.COMMENT,
                    NL=tokenize.NL):
-        sline, scol = scell
 
         if type == NEWLINE:
             # A program statement, or ENDMARKER, will eventually follow,
 # Count number of leading blanks.
 def getlspace(line):
     i, n = 0, len(line)
-    while i < n and line[i] == b(" "):
+    while i < n and line[i] == " ":
         i += 1
     return i
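
Beyond the command line, the Reindenter class shown above can also be driven from other code. A minimal sketch, assuming the Python 2 utils/reindent.py is importable as "reindent", that the constructor reads the whole file up front, and that it has a run() method (not shown in this hunk) reporting whether a change is needed, with write() emitting the result as check() uses it:

    # Sketch of calling the reindenter from another script instead of the CLI.
    # Assumes utils/reindent.py is importable as "reindent"; run() and the
    # constructor behaviour are assumptions, write() is used as in check().
    import reindent

    def reindent_in_place(path):
        f = open(path)
        r = reindent.Reindenter(f)   # assumed to read the whole file up front
        f.close()
        if r.run():                  # assumed: True when indentation needs fixing
            out = open(path, "w")
            r.write(out)
            out.close()
            return True
        return False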