Commits

Amaury Forgeot d'Arc committed 1731f0a

I finally found how pytokenize.py is generated.

Tweak the generator until the result looks similar enough to the previous arrays.
The only notable difference is that states #2 and #3 have been swapped.

Comments (0)

Files changed (4)

pypy/interpreter/pyparser/autopath.py

+"""
+self cloning, automatic path configuration 
+
+copy this into any subdirectory of pypy from which scripts need 
+to be run, typically all of the test subdirs. 
+The idea is that any such script simply issues
+
+    import autopath
+
+and this will make sure that the parent directory containing "pypy"
+is in sys.path. 
+
+If you modify the master "autopath.py" version (in pypy/tool/autopath.py) 
+you can directly run it which will copy itself on all autopath.py files
+it finds under the pypy root directory. 
+
+This module always provides these attributes:
+
+    pypydir    pypy root directory path 
+    this_dir   directory where this autopath.py resides 
+
+"""
+
+def __dirinfo(part):
+    """ return (partdir, this_dir) and insert parent of partdir
+    into sys.path.  If the parent directories don't have the part
+    an EnvironmentError is raised."""
+
+    import sys, os
+    try:
+        head = this_dir = os.path.realpath(os.path.dirname(__file__))
+    except NameError:
+        # __file__ is unset (e.g. interactive use); fall back to argv[0].
+        head = this_dir = os.path.realpath(os.path.dirname(sys.argv[0]))
+
+    error = None
+    # Walk upwards from this file's directory until a path component
+    # named *part* is found.
+    while head:
+        partdir = head
+        head, tail = os.path.split(head)
+        if tail == part:
+            # Sanity check: a pypy/__init__.py must exist next to partdir.
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
+            if not os.path.exists(checkfile):
+                error = "Cannot find %r" % (os.path.normpath(checkfile),)
+            break
+    else:
+        error = "Cannot find the parent directory %r of the path %r" % (
+            partdir, this_dir)
+    if not error:
+        # check for bogus end-of-line style (e.g. files checked out on
+        # Windows and moved to Unix)
+        f = open(__file__.replace('.pyc', '.py'), 'r')
+        data = f.read()
+        f.close()
+        if data.endswith('\r\n') or data.endswith('\r'):
+            error = ("Bad end-of-line style in the .py files. Typically "
+                     "caused by a zip file or a checkout done on Windows and "
+                     "moved to Unix or vice-versa.")
+    if error:
+        raise EnvironmentError("Invalid source tree - bogus checkout! " +
+                               error)
+    
+    pypy_root = os.path.join(head, '')
+    try:
+        sys.path.remove(head)
+    except ValueError:
+        pass
+    # Put the parent of the pypy tree at the front of sys.path.
+    sys.path.insert(0, head)
+
+    # Re-register already-imported top-level modules that actually live
+    # inside the pypy tree under their fully dotted names.
+    munged = {}
+    for name, mod in sys.modules.items():
+        if '.' in name:
+            continue
+        fn = getattr(mod, '__file__', None)
+        if not isinstance(fn, str):
+            continue
+        newname = os.path.splitext(os.path.basename(fn))[0]
+        # NOTE(review): newname is a bare basename (it contains no dots),
+        # so this startswith(part + '.') test looks like it can never be
+        # true, leaving munged empty -- verify against the master copy
+        # in pypy/tool/autopath.py.
+        if not newname.startswith(part + '.'):
+            continue
+        path = os.path.join(os.path.dirname(os.path.realpath(fn)), '')
+        if path.startswith(pypy_root) and newname != part:
+            modpaths = os.path.normpath(path[len(pypy_root):]).split(os.sep)
+            if newname != '__init__':
+                modpaths.append(newname)
+            modpath = '.'.join(modpaths)
+            if modpath not in sys.modules:
+                munged[modpath] = mod
+
+    for name, mod in munged.iteritems():
+        if name not in sys.modules:
+            sys.modules[name] = mod
+        if '.' in name:
+            # Make the module reachable as an attribute of its parent
+            # package as well.
+            prename = name[:name.rfind('.')]
+            postname = name[len(prename)+1:]
+            if prename not in sys.modules:
+                __import__(prename)
+                if not hasattr(sys.modules[prename], postname):
+                    setattr(sys.modules[prename], postname, mod)
+
+    return partdir, this_dir
+
+def __clone():
+    """ clone master version of autopath.py into all subdirs """
+    from os.path import join, walk
+    if not this_dir.endswith(join('pypy','tool')):
+        raise EnvironmentError("can only clone master version "
+                               "'%s'" % join(pypydir, 'tool',_myname))
+
+
+    def sync_walker(arg, dirname, fnames):
+        # Rewrite any autopath.py found under dirname whose content
+        # differs from the master text passed in as *arg*.
+        if _myname in fnames:
+            fn = join(dirname, _myname)
+            f = open(fn, 'rwb+')
+            try:
+                if f.read() == arg:
+                    print "checkok", fn
+                else:
+                    print "syncing", fn
+                    # NOTE(review): rebinding f here leaks the first
+                    # handle -- the finally clause only closes the
+                    # second open. Harmless for a one-shot tool, but
+                    # worth confirming against the master copy.
+                    f = open(fn, 'w')
+                    f.write(arg)
+            finally:
+                f.close()
+    s = open(join(pypydir, 'tool', _myname), 'rb').read()
+    walk(pypydir, sync_walker, s)
+
+# Filename under which copies of this module are cloned around the tree.
+_myname = 'autopath.py'
+
+# set guaranteed attributes
+
+pypydir, this_dir = __dirinfo('pypy')
+import py # note: py is imported only AFTER the path has been set
+libpythondir = str(py.path.local(pypydir).dirpath().join('lib-python', '2.5.2'))
+libpythonmodifieddir = str(py.path.local(libpythondir).dirpath().join('modified-2.5.2'))
+
+# Running this file directly re-clones the master copy into all subdirs.
+if __name__ == '__main__':
+    __clone()

pypy/interpreter/pyparser/genpytokenize.py

+#! /usr/bin/env python
+"""Module genPytokenize
+
+Generates finite state automata for recognizing Python tokens.  These are hand
+coded versions of the regular expressions originally appearing in Ping's
+tokenize module in the Python standard library.
+
+When run from the command line, this should pretty print the DFA machinery.
+
+$Id: genPytokenize.py,v 1.1 2003/10/02 17:37:17 jriehl Exp $
+"""
+
+import autopath
+from pypy.interpreter.pyparser.pylexer import *
+from pypy.interpreter.pyparser.automata import NonGreedyDFA, DFA, DEFAULT
+
+def makePyPseudoDFA ():
+    """Build the DFA recognizing one Python "pseudo token": optional
+    whitespace followed by one of: line continuation, comment,
+    triple-quote opener, number, operator/bracket/special, one-line
+    string, or name.  The NFA fragments are assembled with the pylexer
+    combinators into the shared *states* list and converted via
+    nfaToDfa(); returns a DFA instance."""
+    import string
+    states = []
+    # ____________________________________________________________
+    def makeLineCont ():
+        return chain(states,
+                     newArcPair(states, "\\"),
+                     maybe(states, newArcPair(states, "\r")),
+                     newArcPair(states, "\n"))
+    # ____________________________________________________________
+    # Ignore stuff
+    def makeWhitespace ():
+        return any(states, groupStr(states, " \f\t"))
+    # ____________________________________________________________
+    def makeComment ():
+        return chain(states,
+                     newArcPair(states, "#"),
+                     any(states, notGroupStr(states, "\r\n")))
+    # ____________________________________________________________
+    #ignore = chain(states,
+    #               makeWhitespace(),
+    #               any(states, chain(states,
+    #                                 makeLineCont(),
+    #                                 makeWhitespace())),
+    #               maybe(states, makeComment()))
+    # ____________________________________________________________
+    # Names
+    name = chain(states,
+                 groupStr(states, string.letters + "_"),
+                 any(states, groupStr(states,
+                                      string.letters + string.digits + "_")))
+    # ____________________________________________________________
+    # Digits
+    def makeDigits ():
+        return groupStr(states, "0123456789")
+    # ____________________________________________________________
+    # Integer numbers
+    hexNumber = chain(states,
+                      newArcPair(states, "0"),
+                      groupStr(states, "xX"),
+                      any(states, groupStr(states, "0123456789abcdefABCDEF")),
+                      maybe(states, groupStr(states, "lL")))
+    octNumber = chain(states,
+                      newArcPair(states, "0"),
+                      any(states, groupStr(states, "01234567")),
+                      maybe(states, groupStr(states, "lL")))
+    decNumber = chain(states,
+                      groupStr(states, "123456789"),
+                      any(states, makeDigits()),
+                      maybe(states, groupStr(states, "lL")))
+    intNumber = group(states, hexNumber, octNumber, decNumber)
+    # ____________________________________________________________
+    # Exponents
+    def makeExp ():
+        return chain(states,
+                     groupStr(states, "eE"),
+                     maybe(states, groupStr(states, "+-")),
+                     atleastonce(states, makeDigits()))
+    # ____________________________________________________________
+    # Floating point numbers
+    def makeFloat ():
+        pointFloat = chain(states,
+                           group(states,
+                                 chain(states,
+                                       atleastonce(states, makeDigits()),
+                                       newArcPair(states, "."),
+                                       any(states, makeDigits())),
+                                 chain(states,
+                                       newArcPair(states, "."),
+                                       atleastonce(states, makeDigits()))),
+                           maybe(states, makeExp()))
+        expFloat = chain(states,
+                         atleastonce(states, makeDigits()),
+                         makeExp())
+        return group(states, pointFloat, expFloat)
+    # ____________________________________________________________
+    # Imaginary numbers
+    imagNumber = group(states,
+                       chain(states,
+                             atleastonce(states, makeDigits()),
+                             groupStr(states, "jJ")),
+                       chain(states,
+                             makeFloat(),
+                             groupStr(states, "jJ")))
+    # ____________________________________________________________
+    # Any old number.
+    number = group(states, imagNumber, makeFloat(), intNumber)
+    # ____________________________________________________________
+    # Funny
+    operator = group(states,
+                     chain(states,
+                           chainStr(states, "**"),
+                           maybe(states, newArcPair(states, "="))),
+                     chain(states,
+                           chainStr(states, ">>"),
+                           maybe(states, newArcPair(states, "="))),
+                     chain(states,
+                           chainStr(states, "<<"),
+                           maybe(states, newArcPair(states, "="))),
+                     chainStr(states, "<>"),
+                     chainStr(states, "!="),
+                     chain(states,
+                           chainStr(states, "//"),
+                           maybe(states, newArcPair(states, "="))),
+                     chain(states,
+                           groupStr(states, "+-*/%&|^=<>"),
+                           maybe(states, newArcPair(states, "="))),
+                     newArcPair(states, "~"))
+    bracket = groupStr(states, "[](){}")
+    special = group(states,
+                    chain(states,
+                          maybe(states, newArcPair(states, "\r")),
+                          newArcPair(states, "\n")),
+                    groupStr(states, "@:;.,`"))
+    funny = group(states, operator, bracket, special)
+    # ____________________________________________________________
+    # One-line strings: an optional u/b and r prefix, then the body up
+    # to the closing quote or a line continuation.
+    def makeStrPrefix ():
+        return chain(states,
+                     maybe(states, groupStr(states, "uUbB")),
+                     maybe(states, groupStr(states, "rR")))
+    # ____________________________________________________________
+    contStr = group(states,
+                    chain(states,
+                          makeStrPrefix(),
+                          newArcPair(states, "'"),
+                          any(states,
+                              notGroupStr(states, "\n'\\")),
+                          any(states,
+                              chain(states,
+                                    newArcPair(states, "\\"),
+                                    newArcPair(states, DEFAULT),
+                                    any(states,
+                                        notGroupStr(states, "\n'\\")))),
+                          group(states,
+                                newArcPair(states, "'"),
+                                makeLineCont())),
+                    chain(states,
+                          makeStrPrefix(),
+                          newArcPair(states, '"'),
+                          any(states,
+                              notGroupStr(states, '\n"\\')),
+                          any(states,
+                              chain(states,
+                                    newArcPair(states, "\\"),
+                                    newArcPair(states, DEFAULT),
+                                    any(states,
+                                        notGroupStr(states, '\n"\\')))),
+                          group(states,
+                                newArcPair(states, '"'),
+                                makeLineCont())))
+    triple = group(states,
+                   makeStrPrefix(),
+                   group(states,
+                         chainStr(states, "'''"),
+                         chainStr(states, '"""')))
+    pseudoExtras = group(states,
+                         makeLineCont(),
+                         makeComment(),
+                         triple)
+    pseudoToken = chain(states,
+                        makeWhitespace(),
+                        group(states,
+                              pseudoExtras, number, funny, contStr, name))
+    dfaStates, dfaAccepts = nfaToDfa(states, *pseudoToken)
+    return DFA(dfaStates, dfaAccepts)
+
+# ______________________________________________________________________
+
+def makePyEndDFAMap ():
+    """Build the map from string-opening token to the DFA that scans to
+    the matching closing quote(s).  Keys are quote/prefix combinations;
+    values are DFA (single/double quote), NonGreedyDFA (triple quotes),
+    or None for bare prefix letters.  Note: the local ``map`` shadows
+    the builtin of the same name."""
+    states = []
+    # Single-quoted body: non-quote chars, backslash escapes anything.
+    single = chain(states,
+                   any(states, notGroupStr(states, "'\\")),
+                   any(states,
+                       chain(states,
+                             newArcPair(states, "\\"),
+                             newArcPair(states, DEFAULT),
+                             any(states, notGroupStr(states, "'\\")))),
+                   newArcPair(states, "'"))
+    singleDFA = DFA(*nfaToDfa(states, *single))
+    states = []
+    double = chain(states,
+                   any(states, notGroupStr(states, '"\\')),
+                   any(states,
+                       chain(states,
+                             newArcPair(states, "\\"),
+                             newArcPair(states, DEFAULT),
+                             any(states, notGroupStr(states, '"\\')))),
+                   newArcPair(states, '"'))
+    doubleDFA = DFA(*nfaToDfa(states, *double))
+    states = []
+    # Triple-quoted body: a lone quote inside the body is allowed as
+    # long as it is not followed by two more (notChainStr); a
+    # NonGreedyDFA stops at the first closing triple.
+    single3 = chain(states,
+                    any(states, notGroupStr(states, "'\\")),
+                    any(states,
+                        chain(states,
+                              group(states,
+                                    chain(states,
+                                          newArcPair(states, "\\"),
+                                          newArcPair(states, DEFAULT)),
+                                    chain(states,
+                                          newArcPair(states, "'"),
+                                          notChainStr(states, "''"))),
+                              any(states, notGroupStr(states, "'\\")))),
+                    chainStr(states, "'''"))
+    single3DFA = NonGreedyDFA(*nfaToDfa(states, *single3))
+    states = []
+    double3 = chain(states,
+                    any(states, notGroupStr(states, '"\\')),
+                    any(states,
+                        chain(states,
+                              group(states,
+                                    chain(states,
+                                          newArcPair(states, "\\"),
+                                          newArcPair(states, DEFAULT)),
+                                    chain(states,
+                                          newArcPair(states, '"'),
+                                          notChainStr(states, '""'))),
+                              any(states, notGroupStr(states, '"\\')))),
+                    chainStr(states, '"""'))
+    double3DFA = NonGreedyDFA(*nfaToDfa(states, *double3))
+    map = {"'" : singleDFA,
+           '"' : doubleDFA,
+           "r" : None,
+           "R" : None,
+           "u" : None,
+           "U" : None,
+           "b" : None,
+           "B" : None}
+    # Every prefix combination of a triple quote shares the same DFA.
+    for uniPrefix in ("", "u", "U", "b", "B", ):
+        for rawPrefix in ("", "r", "R"):
+            prefix = uniPrefix + rawPrefix
+            map[prefix + "'''"] = single3DFA
+            map[prefix + '"""'] = double3DFA
+    return map
+
+# ______________________________________________________________________
+
+def output(name, dfa_class, dfa):
+    """Pretty-print *dfa* to stdout as Python source: an ``accepts``
+    list, a ``states`` list of transition dicts, and finally the
+    ``<name> = automata.<dfa_class>(states, accepts)`` line, in the
+    layout used by pytokenize.py."""
+    import textwrap
+    i = 0
+    for line in textwrap.wrap(repr(dfa.accepts), width = 50):
+        if i == 0:
+            print "accepts =", line
+        else:
+            print "          ", line
+        i += 1
+    import StringIO
+    print "states = ["
+    for state in dfa.states:
+        s = StringIO.StringIO()
+        i = 0
+        for k, v in sorted(state.items()):
+            i += 1
+            if k == '\x00default':
+                # The reserved default-transition key prints as a name.
+                k = "automata.DEFAULT"
+            else:
+                k = repr(k)
+            s.write(k)
+            # '::' is a space-free placeholder turned into ': ' after
+            # wrapping, so textwrap cannot break between key and value.
+            s.write('::')
+            s.write(repr(v))
+            if i < len(state):
+                s.write(', ')
+        s.write('},')
+        i = 0
+        for line in textwrap.wrap(s.getvalue(), width=35):
+            line = line.replace('::', ': ')
+            if i == 0:
+                print '    {' + line
+            else:
+                print '     ' + line
+            i += 1
+        print
+    print "    ]"
+    print "%s = automata.%s(states, accepts)" % (name, dfa_class)
+    print
+
+def main ():
+    """Generate and print all the DFA tables used by pytokenize.py:
+    the pseudo-token DFA, the four string-end DFAs, and the endDFAs
+    dict literal."""
+    pseudoDFA = makePyPseudoDFA()
+    output("pseudoDFA", "DFA", pseudoDFA)
+    endDFAMap = makePyEndDFAMap()
+    output("double3DFA", "NonGreedyDFA", endDFAMap['"""'])
+    output("single3DFA", "NonGreedyDFA", endDFAMap["'''"])
+    output("doubleDFA", "DFA", endDFAMap['"'])
+    output("singleDFA", "DFA", endDFAMap["'"])
+    print "endDFAs = {\"'\" : singleDFA,"
+    print "           '\"' : doubleDFA,"
+    print "           'r' : None,"
+    print "           'R' : None,"
+    print "           'u' : None,"
+    print "           'U' : None,"
+    print "           'b' : None,"
+    print "           'B' : None}"
+
+# ______________________________________________________________________
+
+if __name__ == "__main__":
+    main()

pypy/interpreter/pyparser/pylexer.py

+# Used by genpytokenize.py to generate the parser in pytokenize.py
+from pypy.interpreter.pyparser.automata import DFA, DEFAULT
+
+# Sentinel transition label: an arc labelled EMPTY is an epsilon
+# (no-input) transition between NFA states.
+class EMPTY: pass
+
+def newArcPair (states, transitionLabel):
+    """Append two fresh NFA states to *states*, connected by a single
+    arc labelled *transitionLabel*; return their (start, end) indices."""
+    s1Index = len(states)
+    s2Index = s1Index + 1
+    states.append([(transitionLabel, s2Index)])
+    states.append([])
+    return s1Index, s2Index
+
+# ______________________________________________________________________
+
+def chain (states, *stateIndexPairs):
+    if len(stateIndexPairs) > 1:
+        start, lastFinish = stateIndexPairs[0]
+        for nStart, nFinish in stateIndexPairs[1:]:
+            states[lastFinish].append((EMPTY, nStart))
+            lastFinish = nFinish
+        return start, nFinish
+    else:
+        return stateIndexPairs[0]
+
+
+# ______________________________________________________________________
+
+def chainStr (states, str):
+    return chain(states, *map(lambda x : newArcPair(states, x), str))
+
+# ______________________________________________________________________
+
+def notChainStr (states, str):
+    """XXX I'm not sure this is how it should be done, but I'm going to
+    try it anyway.  Note that for this case, I require only single character
+    arcs, since I would have to basically invert all accepting states and
+    non-accepting states of any sub-NFA's.
+    """
+    assert len(str) > 0
+    arcs = map(lambda x : newArcPair(states, x), str)
+    finish = len(states)
+    states.append([])
+    start, lastFinish = arcs[0]
+    states[start].append((EMPTY, finish))
+    for crntStart, crntFinish in arcs[1:]:
+        states[lastFinish].append((EMPTY, crntStart))
+        states[crntStart].append((EMPTY, finish))
+    return start, finish
+
+# ______________________________________________________________________
+
+def group (states, *stateIndexPairs):
+    if len(stateIndexPairs) > 1:
+        start = len(states)
+        finish = start + 1
+        startList = []
+        states.append(startList)
+        states.append([])
+        for eStart, eFinish in stateIndexPairs:
+            startList.append((EMPTY, eStart))
+            states[eFinish].append((EMPTY, finish))
+        return start, finish
+    else:
+        return stateIndexPairs[0]
+
+# ______________________________________________________________________
+
+def groupStr (states, str):
+    return group(states, *map(lambda x : newArcPair(states, x), str))
+
+# ______________________________________________________________________
+
+def notGroup (states, *stateIndexPairs):
+    """Like group, but will add a DEFAULT transition to a new end state,
+    causing anything in the group to not match by going to a dead state.
+    XXX I think this is right...
+    """
+    # The grouped alternatives lead to *dead* (never an accepting path);
+    # only the catch-all DEFAULT arc reaches the returned finish state.
+    start, dead = group(states, *stateIndexPairs)
+    finish = len(states)
+    states.append([])
+    states[start].append((DEFAULT, finish))
+    return start, finish
+
+# ______________________________________________________________________
+
+def notGroupStr (states, str):
+    return notGroup(states, *map(lambda x : newArcPair(states, x), str))
+# ______________________________________________________________________
+
+def any (states, *stateIndexPairs):
+    start, finish = group(states, *stateIndexPairs)
+    states[finish].append((EMPTY, start))
+    return start, start
+
+# ______________________________________________________________________
+
+def maybe (states, *stateIndexPairs):
+    start, finish = group(states, *stateIndexPairs)
+    states[start].append((EMPTY, finish))
+    return start, finish
+
+# ______________________________________________________________________
+
+def atleastonce (states, *stateIndexPairs):
+    start, finish = group(states, *stateIndexPairs)
+    states[finish].append((EMPTY, start))
+    return start, finish
+
+# ______________________________________________________________________
+
+def closure (states, start, result = 0L):
+    """Return the epsilon-closure of NFA state *start* as a long-int
+    bitmask (bit i set <=> state i reachable via EMPTY arcs), OR-ed
+    into the accumulator *result*."""
+    if None == result:
+        # Defensive: treat an explicit None accumulator as the empty set.
+        result = 0L
+    if 0 == (result & (1L << start)):
+        result |= (1L << start)
+        for label, arrow in states[start]:
+            if label == EMPTY:
+                result |= closure(states, arrow, result)
+    return result
+
+# ______________________________________________________________________
+
+def nfaToDfa (states, start, finish):
+    """Subset construction: convert the NFA in *states* (with the given
+    start/finish indices) to a DFA; returns the (states, accepts) pair
+    produced by finalizeTempDfa().
+
+    tempState := [ nfaClosure : bitmask, [ tempArc ], accept : bool ]
+    tempArc   := [ label, arrow : tempStates index, nfaClosure ]"""
+    tempStates = []
+    startClosure = closure(states, start)
+    # The DFA start state is the epsilon-closure of the NFA start; it
+    # accepts iff that closure contains the NFA finish state.
+    crntTempState = [startClosure, [], 0 != (startClosure & (1L << finish))]
+    tempStates.append(crntTempState)
+    index = 0
+    while index < len(tempStates):
+        crntTempState = tempStates[index]
+        crntClosure, crntArcs, crntAccept = crntTempState
+        # For each input label, union the closures reachable from every
+        # NFA state contained in this DFA state.
+        for index2 in range(0, len(states)):
+            if 0 != (crntClosure & (1L << index2)):
+                for label, nfaArrow in states[index2]:
+                    if label == EMPTY:
+                        continue
+                    foundTempArc = False
+                    for tempArc in crntArcs:
+                        if tempArc[0] == label:
+                            foundTempArc = True
+                            break
+                    if not foundTempArc:
+                        tempArc = [label, -1, 0L]
+                        crntArcs.append(tempArc)
+                    tempArc[2] = closure(states, nfaArrow, tempArc[2])
+        # Resolve each arc's target closure to a tempStates index,
+        # appending a new DFA state for any closure not seen before
+        # (the while loop will process it later).
+        for arcIndex in range(0, len(crntArcs)):
+            label, arrow, targetStates = crntArcs[arcIndex]
+            targetFound = False
+            arrow = 0
+            for destTempState in tempStates:
+                if destTempState[0] == targetStates:
+                    targetFound = True
+                    break
+                arrow += 1
+            if not targetFound:
+                assert arrow == len(tempStates)
+                newState = [targetStates, [], 0 != (targetStates &
+                                                    (1L << finish))]
+                tempStates.append(newState)
+            crntArcs[arcIndex][1] = arrow
+        index += 1
+    tempStates = simplifyTempDfa(tempStates)
+    states = finalizeTempDfa(tempStates)
+    return states
+
+# ______________________________________________________________________
+
+def sameState (s1, s2):
+    """sameState(s1, s2)
+    True if the two temp DFA states have the same accept flag and the
+    same arcs (labels and arrows); the trailing nfaClosure element of
+    each arc is deliberately ignored.
+    Note:
+    state := [ nfaclosure : Long, [ arc ], accept : Boolean ]
+    arc := [ label, arrow : Int, nfaClosure : Long ]
+    """
+    if (len(s1[1]) != len(s2[1])) or (s1[2] != s2[2]):
+        return False
+    for arcIndex in range(0, len(s1[1])):
+        arc1 = s1[1][arcIndex]
+        arc2 = s2[1][arcIndex]
+        # [:-1] drops the nfaClosure, comparing only label and arrow.
+        if arc1[:-1] != arc2[:-1]:
+            return False
+    return True
+
+# ______________________________________________________________________
+
+def simplifyTempDfa (tempStates):
+    """simplifyTempDfa (tempStates)
+
+    Merge duplicate DFA states: whenever two states compare equal under
+    sameState(), redirect every arc pointing at the later one to the
+    earlier one and mark the later one deleted.  Deleted slots are set
+    to None (indices of surviving states are preserved); repeats until
+    no more merges are possible.
+    """
+    changes = True
+    deletedStates = []
+    while changes:
+        changes = False
+        for i in range(1, len(tempStates)):
+            if i in deletedStates:
+                continue
+            for j in range(0, i):
+                if j in deletedStates:
+                    continue
+                if sameState(tempStates[i], tempStates[j]):
+                    deletedStates.append(i)
+                    # Re-point every surviving arc from i to j.
+                    for k in range(0, len(tempStates)):
+                        if k in deletedStates:
+                            continue
+                        for arc in tempStates[k][1]:
+                            if arc[1] == i:
+                                arc[1] = j
+                    changes = True
+                    break
+    for stateIndex in deletedStates:
+        tempStates[stateIndex] = None
+    return tempStates
+# ______________________________________________________________________
+
+def finalizeTempDfa (tempStates):
+    """finalizeTempDfa (tempStates)
+    
+    Input domain:
+    tempState := [ nfaClosure : Long, [ tempArc ], accept : Boolean ]
+    tempArc := [ label, arrow, nfaClosure ]
+
+    Output domain:
+    state := [ arcMap, accept : Boolean ]
+
+    Compacts away the None slots left by simplifyTempDfa and converts
+    arc lists to {label: state-index} dicts; returns (states, accepts)
+    as two parallel lists.
+    """
+    states = []
+    accepts = []
+    stateMap = {}
+    tempIndex = 0
+    # First pass: assign compact indices to the surviving states.
+    for tempIndex in range(0, len(tempStates)):
+        tempState = tempStates[tempIndex]
+        if None != tempState:
+            stateMap[tempIndex] = len(states)
+            states.append({})
+            accepts.append(tempState[2])
+    # Second pass: translate every arc target through stateMap.
+    for tempIndex in stateMap.keys():
+        stateBitset, tempArcs, accepting = tempStates[tempIndex]
+        newIndex = stateMap[tempIndex]
+        arcMap = states[newIndex]
+        for tempArc in tempArcs:
+            arcMap[tempArc[0]] = stateMap[tempArc[1]]
+    return states, accepts
+

pypy/interpreter/pyparser/pytokenize.py

 __all__ = [ "tokenize" ]
 
 # ______________________________________________________________________
-# Automatically generated DFA's (with one or two hand tweeks):
-pseudoStatesAccepts = [True, True, True, True, True, True, True, True,
-                       True, True, False, True, True, True, False, False,
-                       False, False, True, False, False, True, True, False,
-                       True, False, True, False, True, False, True, False,
-                       False, False, True, False, False, False, True]
+# Automatically generated DFA's
 
-pseudoStates = [
-    {'\t': 0, '\n': 13, '\x0c': 0, '\r': 14, ' ': 0, '!': 10,
-     '"': 16, '#': 18, '%': 12, '&': 12,
-     "'": 15, '(': 13, ')': 13, '*': 7,
-     '+': 12, ',': 13, '-': 12, '.': 6,
-     '/': 11, '0': 4, '1': 5, '2': 5,
-     '3': 5, '4': 5, '5': 5, '6': 5,
-     '7': 5, '8': 5, '9': 5, ':': 13,
-     ';': 13, '<': 9, '=': 12, '>': 8,
-     '@': 13, 'A': 1,
-     'B': 3, 'C': 1, 'D': 1, 'E': 1,
-     'F': 1, 'G': 1, 'H': 1, 'I': 1,
-     'J': 1, 'K': 1, 'L': 1, 'M': 1,
-     'N': 1, 'O': 1, 'P': 1, 'Q': 1,
-     'R': 2, 'S': 1, 'T': 1, 'U': 3,
-     'V': 1, 'W': 1, 'X': 1, 'Y': 1,
-     'Z': 1, '[': 13, '\\': 17, ']': 13,
-     '^': 12, '_': 1, '`': 13, 'a': 1,
-     'b': 3, 'c': 1, 'd': 1, 'e': 1,
-     'f': 1, 'g': 1, 'h': 1, 'i': 1,
-     'j': 1, 'k': 1, 'l': 1, 'm': 1,
-     'n': 1, 'o': 1, 'p': 1, 'q': 1,
-     'r': 2, 's': 1, 't': 1, 'u': 3,
-     'v': 1, 'w': 1, 'x': 1, 'y': 1,
-     'z': 1, '{': 13, '|': 12, '}': 13,
-     '~': 13},
+accepts = [True, True, True, True, True, True, True, True,
+           True, True, False, True, True, True, False, False,
+           False, False, True, False, False, True, True,
+           False, True, False, True, False, True, False,
+           True, False, False, False, True, False, False,
+           False, True]
+states = [
+    {'\t': 0, '\n': 13, '\x0c': 0,
+     '\r': 14, ' ': 0, '!': 10, '"': 16,
+     '#': 18, '%': 12, '&': 12, "'": 15,
+     '(': 13, ')': 13, '*': 7, '+': 12,
+     ',': 13, '-': 12, '.': 6, '/': 11,
+     '0': 4, '1': 5, '2': 5, '3': 5,
+     '4': 5, '5': 5, '6': 5, '7': 5,
+     '8': 5, '9': 5, ':': 13, ';': 13,
+     '<': 9, '=': 12, '>': 8, '@': 13,
+     'A': 1, 'B': 2, 'C': 1, 'D': 1,
+     'E': 1, 'F': 1, 'G': 1, 'H': 1,
+     'I': 1, 'J': 1, 'K': 1, 'L': 1,
+     'M': 1, 'N': 1, 'O': 1, 'P': 1,
+     'Q': 1, 'R': 3, 'S': 1, 'T': 1,
+     'U': 2, 'V': 1, 'W': 1, 'X': 1,
+     'Y': 1, 'Z': 1, '[': 13, '\\': 17,
+     ']': 13, '^': 12, '_': 1, '`': 13,
+     'a': 1, 'b': 2, 'c': 1, 'd': 1,
+     'e': 1, 'f': 1, 'g': 1, 'h': 1,
+     'i': 1, 'j': 1, 'k': 1, 'l': 1,
+     'm': 1, 'n': 1, 'o': 1, 'p': 1,
+     'q': 1, 'r': 3, 's': 1, 't': 1,
+     'u': 2, 'v': 1, 'w': 1, 'x': 1,
+     'y': 1, 'z': 1, '{': 13, '|': 12,
+     '}': 13, '~': 13},
 
     {'0': 1, '1': 1, '2': 1, '3': 1,
      '4': 1, '5': 1, '6': 1, '7': 1,
      't': 1, 'u': 1, 'v': 1, 'w': 1,
      'x': 1, 'y': 1, 'z': 1},
 
-    {'"': 16, "'": 15, '0': 1, '1': 1,
+    {'"': 20, "'": 19, '0': 1, '1': 1,
+     '2': 1, '3': 1, '4': 1, '5': 1,
+     '6': 1, '7': 1, '8': 1, '9': 1,
+     'A': 1, 'B': 1, 'C': 1, 'D': 1,
+     'E': 1, 'F': 1, 'G': 1, 'H': 1,
+     'I': 1, 'J': 1, 'K': 1, 'L': 1,
+     'M': 1, 'N': 1, 'O': 1, 'P': 1,
+     'Q': 1, 'R': 3, 'S': 1, 'T': 1,
+     'U': 1, 'V': 1, 'W': 1, 'X': 1,
+     'Y': 1, 'Z': 1, '_': 1, 'a': 1,
+     'b': 1, 'c': 1, 'd': 1, 'e': 1,
+     'f': 1, 'g': 1, 'h': 1, 'i': 1,
+     'j': 1, 'k': 1, 'l': 1, 'm': 1,
+     'n': 1, 'o': 1, 'p': 1, 'q': 1,
+     'r': 3, 's': 1, 't': 1, 'u': 1,
+     'v': 1, 'w': 1, 'x': 1, 'y': 1,
+     'z': 1},
+
+    {'"': 20, "'": 19, '0': 1, '1': 1,
      '2': 1, '3': 1, '4': 1, '5': 1,
      '6': 1, '7': 1, '8': 1, '9': 1,
      'A': 1, 'B': 1, 'C': 1, 'D': 1,
      'v': 1, 'w': 1, 'x': 1, 'y': 1,
      'z': 1},
 
-    {'"': 16, "'": 15, '0': 1, '1': 1,
-     '2': 1, '3': 1, '4': 1, '5': 1,
-     '6': 1, '7': 1, '8': 1, '9': 1,
-     'A': 1, 'B': 1, 'C': 1, 'D': 1,
-     'E': 1, 'F': 1, 'G': 1, 'H': 1,
-     'I': 1, 'J': 1, 'K': 1, 'L': 1,
-     'M': 1, 'N': 1, 'O': 1, 'P': 1,
-     'Q': 1, 'R': 2, 'S': 1, 'T': 1,
-     'U': 1, 'V': 1, 'W': 1, 'X': 1,
-     'Y': 1, 'Z': 1, '_': 1, 'a': 1,
-     'b': 1, 'c': 1, 'd': 1, 'e': 1,
-     'f': 1, 'g': 1, 'h': 1, 'i': 1,
-     'j': 1, 'k': 1, 'l': 1, 'm': 1,
-     'n': 1, 'o': 1, 'p': 1, 'q': 1,
-     'r': 2, 's': 1, 't': 1, 'u': 1,
-     'v': 1, 'w': 1, 'x': 1, 'y': 1,
-     'z': 1},
-
     {'.': 24, '0': 22, '1': 22, '2': 22,
      '3': 22, '4': 22, '5': 22, '6': 22,
      '7': 22, '8': 23, '9': 23, 'E': 25,
 
     {'=': 13, '>': 12},
 
-    {'=': 13, '<': 12, '>': 13},
+    {'<': 12, '=': 13, '>': 13},
 
     {'=': 13},
 
-    {'=': 13, '/': 12},
+    {'/': 12, '=': 13},
 
     {'=': 13},
 
 
     {'\n': 13},
 
-    {automata.DEFAULT: 19, '\n': 27, '\\': 29, "'": 28},
+    {automata.DEFAULT: 19, '\n': 27,
+     "'": 28, '\\': 29},
 
-    {automata.DEFAULT: 20, '"': 30, '\n': 27, '\\': 31},
+    {automata.DEFAULT: 20, '\n': 27,
+     '"': 30, '\\': 31},
 
     {'\n': 13, '\r': 14},
 
-    {automata.DEFAULT: 18, '\n': 27, '\r': 27},
+    {automata.DEFAULT: 18, '\n': 27,
+     '\r': 27},
 
-    {automata.DEFAULT: 19, '\n': 27, '\\': 29, "'": 13},
+    {automata.DEFAULT: 19, '\n': 27,
+     "'": 13, '\\': 29},
 
-    {automata.DEFAULT: 20, '"': 13, '\n': 27, '\\': 31},
-                
+    {automata.DEFAULT: 20, '\n': 27,
+     '"': 13, '\\': 31},
+
     {'0': 21, '1': 21, '2': 21, '3': 21,
      '4': 21, '5': 21, '6': 21, '7': 21,
      '8': 21, '9': 21, 'A': 21, 'B': 21,
      'C': 21, 'D': 21, 'E': 21, 'F': 21,
      'L': 13, 'a': 21, 'b': 21, 'c': 21,
      'd': 21, 'e': 21, 'f': 21, 'l': 13},
-    
+
     {'.': 24, '0': 22, '1': 22, '2': 22,
      '3': 22, '4': 22, '5': 22, '6': 22,
      '7': 22, '8': 23, '9': 23, 'E': 25,
      'J': 13, 'L': 13, 'e': 25, 'j': 13,
      'l': 13},
-    
+
     {'.': 24, '0': 23, '1': 23, '2': 23,
      '3': 23, '4': 23, '5': 23, '6': 23,
      '7': 23, '8': 23, '9': 23, 'E': 25,
      'J': 13, 'e': 25, 'j': 13},
-    
+
     {'0': 24, '1': 24, '2': 24, '3': 24,
      '4': 24, '5': 24, '6': 24, '7': 24,
      '8': 24, '9': 24, 'E': 32, 'J': 13,
      'e': 32, 'j': 13},
-    
+
     {'+': 33, '-': 33, '0': 34, '1': 34,
      '2': 34, '3': 34, '4': 34, '5': 34,
      '6': 34, '7': 34, '8': 34, '9': 34},
-    
+
     {'0': 26, '1': 26, '2': 26, '3': 26,
      '4': 26, '5': 26, '6': 26, '7': 26,
      '8': 26, '9': 26, 'E': 32, 'J': 13,
      'e': 32, 'j': 13},
-    
+
     {},
 
     {"'": 13},
 
-    {automata.DEFAULT: 35, '\n': 13, '\r': 14},
+    {automata.DEFAULT: 35, '\n': 13,
+     '\r': 14},
 
     {'"': 13},
 
-    {automata.DEFAULT: 36, '\n': 13, '\r': 14},
+    {automata.DEFAULT: 36, '\n': 13,
+     '\r': 14},
 
     {'+': 37, '-': 37, '0': 38, '1': 38,
      '2': 38, '3': 38, '4': 38, '5': 38,
      '6': 38, '7': 38, '8': 38, '9': 38},
-    
-    
+
     {'0': 34, '1': 34, '2': 34, '3': 34,
      '4': 34, '5': 34, '6': 34, '7': 34,
      '8': 34, '9': 34},
-    
+
     {'0': 34, '1': 34, '2': 34, '3': 34,
      '4': 34, '5': 34, '6': 34, '7': 34,
      '8': 34, '9': 34, 'J': 13, 'j': 13},
-    
-    {automata.DEFAULT: 35, '\n': 27, '\\': 29, "'": 13},
-    
-    {automata.DEFAULT: 36, '"': 13, '\n': 27, '\\': 31},
-    
+
+    {automata.DEFAULT: 35, '\n': 27,
+     "'": 13, '\\': 29},
+
+    {automata.DEFAULT: 36, '\n': 27,
+     '"': 13, '\\': 31},
+
     {'0': 38, '1': 38, '2': 38, '3': 38,
      '4': 38, '5': 38, '6': 38, '7': 38,
      '8': 38, '9': 38},
     {'0': 38, '1': 38, '2': 38, '3': 38,
      '4': 38, '5': 38, '6': 38, '7': 38,
      '8': 38, '9': 38, 'J': 13, 'j': 13},
+
     ]
+pseudoDFA = automata.DFA(states, accepts)
 
-pseudoDFA = automata.DFA(pseudoStates, pseudoStatesAccepts)
+accepts = [False, False, False, False, False, True]
+states = [
+    {automata.DEFAULT: 0, '"': 1,
+     '\\': 2},
 
-double3StatesAccepts = [False, False, False, False, False, True]
-double3States = [
-    {automata.DEFAULT: 0, '"': 1, '\\': 2},
-    {automata.DEFAULT: 4, '"': 3, '\\': 2},
+    {automata.DEFAULT: 4, '"': 3,
+     '\\': 2},
+
     {automata.DEFAULT: 4},
-    {automata.DEFAULT: 4, '"': 5, '\\': 2},
-    {automata.DEFAULT: 4, '"': 1, '\\': 2},
-    {automata.DEFAULT: 4, '"': 5, '\\': 2},
+
+    {automata.DEFAULT: 4, '"': 5,
+     '\\': 2},
+
+    {automata.DEFAULT: 4, '"': 1,
+     '\\': 2},
+
+    {automata.DEFAULT: 4, '"': 5,
+     '\\': 2},
+
     ]
-double3DFA = automata.NonGreedyDFA(double3States, double3StatesAccepts)
+double3DFA = automata.NonGreedyDFA(states, accepts)
 
-single3StatesAccepts = [False, False, False, False, False, True]
-single3States = [
-    {automata.DEFAULT: 0, '\\': 2, "'": 1},
-    {automata.DEFAULT: 4, '\\': 2, "'": 3},
+accepts = [False, False, False, False, False, True]
+states = [
+    {automata.DEFAULT: 0, "'": 1,
+     '\\': 2},
+
+    {automata.DEFAULT: 4, "'": 3,
+     '\\': 2},
+
     {automata.DEFAULT: 4},
-    {automata.DEFAULT: 4, '\\': 2, "'": 5},
-    {automata.DEFAULT: 4, '\\': 2, "'": 1},
-    {automata.DEFAULT: 4, '\\': 2, "'": 5},
+
+    {automata.DEFAULT: 4, "'": 5,
+     '\\': 2},
+
+    {automata.DEFAULT: 4, "'": 1,
+     '\\': 2},
+
+    {automata.DEFAULT: 4, "'": 5,
+     '\\': 2},
+
     ]
-single3DFA = automata.NonGreedyDFA(single3States, single3StatesAccepts)
+single3DFA = automata.NonGreedyDFA(states, accepts)
 
-singleStatesAccepts = [False, True, False]
-singleStates = [
-    {automata.DEFAULT: 0, '\\': 2, "'": 1},
+accepts = [False, True, False, False]
+states = [
+    {automata.DEFAULT: 0, '"': 1,
+     '\\': 2},
+
     {},
-    {automata.DEFAULT: 0},
+
+    {automata.DEFAULT: 3},
+
+    {automata.DEFAULT: 3, '"': 1,
+     '\\': 2},
+
     ]
-singleDFA = automata.DFA(singleStates, singleStatesAccepts)
+doubleDFA = automata.DFA(states, accepts)
 
-doubleStatesAccepts = [False, True, False]
-doubleStates = [
-    {automata.DEFAULT: 0, '"': 1, '\\': 2},
+accepts = [False, True, False, False]
+states = [
+    {automata.DEFAULT: 0, "'": 1,
+     '\\': 2},
+
     {},
-    {automata.DEFAULT: 0},
+
+    {automata.DEFAULT: 3},
+
+    {automata.DEFAULT: 3, "'": 1,
+     '\\': 2},
+
     ]
-doubleDFA = automata.DFA(doubleStates, doubleStatesAccepts)
+singleDFA = automata.DFA(states, accepts)
 
 endDFAs = {"'" : singleDFA,
            '"' : doubleDFA,
-           "r" : None,
-           "R" : None,
-           "u" : None,
-           "U" : None,
-           "b" : None,
-           "B" : None}
+           'r' : None,
+           'R' : None,
+           'u' : None,
+           'U' : None,
+           'b' : None,
+           'B' : None}
+
+#_______________________________________________________________________
+# End of automatically generated DFA's
 
 for uniPrefix in ("", "u", "U", "b", "B"):
     for rawPrefix in ("", "r", "R"):