Commits

committed 7481726

Introduced a directory 'tool/algo/' for the algorithmic modules. It's a good
place to regroup sparsemat, unionfind, and a new graph-manipulation module
that performs cycle detection (graphs in the general sense, not flow graphs).

• Participants
• Parent commits 6670a60

File pypy/annotation/bookkeeper.py

` from pypy.interpreter.argument import Arguments, ArgErr`
` from pypy.rpython.rarithmetic import r_uint`
` from pypy.rpython.objectmodel import r_dict`
`-from pypy.tool.unionfind import UnionFind`
`+from pypy.tool.algo.unionfind import UnionFind`
` from pypy.rpython import lltype`
` from pypy.rpython.memory import lladdress`
` `

File pypy/tool/algo/__init__.py

`+#empty`

File pypy/tool/algo/graphlib.py

`+"""`
`+Utilities to manipulate graphs (vertices and edges, not control flow graphs).`
`+"""`
`+`
`+class Edge:`
`+    def __init__(self, source, target):`
`+        self.source = source`
`+        self.target = target`
`+`
`+def depth_first_search(root, vertices, edges):`
`+    seen = {}`
`+    result = []`
`+    def visit(vertex):`
`+        result.append(('start', vertex))`
`+        seen[vertex] = True`
`+        for edge in edges[vertex]:`
`+            w = edge.target`
`+            if w in vertices and w not in seen:`
`+                visit(w)`
`+        result.append(('stop', vertex))`
`+    visit(root)`
`+    return result`
`+`
`+def strong_components(vertices, edges):`
`+    """Enumerates the strongly connected components of a graph.  Each one is`
`+    a set of vertices where any node can be reached from any other vertex by`
`+    following the edges.  'edges' is a dict {vertex: [edges]})"""`
`+`
`+    component_root = {}`
`+    discovery_time = {}`
`+    stack = []`
`+`
`+    for root in vertices:`
`+        if root not in discovery_time:`
`+`
`+            for event, v in depth_first_search(root, vertices, edges):`
`+                if event == 'start':`
`+                    discovery_time[v] = len(discovery_time)`
`+                    component_root[v] = v`
`+                    stack.append(v)`
`+`
`+                else:  # event == 'stop'`
`+                    vroot = v`
`+                    for edge in edges[v]:`
`+                        w = edge.target`
`+                        if w in component_root:`
`+                            wroot = component_root[w]`
`+                            if discovery_time[wroot] < discovery_time[vroot]:`
`+                                vroot = wroot`
`+                    if vroot is v:`
`+                        component = {}`
`+                        while True:`
`+                            w = stack.pop()`
`+                            del component_root[w]`
`+                            component[w] = True`
`+                            if w is v:`
`+                                break`
`+                        yield component`
`+                    else:`
`+                        component_root[v] = vroot`
`+`
`+def all_cycles(root, vertices, edges):`
`+    """Enumerates cycles.  Each cycle is a list of edges."""`
`+    stackpos = {}`
`+    edgestack = []`
`+    result = []`
`+    def visit(v):`
`+        if v not in stackpos:`
`+            stackpos[v] = len(edgestack)`
`+            for edge in edges[v]:`
`+                if edge.target in vertices:`
`+                    edgestack.append(edge)`
`+                    visit(edge.target)`
`+                    edgestack.pop()`
`+            stackpos[v] = None`
`+        else:`
`+            if stackpos[v] is not None:   # back-edge`
`+                result.append(edgestack[stackpos[v]:])`
`+    visit(root)`
`+    return result`
`+`
`+def break_cycles(vertices, edges):`
`+    """Enumerates a reasonably minimal set of edges that must be removed to`
`+    make the graph acyclic."""`
`+    graphs = [(vertices, edges)]`
`+    for vertices, edges in graphs:`
`+        #print ''.join(vertices),`
`+        #print [e.source+e.target for l in edges.values() for e in l]`
`+        for component in strong_components(vertices, edges):`
`+            #print '-->', ''.join(component)`
`+            edge_weights = {}`
`+            random_vertex = component.iterkeys().next()`
`+            for cycle in all_cycles(random_vertex, vertices, edges):`
`+                #print '\tcycle:', [e.source+e.target for e in cycle]`
`+                for edge in cycle:`
`+                    edge_weights[edge] = edge_weights.get(edge, 0) + 1`
`+            if edge_weights:`
`+                max_weight = max(edge_weights.values())`
`+                for edge, weight in edge_weights.iteritems():`
`+                    if weight == max_weight:`
`+                        break`
`+                # kill this edge`
`+                yield edge`
`+                new_edges = edges.copy()`
`+                new_edges[edge.source] = [e for e in new_edges[edge.source]`
`+                                            if e is not edge]`
`+                graphs.append((component, new_edges))`

File pypy/tool/algo/sparsemat.py

`+from __future__ import division`
`+`
`+EPSILON = 1E-6`
`+`
`+`
`+class SparseMatrix:`
`+`
`+    def __init__(self, height):`
`+        self.lines = [{} for row in range(height)]`
`+`
`+    def __getitem__(self, (row, col)):`
`+        return self.lines[row].get(col, 0)`
`+`
`+    def __setitem__(self, (row, col), value):`
`+        if abs(value) > EPSILON:`
`+            self.lines[row][col] = value`
`+        else:`
`+            try:`
`+                del self.lines[row][col]`
`+            except KeyError:`
`+                pass`
`+`
`+    def copy(self):`
`+        m = SparseMatrix(len(self.lines))`
`+        for line1, line2 in zip(self.lines, m.lines):`
`+            line2.update(line1)`
`+        return m`
`+`
`+    def solve(self, vector):`
`+        """Solves  'self * [x1...xn] == vector'; returns the list [x1...xn].`
`+        Raises ValueError if no solution or indeterminate.`
`+        """`
`+        vector = list(vector)`
`+        lines = [line.copy() for line in self.lines]`
`+        columns = [{} for i in range(len(vector))]`
`+        for i, line in enumerate(lines):`
`+            for j, a in line.items():`
`+                columns[j][i] = a`
`+        lines_left = dict.fromkeys(range(len(self.lines)))`
`+        nrows = []`
`+        for ncol in range(len(vector)):`
`+            currentcolumn = columns[ncol]`
`+            lst = [(abs(a), i) for (i, a) in currentcolumn.items()`
`+                               if i in lines_left]`
`+            _, nrow = min(lst)    # ValueError -> no solution`
`+            nrows.append(nrow)`
`+            del lines_left[nrow]`
`+            line1 = lines[nrow]`
`+            mina = line1[ncol]`
`+            for _, i in lst:`
`+                if i != nrow:`
`+                    line2 = lines[i]`
`+                    a = line2.pop(ncol)`
`+                    #del currentcolumn[i]  -- but currentcolumn no longer used`
`+                    factor = a / mina`
`+                    vector[i] -= factor*vector[nrow]`
`+                    for col in line1:`
`+                        if col > ncol:`
`+                            value = line2.get(col, 0) - factor*line1[col]`
`+                            if abs(value) > EPSILON:`
`+                                line2[col] = columns[col][i] = value`
`+                            else:`
`+                                del line2[col]`
`+                                del columns[col][i]`
`+        solution = [None] * len(vector)`
`+        for i in range(len(vector)-1, -1, -1):`
`+            row = nrows[i]`
`+            line = lines[row]`
`+            total = vector[row]`
`+            for j, a in line.items():`
`+                if j != i:`
`+                    total -= a * solution[j]`
`+            solution[i] = total / line[i]`
`+        return solution`

File pypy/tool/algo/test/__init__.py

`+#empty`

File pypy/tool/algo/test/autopath.py

`+"""`
`+self cloning, automatic path configuration `
`+`
`+copy this into any subdirectory of pypy from which scripts need `
`+to be run, typically all of the test subdirs. `
`+The idea is that any such script simply issues`
`+`
`+    import autopath`
`+`
`+and this will make sure that the parent directory containing "pypy"`
`+is in sys.path. `
`+`
`+If you modify the master "autopath.py" version (in pypy/tool/autopath.py) `
`+you can directly run it which will copy itself on all autopath.py files`
`+it finds under the pypy root directory. `
`+`
`+This module always provides these attributes:`
`+`
`+    pypydir    pypy root directory path `
`+    this_dir   directory where this autopath.py resides `
`+`
`+"""`
`+`
`+`
`+def __dirinfo(part):`
`+    """ return (partdir, this_dir) and insert parent of partdir`
`+    into sys.path.  If the parent directories don't have the part`
`+    an EnvironmentError is raised."""`
`+`
`+    import sys, os`
`+    try:`
`+        head = this_dir = os.path.realpath(os.path.dirname(__file__))`
`+    except NameError:`
`+        head = this_dir = os.path.realpath(os.path.dirname(sys.argv[0]))`
`+`
`+    while head:`
`+        partdir = head`
`+        head, tail = os.path.split(head)`
`+        if tail == part:`
`+            break`
`+    else:`
`+        raise EnvironmentError, "'%s' missing in '%r'" % (partdir, this_dir)`
`+    `
`+    checkpaths = sys.path[:]`
`+    pypy_root = os.path.join(head, '')`
`+    `
`+    while checkpaths:`
`+        orig = checkpaths.pop()`
`+        fullorig = os.path.join(os.path.realpath(orig), '')`
`+        if fullorig.startswith(pypy_root):`
`+            if os.path.exists(os.path.join(fullorig, '__init__.py')):`
`+                sys.path.remove(orig)`
`+    try:`
`+        sys.path.remove(head)`
`+    except ValueError:`
`+        pass`
`+    sys.path.insert(0, head)`
`+`
`+    munged = {}`
`+    for name, mod in sys.modules.items():`
`+        fn = getattr(mod, '__file__', None)`
`+        if '.' in name or not isinstance(fn, str):`
`+            continue`
`+        newname = os.path.splitext(os.path.basename(fn))[0]`
`+        if not newname.startswith(part + '.'):`
`+            continue`
`+        path = os.path.join(os.path.dirname(os.path.realpath(fn)), '')`
`+        if path.startswith(pypy_root) and newname != part:`
`+            modpaths = os.path.normpath(path[len(pypy_root):]).split(os.sep)`
`+            if newname != '__init__':`
`+                modpaths.append(newname)`
`+            modpath = '.'.join(modpaths)`
`+            if modpath not in sys.modules:`
`+                munged[modpath] = mod`
`+`
`+    for name, mod in munged.iteritems():`
`+        if name not in sys.modules:`
`+            sys.modules[name] = mod`
`+        if '.' in name:`
`+            prename = name[:name.rfind('.')]`
`+            postname = name[len(prename)+1:]`
`+            if prename not in sys.modules:`
`+                __import__(prename)`
`+                if not hasattr(sys.modules[prename], postname):`
`+                    setattr(sys.modules[prename], postname, mod)`
`+`
`+    return partdir, this_dir`
`+`
`+def __clone():`
`+    """ clone master version of autopath.py into all subdirs """`
`+    from os.path import join, walk`
`+    if not this_dir.endswith(join('pypy','tool')):`
`+        raise EnvironmentError("can only clone master version "`
`+                               "'%s'" % join(pypydir, 'tool',_myname))`
`+`
`+`
`+    def sync_walker(arg, dirname, fnames):`
`+        if _myname in fnames:`
`+            fn = join(dirname, _myname)`
`+            f = open(fn, 'rwb+')`
`+            try:`
`+                if f.read() == arg:`
`+                    print "checkok", fn`
`+                else:`
`+                    print "syncing", fn`
`+                    f = open(fn, 'w')`
`+                    f.write(arg)`
`+            finally:`
`+                f.close()`
`+    s = open(join(pypydir, 'tool', _myname), 'rb').read()`
`+    walk(pypydir, sync_walker, s)`
`+`
`+_myname = 'autopath.py'`
`+`
`+# set guaranteed attributes`
`+`
`+pypydir, this_dir = __dirinfo('pypy')`
`+`
`+if __name__ == '__main__':`
`+    __clone()`

File pypy/tool/algo/test/test_graphlib.py

`+import autopath`
`+from pypy.tool.algo.graphlib import *`
`+`
`+edges = {`
`+    'A': [Edge('A','B'), Edge('A','C')],`
`+    'B': [Edge('B','D'), Edge('B','E')],`
`+    'C': [Edge('C','F')],`
`+    'D': [Edge('D','D')],`
`+    'E': [Edge('E','A'), Edge('E','C')],`
`+    'F': [],`
`+    'G': [],`
`+    }`
`+`
`+def copy_edges(edges):`
`+    result = {}`
`+    for key, value in edges.items():`
`+        result[key] = value[:]`
`+    return result`
`+`
`+`
`+def test_strong_components():`
`+    saved = copy_edges(edges)`
`+    result = list(strong_components(edges, edges))`
`+    assert edges == saved`
`+    for comp in result:`
`+        comp = list(comp)`
`+        comp.sort()`
`+    result = [''.join(comp) for comp in result]`
`+    result.sort()`
`+    assert result == ['ABE', 'C', 'D', 'F', 'G']`
`+`
`+def test_all_cycles():`
`+    saved = copy_edges(edges)`
`+    cycles = list(all_cycles('A', edges, edges))`
`+    assert edges == saved`
`+    cycles.sort()`
`+    expected = [`
`+        [edges['A'][0], edges['B'][1], edges['E'][0]],`
`+        [edges['D'][0]],`
`+        ]`
`+    expected.sort()`
`+    assert cycles == expected`
`+`
`+def test_break_cycles():`
`+    saved = copy_edges(edges)`
`+    result = list(break_cycles(edges, edges))`
`+    assert edges == saved`
`+    assert len(result) == 2`
`+    assert edges['D'][0] in result`
`+    assert (edges['A'][0] in result or`
`+            edges['B'][1] in result or`
`+            edges['E'][0] in result)`

File pypy/tool/algo/test/test_sparsemat.py

`+import autopath`
`+from pypy.tool.algo.sparsemat import *`
`+`
`+`
`+def test_sparsemat1():`
`+    import py`
`+    M = SparseMatrix(4)`
`+    M[0,0] = M[1,1] = M[2,2] = M[3,3] = 1`
`+    M[0,1] = -1.0`
`+    M[1,2] = M[1,3] = -0.5`
`+    M[2,1] = -1.0`
`+    res = M.solve([4, 5, 4, 1])`
`+    assert res == [19, 15, 19, 1]`
`+`
`+def test_sparsemat2():`
`+    import py`
`+    M = SparseMatrix(4)`
`+    M[0,0] = M[1,1] = M[2,2] = M[3,3] = 1`
`+    M[0,1] = -1.0`
`+    M[1,2] = M[1,3] = -0.5`
`+    M[2,1] = M[2,3] = -0.5`
`+    res = M.solve([6, 3, 6, 0])`
`+    assert res == [14, 8, 10, 0]`

File pypy/tool/algo/unionfind.py

`+# This is a general algorithm used by the annotator.`
`+`
`+# union-find impl, a info object is attached to the roots`
`+`
`+class UnionFind(object):`
`+`
`+    def __init__(self, info_factory=None):`
`+        self.link_to_parent = {}`
`+        self.weight = {}`
`+        self.info_factory = info_factory`
`+        self.root_info = {}`
`+`
`+    # mapping-like [] access`
`+    def __getitem__(self, obj):`
`+        if obj not in self.link_to_parent:`
`+            raise KeyError, obj`
`+`
`+        ignore, rep, info = self.find(obj)`
`+`
`+        return info`
`+`
`+    def __contains__(self, obj):`
`+        return obj in self.link_to_parent`
`+`
`+    def __iter__(self):`
`+        return iter(self.link_to_parent)`
`+`
`+    def keys(self):`
`+        return self.link_to_parent.keys()`
`+`
`+    def infos(self):`
`+        return self.root_info.values()`
`+`
`+    def find_rep(self, obj):`
`+        try:`
`+            # fast path (shortcut for performance reasons)`
`+            parent = self.link_to_parent[obj]`
`+            self.root_info[parent]   # may raise KeyError`
`+            return parent`
`+        except KeyError:`
`+            # general case`
`+            ignore, rep, info = self.find(obj)`
`+            return rep`
`+`
`+    def find(self, obj):  # -> new_root, obj, info`
`+        if obj not in self.link_to_parent:`
`+            if self.info_factory:`
`+                info = self.info_factory(obj)`
`+            else:`
`+                info = None`
`+            self.root_info[obj] = info`
`+            self.weight[obj] = 1`
`+            self.link_to_parent[obj] = obj`
`+            return True, obj, info`
`+`
`+        to_root = [obj]`
`+        parent = self.link_to_parent[obj]`
`+        while parent is not to_root[-1]:`
`+            to_root.append(parent)`
`+            parent = self.link_to_parent[parent]`
`+`
`+        for obj in to_root:`
`+            self.link_to_parent[obj] = parent`
`+`
`+        return False, parent, self.root_info[parent]`
`+`
`+`
`+    def union(self, obj1, obj2): # -> not_noop, rep, info`
`+`
`+        new1, rep1, info1 = self.find(obj1)`
`+        new2, rep2, info2 = self.find(obj2)`
`+`
`+        if rep1 is rep2:`
`+            return new1 or new2, rep1, info1`
`+`
`+        w1 = self.weight[rep1]`
`+        w2 = self.weight[rep2]`
`+`
`+        w = w1 + w2`
`+`
`+        if w1 < w2:`
`+            rep1, rep2, info1, info2, = rep2, rep1, info2, info1`
`+`
`+        if info1 is not None:`
`+            info1.update(info2)`
`+`
`+        self.link_to_parent[rep2] = rep1`
`+`
`+        del self.weight[rep2]`
`+        del self.root_info[rep2]`
`+`
`+        self.weight[rep1] = w`
`+        self.root_info[rep1] = info1`
`+`
`+        return True, rep1, info1`
`+        `
`+`
`+    `

File pypy/tool/math/__init__.py

`-#empty`

File pypy/tool/math/graphlib.py

`-"""`
`-Utilities to manipulate graphs (vertices and edges, not control flow graphs).`
`-"""`
`-`
`-class Edge:`
`-    def __init__(self, source, target):`
`-        self.source = source`
`-        self.target = target`
`-`
`-def depth_first_search(root, vertices, edges):`
`-    seen = {}`
`-    result = []`
`-    def visit(vertex):`
`-        result.append(('start', vertex))`
`-        seen[vertex] = True`
`-        for edge in edges[vertex]:`
`-            w = edge.target`
`-            if w in vertices and w not in seen:`
`-                visit(w)`
`-        result.append(('stop', vertex))`
`-    visit(root)`
`-    return result`
`-`
`-def strong_components(vertices, edges):`
`-    """Enumerates the strongly connected components of a graph.  Each one is`
`-    a set of vertices where any node can be reached from any other vertex by`
`-    following the edges.  'edges' is a dict {vertex: [edges]})"""`
`-`
`-    component_root = {}`
`-    discovery_time = {}`
`-    stack = []`
`-`
`-    for root in vertices:`
`-        if root not in discovery_time:`
`-`
`-            for event, v in depth_first_search(root, vertices, edges):`
`-                if event == 'start':`
`-                    discovery_time[v] = len(discovery_time)`
`-                    component_root[v] = v`
`-                    stack.append(v)`
`-`
`-                else:  # event == 'stop'`
`-                    vroot = v`
`-                    for edge in edges[v]:`
`-                        w = edge.target`
`-                        if w in component_root:`
`-                            wroot = component_root[w]`
`-                            if discovery_time[wroot] < discovery_time[vroot]:`
`-                                vroot = wroot`
`-                    if vroot is v:`
`-                        component = {}`
`-                        while True:`
`-                            w = stack.pop()`
`-                            del component_root[w]`
`-                            component[w] = True`
`-                            if w is v:`
`-                                break`
`-                        yield component`
`-                    else:`
`-                        component_root[v] = vroot`
`-`
`-def all_cycles(root, vertices, edges):`
`-    """Enumerates cycles.  Each cycle is a list of edges."""`
`-    stackpos = {}`
`-    edgestack = []`
`-    result = []`
`-    def visit(v):`
`-        if v not in stackpos:`
`-            stackpos[v] = len(edgestack)`
`-            for edge in edges[v]:`
`-                if edge.target in vertices:`
`-                    edgestack.append(edge)`
`-                    visit(edge.target)`
`-                    edgestack.pop()`
`-            stackpos[v] = None`
`-        else:`
`-            if stackpos[v] is not None:   # back-edge`
`-                result.append(edgestack[stackpos[v]:])`
`-    visit(root)`
`-    return result`
`-`
`-def break_cycles(vertices, edges):`
`-    """Enumerates a reasonably minimal set of edges that must be removed to`
`-    make the graph acyclic."""`
`-    graphs = [(vertices, edges)]`
`-    for vertices, edges in graphs:`
`-        #print ''.join(vertices),`
`-        #print [e.source+e.target for l in edges.values() for e in l]`
`-        for component in strong_components(vertices, edges):`
`-            #print '-->', ''.join(component)`
`-            edge_weights = {}`
`-            random_vertex = component.iterkeys().next()`
`-            for cycle in all_cycles(random_vertex, vertices, edges):`
`-                #print '\tcycle:', [e.source+e.target for e in cycle]`
`-                for edge in cycle:`
`-                    edge_weights[edge] = edge_weights.get(edge, 0) + 1`
`-            if edge_weights:`
`-                max_weight = max(edge_weights.values())`
`-                for edge, weight in edge_weights.iteritems():`
`-                    if weight == max_weight:`
`-                        break`
`-                # kill this edge`
`-                yield edge`
`-                new_edges = edges.copy()`
`-                new_edges[edge.source] = [e for e in new_edges[edge.source]`
`-                                            if e is not edge]`
`-                graphs.append((component, new_edges))`

File pypy/tool/math/sparsemat.py

`-from __future__ import division`
`-`
`-EPSILON = 1E-6`
`-`
`-`
`-class SparseMatrix:`
`-`
`-    def __init__(self, height):`
`-        self.lines = [{} for row in range(height)]`
`-`
`-    def __getitem__(self, (row, col)):`
`-        return self.lines[row].get(col, 0)`
`-`
`-    def __setitem__(self, (row, col), value):`
`-        if abs(value) > EPSILON:`
`-            self.lines[row][col] = value`
`-        else:`
`-            try:`
`-                del self.lines[row][col]`
`-            except KeyError:`
`-                pass`
`-`
`-    def copy(self):`
`-        m = SparseMatrix(len(self.lines))`
`-        for line1, line2 in zip(self.lines, m.lines):`
`-            line2.update(line1)`
`-        return m`
`-`
`-    def solve(self, vector):`
`-        """Solves  'self * [x1...xn] == vector'; returns the list [x1...xn].`
`-        Raises ValueError if no solution or indeterminate.`
`-        """`
`-        vector = list(vector)`
`-        lines = [line.copy() for line in self.lines]`
`-        columns = [{} for i in range(len(vector))]`
`-        for i, line in enumerate(lines):`
`-            for j, a in line.items():`
`-                columns[j][i] = a`
`-        lines_left = dict.fromkeys(range(len(self.lines)))`
`-        nrows = []`
`-        for ncol in range(len(vector)):`
`-            currentcolumn = columns[ncol]`
`-            lst = [(abs(a), i) for (i, a) in currentcolumn.items()`
`-                               if i in lines_left]`
`-            _, nrow = min(lst)    # ValueError -> no solution`
`-            nrows.append(nrow)`
`-            del lines_left[nrow]`
`-            line1 = lines[nrow]`
`-            mina = line1[ncol]`
`-            for _, i in lst:`
`-                if i != nrow:`
`-                    line2 = lines[i]`
`-                    a = line2.pop(ncol)`
`-                    #del currentcolumn[i]  -- but currentcolumn no longer used`
`-                    factor = a / mina`
`-                    vector[i] -= factor*vector[nrow]`
`-                    for col in line1:`
`-                        if col > ncol:`
`-                            value = line2.get(col, 0) - factor*line1[col]`
`-                            if abs(value) > EPSILON:`
`-                                line2[col] = columns[col][i] = value`
`-                            else:`
`-                                del line2[col]`
`-                                del columns[col][i]`
`-        solution = [None] * len(vector)`
`-        for i in range(len(vector)-1, -1, -1):`
`-            row = nrows[i]`
`-            line = lines[row]`
`-            total = vector[row]`
`-            for j, a in line.items():`
`-                if j != i:`
`-                    total -= a * solution[j]`
`-            solution[i] = total / line[i]`
`-        return solution`
`-`
`-`
`-def test_sparsemat1():`
`-    import py`
`-    M = SparseMatrix(4)`
`-    M[0,0] = M[1,1] = M[2,2] = M[3,3] = 1`
`-    M[0,1] = -1.0`
`-    M[1,2] = M[1,3] = -0.5`
`-    M[2,1] = -1.0`
`-    res = M.solve([4, 5, 4, 1])`
`-    assert res == [19, 15, 19, 1]`
`-`
`-def test_sparsemat2():`
`-    import py`
`-    M = SparseMatrix(4)`
`-    M[0,0] = M[1,1] = M[2,2] = M[3,3] = 1`
`-    M[0,1] = -1.0`
`-    M[1,2] = M[1,3] = -0.5`
`-    M[2,1] = M[2,3] = -0.5`
`-    res = M.solve([6, 3, 6, 0])`
`-    assert res == [14, 8, 10, 0]`

File pypy/tool/math/test/__init__.py

`-#empty`

File pypy/tool/math/test/test_graphlib.py

`-import autopath`
`-from pypy.tool.math.graphlib import *`
`-`
`-# XXX transform.py is difficult to test directly`
`-`
`-edges = {`
`-    'A': [Edge('A','B'), Edge('A','C')],`
`-    'B': [Edge('B','D'), Edge('B','E')],`
`-    'C': [Edge('C','F')],`
`-    'D': [Edge('D','D')],`
`-    'E': [Edge('E','A'), Edge('E','C')],`
`-    'F': [],`
`-    'G': [],`
`-    }`
`-`
`-def copy_edges(edges):`
`-    result = {}`
`-    for key, value in edges.items():`
`-        result[key] = value[:]`
`-    return result`
`-`
`-`
`-def test_strong_components():`
`-    saved = copy_edges(edges)`
`-    result = list(strong_components(edges, edges))`
`-    assert edges == saved`
`-    for comp in result:`
`-        comp = list(comp)`
`-        comp.sort()`
`-    result = [''.join(comp) for comp in result]`
`-    result.sort()`
`-    assert result == ['ABE', 'C', 'D', 'F', 'G']`
`-`
`-def test_all_cycles():`
`-    saved = copy_edges(edges)`
`-    cycles = list(all_cycles('A', edges, edges))`
`-    assert edges == saved`
`-    cycles.sort()`
`-    expected = [`
`-        [edges['A'][0], edges['B'][1], edges['E'][0]],`
`-        [edges['D'][0]],`
`-        ]`
`-    expected.sort()`
`-    assert cycles == expected`
`-`
`-def test_break_cycles():`
`-    saved = copy_edges(edges)`
`-    result = list(break_cycles(edges, edges))`
`-    assert edges == saved`
`-    assert len(result) == 2`
`-    assert edges['D'][0] in result`
`-    assert (edges['A'][0] in result or`
`-            edges['B'][1] in result or`
`-            edges['E'][0] in result)`

File pypy/tool/math/unionfind.py

`-# This is a general algorithm used by the annotator.`
`-`
`-# union-find impl, a info object is attached to the roots`
`-`
`-class UnionFind(object):`
`-`
`-    def __init__(self, info_factory=None):`
`-        self.link_to_parent = {}`
`-        self.weight = {}`
`-        self.info_factory = info_factory`
`-        self.root_info = {}`
`-`
`-    # mapping-like [] access`
`-    def __getitem__(self, obj):`
`-        if obj not in self.link_to_parent:`
`-            raise KeyError, obj`
`-`
`-        ignore, rep, info = self.find(obj)`
`-`
`-        return info`
`-`
`-    def __contains__(self, obj):`
`-        return obj in self.link_to_parent`
`-`
`-    def __iter__(self):`
`-        return iter(self.link_to_parent)`
`-`
`-    def keys(self):`
`-        return self.link_to_parent.keys()`
`-`
`-    def infos(self):`
`-        return self.root_info.values()`
`-`
`-    def find_rep(self, obj):`
`-        try:`
`-            # fast path (shortcut for performance reasons)`
`-            parent = self.link_to_parent[obj]`
`-            self.root_info[parent]   # may raise KeyError`
`-            return parent`
`-        except KeyError:`
`-            # general case`
`-            ignore, rep, info = self.find(obj)`
`-            return rep`
`-`
`-    def find(self, obj):  # -> new_root, obj, info`
`-        if obj not in self.link_to_parent:`
`-            if self.info_factory:`
`-                info = self.info_factory(obj)`
`-            else:`
`-                info = None`
`-            self.root_info[obj] = info`
`-            self.weight[obj] = 1`
`-            self.link_to_parent[obj] = obj`
`-            return True, obj, info`
`-`
`-        to_root = [obj]`
`-        parent = self.link_to_parent[obj]`
`-        while parent is not to_root[-1]:`
`-            to_root.append(parent)`
`-            parent = self.link_to_parent[parent]`
`-`
`-        for obj in to_root:`
`-            self.link_to_parent[obj] = parent`
`-`
`-        return False, parent, self.root_info[parent]`
`-`
`-`
`-    def union(self, obj1, obj2): # -> not_noop, rep, info`
`-`
`-        new1, rep1, info1 = self.find(obj1)`
`-        new2, rep2, info2 = self.find(obj2)`
`-`
`-        if rep1 is rep2:`
`-            return new1 or new2, rep1, info1`
`-`
`-        w1 = self.weight[rep1]`
`-        w2 = self.weight[rep2]`
`-`
`-        w = w1 + w2`
`-`
`-        if w1 < w2:`
`-            rep1, rep2, info1, info2, = rep2, rep1, info2, info1`
`-`
`-        if info1 is not None:`
`-            info1.update(info2)`
`-`
`-        self.link_to_parent[rep2] = rep1`
`-`
`-        del self.weight[rep2]`
`-        del self.root_info[rep2]`
`-`
`-        self.weight[rep1] = w`
`-        self.root_info[rep1] = info1`
`-`
`-        return True, rep1, info1`
`-        `
`-`
`-    `

File pypy/tool/unionfind.py

`-# This is a general algorithm used by the annotator.`
`-`
`-# union-find impl, a info object is attached to the roots`
`-`
`-class UnionFind(object):`
`-`
`-    def __init__(self, info_factory=None):`
`-        self.link_to_parent = {}`
`-        self.weight = {}`
`-        self.info_factory = info_factory`
`-        self.root_info = {}`
`-`
`-    # mapping-like [] access`
`-    def __getitem__(self, obj):`
`-        if obj not in self.link_to_parent:`
`-            raise KeyError, obj`
`-`
`-        ignore, rep, info = self.find(obj)`
`-`
`-        return info`
`-`
`-    def __contains__(self, obj):`
`-        return obj in self.link_to_parent`
`-`
`-    def __iter__(self):`
`-        return iter(self.link_to_parent)`
`-`
`-    def keys(self):`
`-        return self.link_to_parent.keys()`
`-`
`-    def infos(self):`
`-        return self.root_info.values()`
`-`
`-    def find_rep(self, obj):`
`-        try:`
`-            # fast path (shortcut for performance reasons)`
`-            parent = self.link_to_parent[obj]`
`-            self.root_info[parent]   # may raise KeyError`
`-            return parent`
`-        except KeyError:`
`-            # general case`
`-            ignore, rep, info = self.find(obj)`
`-            return rep`
`-`
`-    def find(self, obj):  # -> new_root, obj, info`
`-        if obj not in self.link_to_parent:`
`-            if self.info_factory:`
`-                info = self.info_factory(obj)`
`-            else:`
`-                info = None`
`-            self.root_info[obj] = info`
`-            self.weight[obj] = 1`
`-            self.link_to_parent[obj] = obj`
`-            return True, obj, info`
`-`
`-        to_root = [obj]`
`-        parent = self.link_to_parent[obj]`
`-        while parent is not to_root[-1]:`
`-            to_root.append(parent)`
`-            parent = self.link_to_parent[parent]`
`-`
`-        for obj in to_root:`
`-            self.link_to_parent[obj] = parent`
`-`
`-        return False, parent, self.root_info[parent]`
`-`
`-`
`-    def union(self, obj1, obj2): # -> not_noop, rep, info`
`-`
`-        new1, rep1, info1 = self.find(obj1)`
`-        new2, rep2, info2 = self.find(obj2)`
`-`
`-        if rep1 is rep2:`
`-            return new1 or new2, rep1, info1`
`-`
`-        w1 = self.weight[rep1]`
`-        w2 = self.weight[rep2]`
`-`
`-        w = w1 + w2`
`-`
`-        if w1 < w2:`
`-            rep1, rep2, info1, info2, = rep2, rep1, info2, info1`
`-`
`-        if info1 is not None:`
`-            info1.update(info2)`
`-`
`-        self.link_to_parent[rep2] = rep1`
`-`
`-        del self.weight[rep2]`
`-        del self.root_info[rep2]`
`-`
`-        self.weight[rep1] = w`
`-        self.root_info[rep1] = info1`
`-`
`-        return True, rep1, info1`
`-        `
`-`
`-    `

File pypy/translator/backendopt/inline.py

` from pypy.annotation import model as annmodel`
` from pypy.rpython.lltype import Bool, typeOf`
` from pypy.rpython import rmodel`
`-from pypy.translator.backendopt import sparsemat`
`+from pypy.tool.algo import sparsemat`
` from pypy.translator.backendopt.support import log`
` `
` BASE_INLINE_THRESHOLD = 32.4    # just enough to inline add__Int_Int()`

File pypy/translator/backendopt/malloc.py

` from pypy.objspace.flow.model import Variable, Constant, Block, Link`
` from pypy.objspace.flow.model import SpaceOperation, traverse, checkgraph`
`-from pypy.tool.unionfind import UnionFind`
`+from pypy.tool.algo.unionfind import UnionFind`
` from pypy.rpython import lltype`
` from pypy.translator.simplify import remove_identical_vars`
` from pypy.translator.backendopt.support import log`

File pypy/translator/backendopt/sparsemat.py

`-from __future__ import division`
`-`
`-EPSILON = 1E-6`
`-`
`-`
`-class SparseMatrix:`
`-`
`-    def __init__(self, height):`
`-        self.lines = [{} for row in range(height)]`
`-`
`-    def __getitem__(self, (row, col)):`
`-        return self.lines[row].get(col, 0)`
`-`
`-    def __setitem__(self, (row, col), value):`
`-        if abs(value) > EPSILON:`
`-            self.lines[row][col] = value`
`-        else:`
`-            try:`
`-                del self.lines[row][col]`
`-            except KeyError:`
`-                pass`
`-`
`-    def copy(self):`
`-        m = SparseMatrix(len(self.lines))`
`-        for line1, line2 in zip(self.lines, m.lines):`
`-            line2.update(line1)`
`-        return m`
`-`
`-    def solve(self, vector):`
`-        """Solves  'self * [x1...xn] == vector'; returns the list [x1...xn].`
`-        Raises ValueError if no solution or indeterminate.`
`-        """`
`-        vector = list(vector)`
`-        lines = [line.copy() for line in self.lines]`
`-        columns = [{} for i in range(len(vector))]`
`-        for i, line in enumerate(lines):`
`-            for j, a in line.items():`
`-                columns[j][i] = a`
`-        lines_left = dict.fromkeys(range(len(self.lines)))`
`-        nrows = []`
`-        for ncol in range(len(vector)):`
`-            currentcolumn = columns[ncol]`
`-            lst = [(abs(a), i) for (i, a) in currentcolumn.items()`
`-                               if i in lines_left]`
`-            _, nrow = min(lst)    # ValueError -> no solution`
`-            nrows.append(nrow)`
`-            del lines_left[nrow]`
`-            line1 = lines[nrow]`
`-            mina = line1[ncol]`
`-            for _, i in lst:`
`-                if i != nrow:`
`-                    line2 = lines[i]`
`-                    a = line2.pop(ncol)`
`-                    #del currentcolumn[i]  -- but currentcolumn no longer used`
`-                    factor = a / mina`
`-                    vector[i] -= factor*vector[nrow]`
`-                    for col in line1:`
`-                        if col > ncol:`
`-                            value = line2.get(col, 0) - factor*line1[col]`
`-                            if abs(value) > EPSILON:`
`-                                line2[col] = columns[col][i] = value`
`-                            else:`
`-                                del line2[col]`
`-                                del columns[col][i]`
`-        solution = [None] * len(vector)`
`-        for i in range(len(vector)-1, -1, -1):`
`-            row = nrows[i]`
`-            line = lines[row]`
`-            total = vector[row]`
`-            for j, a in line.items():`
`-                if j != i:`
`-                    total -= a * solution[j]`
`-            solution[i] = total / line[i]`
`-        return solution`
`-`
`-`
`-def test_sparsemat1():`
`-    import py`
`-    M = SparseMatrix(4)`
`-    M[0,0] = M[1,1] = M[2,2] = M[3,3] = 1`
`-    M[0,1] = -1.0`
`-    M[1,2] = M[1,3] = -0.5`
`-    M[2,1] = -1.0`
`-    res = M.solve([4, 5, 4, 1])`
`-    assert res == [19, 15, 19, 1]`
`-`
`-def test_sparsemat2():`
`-    import py`
`-    M = SparseMatrix(4)`
`-    M[0,0] = M[1,1] = M[2,2] = M[3,3] = 1`
`-    M[0,1] = -1.0`
`-    M[1,2] = M[1,3] = -0.5`
`-    M[2,1] = M[2,3] = -0.5`
`-    res = M.solve([6, 3, 6, 0])`
`-    assert res == [14, 8, 10, 0]`

File pypy/translator/backendopt/ssa.py

` from pypy.objspace.flow.model import Variable, mkentrymap, flatten, Block`
`-from pypy.tool.unionfind import UnionFind`
`+from pypy.tool.algo.unionfind import UnionFind`
` `
` class DataFlowFamilyBuilder:`
`     """Follow the flow of the data in the graph.  Builds a UnionFind grouping`