1. SCons
  2. Core
  3. SCons

Commits

Steven Knight  committed a314d0a

Don't create a Node for every file we try to find during scan.

  • Participants
  • Parent commits 59afeb4
  • Branches default

Comments (0)

Files changed (7)

File bin/memlogs.py

View file
+#!/usr/bin/env python
+#
+# Copyright (c) 2005 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import getopt
+import sys
+
+filenames = sys.argv[1:]
+
+if not filenames:
+    print """Usage:  memlogs.py file [...]
+
+Summarizes the --debug=memory numbers from one or more build logs.
+"""
+    sys.exit(0)
+
+fmt = "%12s %12s %12s %12s    %s"
+
+print fmt % ("pre-read", "post-read", "pre-build", "post-build", "")
+
+for fname in sys.argv[1:]:
+    lines = [l for l in open(fname).readlines() if l[:7] == 'Memory ']
+    t = tuple([l.split()[-1] for l in lines]) + (fname,)
+    print fmt % t

File bin/objcounts.py

View file
+#!/usr/bin/env python
+#
+# Copyright (c) 2005 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import re
+import sys
+
+filenames = sys.argv[1:]
+
+if len(sys.argv) != 3:
+    print """Usage:  objcounts.py file1 file2
+
+Compare the --debug=object counts from two build logs.
+"""
+    sys.exit(0)
+
+def fetch_counts(fname):
+    contents = open(fname).read()
+    m = re.search('\nObject counts:\n(.*)\n[^\s]', contents, re.S)
+    lines = m.group().split('\n')
+    list = [l.split() for l in lines if re.match('\s+\d', l)]
+    d = {}
+    for l in list:
+        d[l[-1]] = map(int, l[:-1])
+    return d
+
+c1 = fetch_counts(sys.argv[1])
+c2 = fetch_counts(sys.argv[2])
+
+common = {}
+for k in c1.keys():
+    try:
+        common[k] = (c1[k], c2[k])
+    except KeyError:
+        pass
+    else:
+        del c1[k]
+        del c2[k]
+
+def diffstr(c1, c2):
+    try:
+        d = c2 - c1
+    except TypeError:
+        d = ''
+    else:
+        if d:
+            d = '[%+d]' % d
+        else:
+            d = ''
+    return " %5s/%-5s %-8s" % (c1, c2, d)
+
+def printline(c1, c2, classname):
+    print \
+          diffstr(c1[2], c2[2]) + \
+          diffstr(c1[3], c2[3]) + \
+          ' ' + classname
+
+keys = common.keys()
+keys.sort()
+for k in keys:
+    c = common[k]
+    printline(c[0], c[1], k)
+
+keys = c1.keys()
+keys.sort()
+for k in keys:
+    printline(c1[k], ['--']*4, k)
+
+keys = c2.keys()
+keys.sort()
+for k in keys:
+    printline(['--']*4, c2[k], k)

File src/CHANGES.txt

View file
   - Cache evaluation of LazyActions so we don't create a new object
     for each invocation.
 
+  - When scanning, don't create Nodes for include files that don't
+    actually exist on disk.
+
   From Wayne Lee:
 
   - Avoid "maximum recursion limit" errors when removing $(-$) pairs

File src/engine/SCons/Node/FS.py

View file
             if dir.srcdir:
                 srcnode = self.fs.Entry(name, dir.srcdir,
                                         klass=self.__class__)
-                if srcnode.is_under(dir):
-                    # Shouldn't source from something in the build
-                    # path: probably means build_dir is under
-                    # src_dir and we are reflecting.
-                    break
                 return srcnode
             name = dir.name + os.sep + name
             dir=dir.get_dir()
             self.__setTopLevelDir()
             self.Top.addRepository(d)
 
-    def Rsearch(self, path, clazz=_classEntry, cwd=None):
+    def do_Rsearch(self, path, func, clazz=_classEntry, cwd=None, verbose=lambda x: x):
         """Search for something in a Repository.  Returns the first
         one found in the list, or None if there isn't one.
         __cacheable__
         """
         if isinstance(path, SCons.Node.Node):
             return path
+
+        path, dir = self.__transformPath(path, cwd)
+        d, name = os.path.split(path)
+        norm_name = _my_normcase(name)
+        if d:
+            dir = dir.Dir(d)
+        try:
+            node = dir.entries[norm_name]
+        except KeyError:
+            node = dir.node_on_disk(name, clazz)
         else:
-            name, d = self.__transformPath(path, cwd)
-            n = self._doLookup(clazz, name, d)
-            if n.exists():
-                return n
-            if isinstance(n, Dir):
-                # If n is a Directory that has Repositories directly
-                # attached to it, then any of those is a valid Repository
-                # path.  Return the first one that exists.
-                reps = filter(lambda x: x.exists(), n.getRepositories())
-                if len(reps):
-                    return reps[0]
-            d = n.get_dir()
-            name = n.name
-            # Search repositories of all directories that this file is under.
-            while d:
-                for rep in d.getRepositories():
-                    try:
-                        rnode = self._doLookup(clazz, name, rep)
-                        # Only find the node if it exists and it is not
-			# a derived file.  If for some reason, we are
-			# explicitly building a file IN a Repository, we
-			# don't want it to show up in the build tree.
-			# This is usually the case with BuildDir().
-			# We only want to find pre-existing files.
-                        if rnode.exists() and \
-                           (isinstance(rnode, Dir) or not rnode.is_derived()):
-                            return rnode
-                    except TypeError:
-                        pass # Wrong type of node.
-                # Prepend directory name
-                name = d.name + os.sep + name
-                # Go up one directory
-                d = d.get_dir()
+            node = func(node)
+            if node:
+                dir = node.get_dir()
+        if node:
+            verbose("... FOUND '%s' in '%s'\n" % (name, dir))
+            return node
+        fname = '.'
+        while dir:
+            for rep in dir.getRepositories():
+                rdir = rep.Dir(fname)
+                try:
+                    node = rdir.entries[norm_name]
+                except KeyError:
+                    node = rdir.node_on_disk(name, clazz)
+                else:
+                    node = func(node)
+                if node:
+                    verbose("... FOUND '%s' in '%s'\n" % (name, dir))
+                    return node
+            fname = dir.name + os.sep + fname
+            dir = dir.get_dir()
         return None
 
+    def Rsearch(self, path, clazz=_classEntry, cwd=None):
+        def func(node):
+            if node.exists() and \
+               (isinstance(node, Dir) or not node.is_derived()):
+                   return node
+            return None
+        return self.do_Rsearch(path, func, clazz, cwd)
+
     def Rsearchall(self, pathlist, must_exist=1, clazz=_classEntry, cwd=None):
         """Search for a list of somethings in the Repository list.
         __cacheable__
         """
-        ret = []
+        result = []
         if SCons.Util.is_String(pathlist):
             pathlist = string.split(pathlist, os.pathsep)
         if not SCons.Util.is_List(pathlist):
             pathlist = [pathlist]
+
+        if must_exist:
+            select = lambda x, clazz=clazz: isinstance(x, clazz) and x.exists()
+        else:
+            select = lambda x, clazz=clazz: isinstance(x, clazz)
+
         for path in filter(None, pathlist):
             if isinstance(path, SCons.Node.Node):
-                ret.append(path)
+                result.append(path)
+                continue
+
+            path, dir = self.__transformPath(path, cwd)
+            d, name = os.path.split(path)
+            norm_name = _my_normcase(name)
+            if d:
+                dir = dir.Dir(d)
+            try:
+                node = dir.entries[norm_name]
+            except KeyError:
+                # If there's no Node on disk, we'll filter
+                # out the returned None below.
+                if must_exist:
+                    n = dir.node_on_disk(name, clazz)
+                else:
+                    n = self._doLookup(clazz, name, dir)
+                    dir.srcdir_duplicate(name, clazz)
+                result.append(n)
             else:
-                name, d = self.__transformPath(path, cwd)
-                n = self._doLookup(clazz, name, d)
-                if not must_exist or n.exists():
-                    ret.append(n)
-                if isinstance(n, Dir):
-                    # If this node is a directory, then any repositories
-                    # attached to this node can be repository paths.
-                    ret.extend(filter(lambda x, me=must_exist, clazz=clazz: isinstance(x, clazz) and (not me or x.exists()),
-                                      n.getRepositories()))
-                    
-                d = n.get_dir()
-                name = n.name
-                # Search repositories of all directories that this file
-                # is under.
-                while d:
-                    for rep in d.getRepositories():
-                        try:
-                            rnode = self._doLookup(clazz, name, rep)
-                            # Only find the node if it exists (or
-                            # must_exist is zero) and it is not a
-                            # derived file.  If for some reason, we
-                            # are explicitly building a file IN a
-                            # Repository, we don't want it to show up in
-                            # the build tree.  This is usually the case
-                            # with BuildDir().  We only want to find
-                            # pre-existing files.
-                            if (not must_exist or rnode.exists()) and \
-                               (not rnode.is_derived() or isinstance(rnode, Dir)):
-                                ret.append(rnode)
-                        except TypeError:
-                            pass # Wrong type of node.
-                    # Prepend directory name
-                    name = d.name + os.sep + name
-                    # Go up one directory
-                    d = d.get_dir()
-        return ret
+                if not must_exist or node.exists():
+                    result.append(node)
+                if isinstance(node, Dir):
+                    result.extend(filter(select, node.getRepositories()))
+                if node:
+                    dir = node.get_dir()
+            fname = '.'
+            while dir:
+                for rep in dir.getRepositories():
+                    rdir = rep.Dir(fname)
+                    try:
+                        node = rdir.entries[norm_name]
+                    except KeyError:
+                        # If there's no Node on disk, we'll filter
+                        # out the returned None below.
+                        if must_exist:
+                            n = rdir.node_on_disk(name, clazz)
+                        else:
+                            n = self._doLookup(clazz, name, rdir)
+                            rdir.srcdir_duplicate(name, clazz)
+                        result.append(n)
+                    else:
+                        if (not must_exist or node.exists()) and \
+                           (isinstance(node, Dir) or not node.is_derived()):
+                            result.append(node)
+                fname = dir.name + os.sep + fname
+                dir = dir.get_dir()
+
+        return filter(None, result)
 
     def CacheDir(self, path):
         self.CachePath = path
         sccspath = 'SCCS' + os.sep + 's.'+name
         return self.entry_exists_on_disk(sccspath)
 
+    def srcdir_duplicate(self, name, clazz):
+        dname = '.'
+        dir = self
+        while dir:
+            if dir.srcdir:
+                srcdir = dir.srcdir.Dir(dname)
+                if srcdir.entry_exists_on_disk(name):
+                    srcnode = self.fs._doLookup(clazz, name, srcdir)
+                    if self.duplicate:
+                        node = self.fs._doLookup(clazz, name, self)
+                        node.do_duplicate(srcnode)
+                        return node
+                    else:
+                        return srcnode
+            dname = dir.name + os.sep + dname
+            dir = dir.get_dir()
+        return None
+
+    def node_on_disk(self, name, clazz):
+        if self.entry_exists_on_disk(name) or \
+           self.sccs_on_disk(name) or \
+           self.rcs_on_disk(name):
+            try:
+                return self.fs._doLookup(clazz, name, self)
+            except TypeError:
+                pass
+        return self.srcdir_duplicate(name, clazz)
+
 class RootDir(Dir):
     """A class for the root directory of a file system.
 
     else:
         verbose = lambda x: x
 
-    retval = None
+    filedir, filename = os.path.split(filename)
+    if filedir:
+        lookup_dir = lambda d, fd=filedir: d.Dir(fd)
+    else:
+        lookup_dir = lambda d: d
 
     if callable(paths):
         paths = paths()
 
-    for dir in paths:
-        verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
-        try:
-            node = node_factory(filename, dir)
-            # Return true if the node exists or is a derived node.
+    # Give Entries a chance to morph into Dirs.
+    paths = map(lambda p: p.must_be_a_Dir(), paths)
+
+    for pathdir in paths:
+        verbose("looking for '%s' in '%s' ...\n" % (filename, pathdir))
+        dir = lookup_dir(pathdir)
+        def func(node):
             if node.is_derived() or \
                node.is_pseudo_derived() or \
                (isinstance(node, SCons.Node.FS.Base) and node.exists()):
-                retval = node
-                verbose("... FOUND '%s' in '%s'\n" % (filename, dir))
-                break
-        except TypeError:
-            # If we find a directory instead of a file, we don't care
-            pass
+                    return node
+            return None
 
-    return retval
+        node = default_fs.do_Rsearch(filename, func, File, dir, verbose)
+        if node:
+            return node
+
+        dirname = '.'
+        while dir:
+            if dir.srcdir:
+                d = dir.srcdir.Dir(dirname)
+                if d.is_under(dir):
+                    # Shouldn't source from something in the build path:
+                    # build_dir is probably under src_dir, in which case
+                    # we are reflecting.
+                    break
+                node = dir.fs.do_Rsearch(filename, func, File, d, verbose)
+                if node:
+                    return File(filename, dir.Dir(dirname), dir.fs)
+            dirname = dir.name + os.sep + dirname
+            dir = dir.get_dir()
+
+    return None
 
 def find_files(filenames, paths, node_factory = default_fs.File):
     """

File src/engine/SCons/Node/FSTests.py

View file
         fs.BuildDir('build/var3', 'src', duplicate=0)
         d1 = fs.Dir('build/var3')
         r = d1.rdir()
-        s = fs.Dir('src')
-        assert r == s, "%s != %s" % (r, s)
+        assert r == d1, "%s != %s" % (r, d1)
 
         # verify the link creation attempts in file_link()
         class LinkSimulator :
                 'work/src/b1/b2/f' : 'work/src/f',
                 'work/src/b1/b2/b1' : 'work/src/b1/',
                 'work/src/b1/b2/b1/f' : 'work/src/b1/f',
+                'work/src/b1/b2/b1/b2' : 'work/src/b1/b2',
+                'work/src/b1/b2/b1/b2/f' : 'work/src/b1/b2/f',
         }
 
         alter_map = {
         list = fs.Rsearchall('#d2')
         assert list == [], list
 
+        fs.File('d2').built() # Clear exists cache
         test.subdir(['work', 'd2'])
-        fs.File('d2').built() # Clear exists cache
         list = fs.Rsearchall('d2')
         assert map(str, list) == ['d2'], list
 
+        fs.File('../rep2/d2').built() # Clear exists cache
         test.subdir(['rep2', 'd2'])
-        fs.File('../rep2/d2').built() # Clear exists cache
         list = fs.Rsearchall('d2')
         assert map(str, list) == ['d2', test.workpath('rep2', 'd2')], list
 
+        fs.File('../rep1/d2').built() # Clear exists cache
         test.subdir(['rep1', 'd2'])
-        fs.File('../rep1/d2').built() # Clear exists cache
         list = fs.Rsearchall('d2')
         assert map(str, list) == ['d2',
                                   test.workpath('rep1', 'd2'),
         list = fs.Rsearchall(['d3', 'd4'])
         assert list == [], list
 
+        fs.File('d3').built() # Clear exists cache
         test.subdir(['work', 'd3'])
-        fs.File('d3').built() # Clear exists cache
         list = map(str, fs.Rsearchall(['d3', 'd4']))
         assert list == ['d3'], list
 
+        fs.File('../rep3/d4').built() # Clear exists cache
         test.subdir(['rep3', 'd4'])
-        fs.File('../rep3/d4').built() # Clear exists cache
         list = map(str, fs.Rsearchall(['d3', 'd4']))
         assert list == ['d3', test.workpath('rep3', 'd4')], list
 
         """Testing find_file function"""
         test = TestCmd(workdir = '')
         test.write('./foo', 'Some file\n')
+        test.subdir('bar')
+        test.write(['bar', 'on_disk'], 'Another file\n')
         fs = SCons.Node.FS.FS(test.workpath(""))
         os.chdir(test.workpath("")) # FS doesn't like the cwd to be something other than its root
         node_derived = fs.File(test.workpath('bar/baz'))
                      "  find_file: ... FOUND 'baz' in 'bar'\n"
             c = sio.getvalue()
             assert c == expect, c
+
+            sio = StringIO.StringIO()
+            sys.stdout = sio
+            SCons.Node.FS.find_file('on_disk', paths, fs.File, verbose=1)
+            expect = "  find_file: looking for 'on_disk' in '.' ...\n" + \
+                     "  find_file: looking for 'on_disk' in 'bar' ...\n" + \
+                     "  find_file: ... FOUND 'on_disk' in 'bar'\n"
+            c = sio.getvalue()
+            assert c == expect, c
         finally:
             sys.stdout = save_sys_stdout
 

File src/engine/SCons/Scanner/__init__.py

View file
         if callable(path): path = path()
         n = SCons.Node.FS.find_file(include,
                                     (source_dir,) + tuple(path),
-                                    self.fs.File)
+                                    SCons.Node.FS.File)
         return n, include
 
     def sort_key(self, include):

File test/builddir-reflect.py

View file
          match=TestSCons.match_re,
          )
 
-test.must_exist(['dir1', 'dir2', 'foo.h'])
+# Note that we don't check for the existence of dir1/dir2/foo.h, because
+# this bad cpppath will expand to dir1/dir2/dir1/dir2, which means it
+# won't pick up the srcdir copy of dir1/dir2/foo.h.  That's all right,
+# we just need to make sure it doesn't create dir1/dir2/dir1/dir2/foo.h.
 test.must_exist(['dir1', 'dir2', 'src1', 'foo.c'])
 test.must_not_exist(['dir1', 'dir2', 'dir1', 'dir2', 'foo.h'])