Commits

Steven Knight  committed 63cee0a

Run setup.py on the unpacked .tar.gz for testing.

  • Parent commits a712546


Files changed (72)

 #
 $project = 'scons';
 
-$env = new cons( ENV => {
-			  AEGIS_PROJECT => $ENV{AEGIS_PROJECT},
-			  PATH => $ENV{PATH},
-			} );
-
 Default qw( . );
 
 #
 map {s/^[CD]//, s/^0*(\d\d)$/$1/} @arr;
 $version = join('.', @arr);
 
-#
-# Use %(-%) around the date so date changes don't cause rebuilds.
-#
-$sed_cmd = "sed" .
-       " %( -e 's+__DATE__+$date+' %)" .
-       " -e 's+__DEVELOPER__+$developer+'" .
-       " -e 's+__REVISION__+$revision+'" .
-       " -e 's+__VERSION__+$version+'" .
-       " %< > %>";
+chomp($python_ver = `python -c 'import sys; print sys.version[0:3]'`);
 
-#
-# Run everything in the MANIFEST through the sed command we concocted.
-#
-chomp(@files = `cat src/MANIFEST`);
 
-foreach $file (@files) {
-    Command $env "build/$file", "src/$file", $sed_cmd;
+use Cwd;
+use File::Spec;
+$test_dir = File::Spec->catfile(cwd, "build", "test");
+
+%package_name = (
+    'script'	=> $project,
+    'engine'	=> "$project-pylib",
+);
+
+$test_bin_dir = File::Spec->catfile($test_dir, "bin");
+$test_lib_dir = File::Spec->catfile($test_dir,
+				    "lib",
+				    "python${python_ver}",
+				    "site-packages");
+
+%install_dir = (
+    'script'	=> $test_bin_dir,
+    'engine'	=> $test_lib_dir,
+);
+
+$env = new cons( ENV => {
+			  AEGIS_PROJECT => $ENV{AEGIS_PROJECT},
+			  PATH => $ENV{PATH},
+			},
+
+		 TEST_BIN_DIR	=> $test_bin_dir,
+		 TEST_LIB_DIR	=> $test_lib_dir,
+
+		 DATE		=> $date,
+		 DEVELOPER	=> $developer,
+		 REVISION	=> $revision,
+		 VERSION	=> $version,
+
+		 SED		=> 'sed',
+				   # Use %(-%) around the date so date
+				   # changes don't cause rebuilds.
+		 SEDFLAGS	=> " %( -e 's+__DATE__+%DATE+' %)" .
+				   " -e 's+__DEVELOPER__+%DEVELOPER+'" .
+				   " -e 's+__REVISION__+%REVISION+'" .
+				   " -e 's+__VERSION__+%VERSION+'",
+		 SEDCOM		=> "%SED %SEDFLAGS %< > %>",
+		);
+
+my @src_deps;
+
+for $dir ('script', 'engine') {
+
+    my $pkg = $package_name{$dir};
+    my $install = $install_dir{$dir};
+
+    my $build = "build/$dir";
+    my $src = "src/$dir";
+
+    my @files;
+    chomp(@files = `cat src/$dir/MANIFEST`);
+
+    #
+    # Run everything in the MANIFEST through the sed command we concocted.
+    #
+    my $file;
+    foreach $file (@files) {
+	$env->Command("$build/$file", "$src/$file", "%SEDCOM");
+    }
+
+    #
+    # Use the Python distutils to generate the packages.
+    #
+    my $tar_gz = "$build/dist/$pkg-$version.tar.gz";
+
+    push(@src_deps, $tar_gz);
+
+    my @setup_args = ('bdist sdist');
+
+    my @targets = (
+	"$build/dist/$pkg-$version.linux-i686.tar.gz",
+	$tar_gz,
+    );
+
+    if ($rpm) {
+	push(@setup_args, 'bdist_rpm');
+
+	# XXX "$build/build/bdist.linux-i686/rpm/SOURCES/$pkg-$version.tar.gz",
+	# XXX "$build/build/bdist.linux-i686/rpm/SPECS/$pkg.spec",
+	push(@targets,
+	    "$build/dist/$pkg-$version-1.src.rpm",
+	    "$build/dist/$pkg-$version-1.noarch.rpm",
+	);
+    };
+
+    $env->Command([@targets],
+	      map("$build/$_", @files),
+	      qq(rm -rf $build/build $build/dist/*
+		 cd $build && python setup.py @setup_args)
+	     );
+
+    $env->Depends([@targets], "$build/MANIFEST");
+
+    $env->Install("build/dist", @targets);
+
+    #
+    # Unpack the .tar.gz created by the distutils into build/unpack.
+    #
+    my $unpack = "build/unpack";
+
+    my @unpack_files = map("$unpack/$pkg-$version/$_", @files);
+
+    Command $env [@unpack_files], $tar_gz, qq(
+	rm -rf $unpack/$pkg-$version
+	tar zxf %< -C $unpack
+    );
+
+    #
+    # Run setup.py in the unpacked subdirectory to "install" everything
+    # into our build/test subdirectory.  Auxiliary modules that we need
+    # (TestCmd.py, TestSCons.py, unittest.py) will be copied in by
+    # etc/Conscript.  The runtest.py script will set PYTHONPATH so that
+    # the tests only look under build/test.  This makes sure that our
+    # tests pass with what we really packaged, not because of something
+    # hanging around in the development directory.
+    #
+    my %seen;
+    map($seen{$_}++, "MANIFEST", "setup.py");
+    @test_files = map(File::Spec->catfile($install, $_),
+    			grep($_ =~ /\.py$/ && ! $seen{$_}++, @files));
+
+    Command $env [@test_files], @unpack_files, qq(
+	rm -rf $install
+	cd $unpack/$pkg-$version && python setup.py install --prefix=$test_dir
+    );
 }
 
-#
-# Use the Python distutils to generate the packages.
-#
-$tar_gz = "build/dist/$project-$version.tar.gz";
-
-@setup_args = ('bdist sdist');
-
-@targets = (
-    "build/dist/$project-$version.linux-i686.tar.gz",
-    $tar_gz,
-);
-
-if ($rpm) {
-    push(@setup_args, 'bdist_rpm');
-
-    push(@targets,
-	"build/build/bdist.linux-i686/rpm/SOURCES/$project-$version.tar.gz",
-	"build/build/bdist.linux-i686/rpm/SPECS/$project.spec",
-	"build/dist/$project-$version-1.src.rpm",
-	"build/dist/$project-$version-1.noarch.rpm",
-    );
-};
-
-
-$env->Command([@targets],
-	      map("build/$_", @files),
-	      qq(rm -rf build/build build/dist/*
-		 cd build && python setup.py @setup_args)
-	     );
-
-$env->Depends([@targets], 'build/MANIFEST');
-
-#
-# Unpack the .tar.gz created by the distutils into build/test, and
-# add the TestCmd.py module.  The runtest.py script will set PYTHONPATH
-# so that the tests only look under build/test.  This makes sure that
-# our tests pass with what we really packaged, not because of something
-# hanging around in the development directory.
-#
-$test_dir = "build/test";
-
-@test_files = map("$test_dir/$project-$version/$_", @files);
-
-Command $env [@test_files], $tar_gz, qq(
-    rm -rf $test_dir/$project-$version
-    tar zxf %< -C $test_dir
-);
-
-Export qw( env test_dir );
+Export qw( env );
 
 Build "etc/Conscript";
 
 if ($jw) {
     Link 'build/doc' => 'doc';
 
-    Export qw( date env revision version );
-
     Build 'build/doc/Conscript';
 }
 
 	chomp(@src_files);
 
 	foreach $file (@src_files) {
-	    $env->Command("build/$project-src/$file", $file, $sed_cmd);
+	    $env->Command("build/$project-src/$file", $file, "%SEDCOM");
 	}
 
 	$env->Command("build/dist/$project-src-$version.tar.gz",
-    		      $tar_gz,
+    		      @src_deps,
 		      map("build/$project-src/$_", @src_files),
 		      qq(
 		rm -rf build/$project-src-$version

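For orientation, here is a rough Python sketch of the per-package flow the Conscript above drives for each of 'script' and 'engine' (the package name, version, and directory values below are illustrative, not taken from the commit): build a source package with the distutils, unpack the generated tarball, and install it into the isolated build/test tree that runtest.py points PYTHONPATH at.

    import os

    # Illustrative values -- the real names come from %package_name and $version.
    pkg, version = 'scons-pylib', '0.01'
    build  = 'build/engine'
    unpack = 'build/unpack'
    prefix = os.path.join(os.getcwd(), 'build', 'test')

    # 1. Build the packages with the Python distutils.
    os.system('cd %s && python setup.py bdist sdist' % build)

    # 2. Unpack the .tar.gz created by the distutils into build/unpack.
    os.system('tar zxf %s/dist/%s-%s.tar.gz -C %s' % (build, pkg, version, unpack))

    # 3. "Install" the unpacked package into build/test; the tests then run
    #    against build/test/bin and build/test/lib/pythonX.Y/site-packages.
    os.system('cd %s/%s-%s && python setup.py install --prefix=%s'
              % (unpack, pkg, version, prefix))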
File doc/Conscript

 # Conscript file for building SCons documentation.
 #
 
-Import qw(
-    date
-    env
-    revision
-    version
-);
+Import qw( env );
 
 #
 #
 #
-$doc_tar_gz = "#build/dist/scons-doc-$version.tar.gz";
+$doc_tar_gz = "#build/dist/scons-doc-${\$env->{VERSION}}.tar.gz";
 
 #
 # We'll only try to build text files (for some documents)
 <!--
 THIS IS AN AUTOMATICALLY-GENERATED FILE.  DO NOT EDIT.
 -->
-<!ENTITY build_date "$date">
-<!ENTITY build_version "$version">
-<!ENTITY build_revision "$revision">
+<!ENTITY build_date "${\$env->{DATE}}">
+<!ENTITY build_version "${\$env->{VERSION}}">
+<!ENTITY build_revision "${\$env->{REVISION}}">
 _EOF_
 close(FILE);
 

File etc/Conscript

 # of stuff to work on SCons.
 #
 
-Import qw( env test_dir );
+Import qw( env );
 
 @modules = qw(TestCmd.py TestSCons.py unittest.py);
 
-$env->Install("#$test_dir", @modules);
+$env->Install($env->{TEST_LIB_DIR}, @modules);

File etc/TestSCons.py

 	    print "STDERR ============"
 	    print self.stderr()
 	    raise
+	if self.status:
+	    print self.program + " returned " + str(self.status >> 8)
+	    print "STDERR ============"
+	    print self.stderr()
+	    raise TestFailed
 	if stdout and not self.match(self.stdout(), stdout):
 	    print "Expected STDOUT =========="
 	    print stdout
 
     version = aegis_to_version(version)
 
-    build_test = os.path.join(cwd, "build", "test")
-    scons_dir = os.path.join(build_test, "scons-" + version)
+    scons_dir = os.path.join(cwd, 'build', 'test', 'bin')
 
-    os.environ['PYTHONPATH'] = string.join([scons_dir,
-					    build_test],
-					   os.pathsep)
+    os.environ['PYTHONPATH'] = os.path.join(cwd,
+					    'build',
+					    'test',
+					    'lib',
+					    'python' + sys.version[0:3],
+					    'site-packages')
 
 else:
 
-    scons_dir = os.path.join(cwd, 'src')
+    scons_dir = os.path.join(cwd, 'src', 'script')
 
-    os.environ['PYTHONPATH'] = string.join([os.path.join(cwd, 'src'),
+    os.environ['PYTHONPATH'] = string.join([os.path.join(cwd, 'src', 'engine'),
 					    os.path.join(cwd, 'etc')],
 					   os.pathsep)
 

File src/MANIFEST

-MANIFEST
-scons/__init__.py
-scons/Builder.py
-scons/Defaults.py
-scons/Environment.py
-scons/Errors.py
-scons/Job.py
-scons/exitfuncs.py
-scons/Node/__init__.py
-scons/Node/FS.py
-scons/Scanner/__init__.py
-scons/Scanner/C.py
-scons/Sig/__init__.py
-scons/Sig/MD5.py
-scons/Sig/TimeStamp.py
-scons.py
-setup.py

File src/engine/MANIFEST

+MANIFEST
+SCons/__init__.py
+SCons/Builder.py
+SCons/Defaults.py
+SCons/Environment.py
+SCons/Errors.py
+SCons/Job.py
+SCons/exitfuncs.py
+SCons/Node/__init__.py
+SCons/Node/FS.py
+SCons/Scanner/__init__.py
+SCons/Scanner/C.py
+SCons/Sig/__init__.py
+SCons/Sig/MD5.py
+SCons/Sig/TimeStamp.py
+setup.py

File src/engine/SCons/.aeignore

+*,D
+*.pyc
+.*.swp
+.consign

File src/engine/SCons/Builder.py

+"""SCons.Builder
+
+XXX
+
+"""
+
+__revision__ = "Builder.py __REVISION__ __DATE__ __DEVELOPER__"
+
+
+
+import os
+import SCons.Node.FS
+import types
+
+
+
+class Builder:
+    """Base class for Builders, objects that create output
+    nodes (files) from input nodes (files).
+    """
+
+    def __init__(self,	name = None,
+			action = None,
+			input_suffix = None,
+			output_suffix = None,
+			node_class = SCons.Node.FS.File):
+	self.name = name
+	self.action = Action(action)
+	self.insuffix = input_suffix
+	self.outsuffix = output_suffix
+	self.node_class = node_class
+	if not self.insuffix is None and self.insuffix[0] != '.':
+	    self.insuffix = '.' + self.insuffix
+	if not self.outsuffix is None and self.outsuffix[0] != '.':
+	    self.outsuffix = '.' + self.outsuffix
+
+    def __cmp__(self, other):
+	return cmp(self.__dict__, other.__dict__)
+
+    def __call__(self, env, target = None, source = None):
+	node = SCons.Node.FS.lookup(self.node_class, target)
+	node.builder_set(self)
+	node.env_set(env)
+	node.sources = source	# XXX REACHING INTO ANOTHER OBJECT
+	return node
+
+    def execute(self, **kw):
+	"""Execute a builder's action to create an output object.
+	"""
+	apply(self.action.execute, (), kw)
+
+
+
+print_actions = 1;
+execute_actions = 1;
+
+
+
+def Action(act):
+    """A factory for action objects."""
+    if type(act) == types.FunctionType:
+	return FunctionAction(act)
+    elif type(act) == types.StringType:
+	return CommandAction(act)
+    else:
+	return None
+
+class ActionBase:
+    """Base class for actions that create output objects.
+    
+    We currently expect Actions will only be accessible through
+    Builder objects, so they don't yet merit their own module."""
+    def __cmp__(self, other):
+	return cmp(self.__dict__, other.__dict__)
+
+    def show(self, string):
+	print string
+
+class CommandAction(ActionBase):
+    """Class for command-execution actions."""
+    def __init__(self, string):
+	self.command = string
+
+    def execute(self, **kw):
+	cmd = self.command % kw
+	if print_actions:
+	    self.show(cmd)
+	if execute_actions:
+	    os.system(cmd)
+
+class FunctionAction(ActionBase):
+    """Class for Python function actions."""
+    def __init__(self, function):
+	self.function = function
+
+    def execute(self, **kw):
+	# if print_actions:
+	# XXX:  WHAT SHOULD WE PRINT HERE?
+	if execute_actions:
+	    self.function(kw)

File src/engine/SCons/BuilderTests.py

+__revision__ = "BuilderTests.py __REVISION__ __DATE__ __DEVELOPER__"
+
+import sys
+import unittest
+
+import TestCmd
+import SCons.Builder
+
+
+# Initial setup of the common environment for all tests,
+# a temporary working directory containing a
+# script for writing arguments to an output file.
+#
+# We don't do this as a setUp() method because it's
+# unnecessary to create a separate directory and script
+# for each test, they can just use the one.
+test = TestCmd.TestCmd(workdir = '')
+
+test.write('act.py', """import os, string, sys
+f = open(sys.argv[1], 'w')
+f.write("act.py: " + string.join(sys.argv[2:]) + "\\n")
+f.close()
+sys.exit(0)
+""")
+
+act_py = test.workpath('act.py')
+outfile = test.workpath('outfile')
+
+
+class BuilderTestCase(unittest.TestCase):
+
+    def test_action(self):
+	"""Test Builder creation
+
+	Verify that we can retrieve the supplied action attribute.
+	"""
+	builder = SCons.Builder.Builder(action = "foo")
+	assert builder.action.command == "foo"
+
+    def test_cmp(self):
+	"""Test simple comparisons of Builder objects
+	"""
+	b1 = SCons.Builder.Builder(input_suffix = '.o')
+	b2 = SCons.Builder.Builder(input_suffix = '.o')
+	assert b1 == b2
+	b3 = SCons.Builder.Builder(input_suffix = '.x')
+	assert b1 != b3
+	assert b2 != b3
+
+    def test_execute(self):
+	"""Test execution of simple Builder objects
+	
+	One Builder is a string that executes an external command,
+	and one is an internal Python function.
+	"""
+	cmd = "python %s %s xyzzy" % (act_py, outfile)
+	builder = SCons.Builder.Builder(action = cmd)
+	builder.execute()
+	assert test.read(outfile, 'r') == "act.py: xyzzy\n"
+
+	def function(kw):
+	    import os, string, sys
+	    f = open(kw['out'], 'w')
+	    f.write("function\n")
+	    f.close()
+	    return not None
+
+	builder = SCons.Builder.Builder(action = function)
+	builder.execute(out = outfile)
+	assert test.read(outfile, 'r') == "function\n"
+
+    def test_insuffix(self):
+	"""Test Builder creation with a specified input suffix
+	
+	Make sure that the '.' separator is prepended to the
+	suffix if it isn't already present.
+	"""
+	builder = SCons.Builder.Builder(input_suffix = '.c')
+	assert builder.insuffix == '.c'
+	builder = SCons.Builder.Builder(input_suffix = 'c')
+	assert builder.insuffix == '.c'
+
+    def test_name(self):
+	"""Test Builder creation with a specified name
+	"""
+	builder = SCons.Builder.Builder(name = 'foo')
+	assert builder.name == 'foo'
+
+    def test_node_class(self):
+	"""Test a Builder that creates nodes of a specified class
+	"""
+	class Foo:
+		pass
+	builder = SCons.Builder.Builder(node_class = Foo)
+	assert builder.node_class is Foo
+
+    def test_outsuffix(self):
+	"""Test Builder creation with a specified output suffix
+
+	Make sure that the '.' separator is prepended to the
+	suffix if it isn't already present.
+	"""
+	builder = SCons.Builder.Builder(output_suffix = '.o')
+	assert builder.outsuffix == '.o'
+	builder = SCons.Builder.Builder(output_suffix = 'o')
+	assert builder.outsuffix == '.o'
+
+
+
+if __name__ == "__main__":
+    suite = unittest.makeSuite(BuilderTestCase, 'test_')
+    if not unittest.TextTestRunner().run(suite).wasSuccessful():
+	sys.exit(1)

File src/engine/SCons/Defaults.py

+"""SCons.Defaults
+
+Builders and other things for the local site.  Here's where we'll
+duplicate the functionality of autoconf until we move it into the
+installation procedure or use something like qmconf.
+
+"""
+
+__revision__ = "Defaults.py __REVISION__ __DATE__ __DEVELOPER__"
+
+
+
+import SCons.Builder
+
+
+
+Object = SCons.Builder.Builder(name = 'Object',
+				action = 'cc -c -o %(target)s %(source)s')
+Program = SCons.Builder.Builder(name = 'Program',
+				action = 'cc -o %(target)s %(source)s')
+
+Builders = [Object, Program]

File src/engine/SCons/Environment.py

+"""SCons.Environment
+
+XXX
+
+"""
+
+__revision__ = "Environment.py __REVISION__ __DATE__ __DEVELOPER__"
+
+
+
+import copy
+import re
+import types
+
+
+
+def Command():
+    pass	# XXX
+
+def Install():
+    pass	# XXX
+
+def InstallAs():
+    pass	# XXX
+
+
+
+_cv = re.compile(r'%([_a-zA-Z]\w*|{[_a-zA-Z]\w*})')
+_self = None
+
+
+
+def _deepcopy_atomic(x, memo):
+	return x
+copy._deepcopy_dispatch[types.ModuleType] = _deepcopy_atomic
+copy._deepcopy_dispatch[types.ClassType] = _deepcopy_atomic
+copy._deepcopy_dispatch[types.FunctionType] = _deepcopy_atomic
+copy._deepcopy_dispatch[types.MethodType] = _deepcopy_atomic
+copy._deepcopy_dispatch[types.TracebackType] = _deepcopy_atomic
+copy._deepcopy_dispatch[types.FrameType] = _deepcopy_atomic
+copy._deepcopy_dispatch[types.FileType] = _deepcopy_atomic
+
+
+
+class Environment:
+    """Base class for construction Environments.  These are
+    the primary objects used to communicate dependency and
+    construction information to the build engine.
+
+    Keyword arguments supplied when the construction Environment
+    is created are construction variables used to initialize the
+    Environment.
+    """
+
+    def __init__(self, **kw):
+	self.Dictionary = {}
+	if kw.has_key('BUILDERS'):
+	    builders = kw['BUILDERS']
+	    if not type(builders) is types.ListType:
+		kw['BUILDERS'] = [builders]
+	else:
+	    import SCons.Defaults
+	    kw['BUILDERS'] = SCons.Defaults.Builders[:]
+	self.Dictionary.update(copy.deepcopy(kw))
+
+	class BuilderWrapper:
+	    """Wrapper class that allows an environment to
+	    be associated with a Builder at instantiation.
+	    """
+	    def __init__(self, env, builder):
+		self.env = env
+		self.builder = builder
+	
+	    def __call__(self, target = None, source = None):
+		return self.builder(self.env, target, source)
+
+	    def execute(self, **kw):
+		apply(self.builder.execute, (), kw)
+
+	for b in kw['BUILDERS']:
+	    setattr(self, b.name, BuilderWrapper(self, b))
+
+
+
+    def __cmp__(self, other):
+	return cmp(self.Dictionary, other.Dictionary)
+
+    def Builders(self):
+	pass	# XXX
+
+    def Copy(self, **kw):
+	"""Return a copy of a construction Environment.  The
+	copy is like a Python "deep copy"--that is, independent
+	copies are made recursively of each object--except that
+	a reference is copied when an object is not deep-copyable
+	(like a function).  The copy shares no references to any
+	mutable objects with the original Environment.
+	"""
+	clone = copy.deepcopy(self)
+	apply(clone.Update, (), kw)
+	return clone
+
+    def Scanners(self):
+	pass	# XXX
+
+    def	Update(self, **kw):
+	"""Update an existing construction Environment with new
+	construction variables and/or values.
+	"""
+	self.Dictionary.update(copy.deepcopy(kw))
+
+    def subst(self, string):
+	"""Recursively interpolates construction variables from the
+	Environment into the specified string, returning the expanded
+	result.  Construction variables are specified by a % prefix
+	in the string and begin with an initial underscore or
+	alphabetic character followed by any number of underscores
+	or alphanumeric characters.  The construction variable names
+	may be surrounded by curly braces to separate the name from
+	trailing characters.
+	"""
+	global _self
+	_self = self	# XXX NOT THREAD SAFE, BUT HOW ELSE DO WE DO THIS?
+	def repl(m):
+	    key = m.group(1)
+	    if key[:1] == '{' and key[-1:] == '}':
+		key = key[1:-1]
+	    if _self.Dictionary.has_key(key): return _self.Dictionary[key]
+	    else: return ''
+	n = 1
+	while n != 0:
+	    string, n = _cv.subn(repl, string)
+	return string
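As a quick illustration of how the new modules fit together (a minimal sketch, assuming Python 2 and that the SCons package above is importable; 'hello.o' and 'hello.c' are made-up file names): a construction Environment picks up the default Object builder from SCons.Defaults, calling the builder creates a file Node, and building that Node runs the command action. Setting SCons.Builder.execute_actions to 0 prints the command instead of executing it.

    import SCons.Builder
    import SCons.Environment
    import SCons.Node.FS

    SCons.Node.FS.init()                 # establish the top-level directory node
    SCons.Builder.execute_actions = 0    # show the action without running cc

    env = SCons.Environment.Environment()      # picks up SCons.Defaults.Builders
    node = env.Object(target = 'hello.o', source = 'hello.c')
    node.build()                         # prints: cc -c -o hello.o hello.c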

File src/engine/SCons/EnvironmentTests.py

+__revision__ = "EnvironmentTests.py __REVISION__ __DATE__ __DEVELOPER__"
+
+import sys
+import unittest
+
+from SCons.Environment import *
+
+
+
+built_it = {}
+
+class Builder:
+    """A dummy Builder class for testing purposes.  "Building"
+    a target is simply setting a value in the dictionary.
+    """
+    def __init__(self, name = None):
+    	self.name = name
+
+    def execute(self, target = None, source = None):
+	built_it[target] = 1
+
+
+
+class EnvironmentTestCase(unittest.TestCase):
+
+    def test_Builders(self):
+	"""Test Builder execution through different environments
+
+	One environment is initialized with a single
+	Builder object, one with a list of a single Builder
+	object, and one with a list of two Builder objects.
+	"""
+	global built_it
+
+	b1 = Builder(name = 'builder1')
+	b2 = Builder(name = 'builder2')
+
+	built_it = {}
+	env1 = Environment(BUILDERS = b1)
+	env1.builder1.execute(target = 'out1')
+	assert built_it['out1']
+
+	built_it = {}
+	env2 = Environment(BUILDERS = [b1])
+	env2.builder1.execute(target = 'out1')
+	assert built_it['out1']
+
+	built_it = {}
+	env3 = Environment(BUILDERS = [b1, b2])
+	env3.builder1.execute(target = 'out1')
+	env3.builder2.execute(target = 'out2')
+	env3.builder1.execute(target = 'out3')
+	assert built_it['out1']
+	assert built_it['out2']
+	assert built_it['out3']
+
+    def test_Command(self):
+	pass	# XXX
+
+    def test_Copy(self):
+	"""Test construction Environment copying
+
+	Update the copy independently afterwards and check that
+	the original remains intact (that is, no dangling
+	references point to objects in the copied environment).
+	Copy the original with some construction variable
+	updates and check that the original remains intact
+	and the copy has the updated values.
+	"""
+	env1 = Environment(XXX = 'x', YYY = 'y')
+	env2 = env1.Copy()
+	env1copy = env1.Copy()
+	env2.Update(YYY = 'yyy')
+	assert env1 != env2
+	assert env1 == env1copy
+
+	env3 = env1.Copy(XXX = 'x3', ZZZ = 'z3')
+	assert env3.Dictionary['XXX'] == 'x3'
+	assert env3.Dictionary['YYY'] == 'y'
+	assert env3.Dictionary['ZZZ'] == 'z3'
+	assert env1 == env1copy
+
+    def test_Dictionary(self):
+	"""Test retrieval of known construction variables
+
+	Fetch them from the Dictionary and check for well-known
+	defaults that get inserted.
+	"""
+	env = Environment(XXX = 'x', YYY = 'y')
+	assert env.Dictionary['XXX'] == 'x'
+	assert env.Dictionary['YYY'] == 'y'
+	assert env.Dictionary.has_key('BUILDERS')
+
+    def test_Environment(self):
+	"""Test construction Environments creation
+	
+	Create two with identical arguments and check that
+	they compare the same.
+	"""
+	env1 = Environment(XXX = 'x', YYY = 'y')
+	env2 = Environment(XXX = 'x', YYY = 'y')
+	assert env1 == env2
+
+    def test_Install(self):
+	pass	# XXX
+
+    def test_InstallAs(self):
+	pass	# XXX
+
+    def test_Scanners(self):
+	pass	# XXX
+
+    def test_Update(self):
+	"""Test updating an Environment with new construction variables
+
+	After creation of the Environment, of course.
+	"""
+	env1 = Environment(AAA = 'a', BBB = 'b')
+	env1.Update(BBB = 'bbb', CCC = 'ccc')
+	env2 = Environment(AAA = 'a', BBB = 'bbb', CCC = 'c')
+	assert env1 != env2
+
+    def test_subst(self):
+	"""Test substituting construction variables within strings
+	
+	Check various combinations, including recursive expansion
+	of variables into other variables.
+	"""
+	env = Environment(AAA = 'a', BBB = 'b')
+	str = env.subst("%AAA %{AAA}A %BBBB %BBB")
+	assert str == "a aA  b", str
+	env = Environment(AAA = '%BBB', BBB = 'b', BBBA = 'foo')
+	str = env.subst("%AAA %{AAA}A %{AAA}B %BBB")
+	assert str == "b foo  b", str
+	env = Environment(AAA = '%BBB', BBB = '%CCC', CCC = 'c')
+	str = env.subst("%AAA %{AAA}A %{AAA}B %BBB")
+	assert str == "c   c", str
+
+
+
+if __name__ == "__main__":
+    suite = unittest.makeSuite(EnvironmentTestCase, 'test_')
+    if not unittest.TextTestRunner().run(suite).wasSuccessful():
+	sys.exit(1)

File src/engine/SCons/Errors.py

+"""SCons.Errors
+
+This file contains the exception classes used to handle internal
+and user errors in SCons.
+
+"""
+
+__revision__ = "Errors.py __REVISION__ __DATE__ __DEVELOPER__"
+
+
+
+class InternalError(Exception):
+    def __init__(self, args=None):
+        self.args = args
+
+class UserError(Exception):
+    def __init__(self, args=None):
+        self.args = args

File src/engine/SCons/ErrorsTests.py

+__revision__ = "ErrorsTests.py __REVISION__ __DATE__ __DEVELOPER__"
+
+import sys
+import unittest
+import SCons.Errors
+
+
+class ErrorsTestCase(unittest.TestCase):
+    def test_InternalError(self):
+	"""Test the InternalError exception."""
+        try:
+            raise SCons.Errors.InternalError, "test internal error"
+        except SCons.Errors.InternalError, e:
+            assert e.args == "test internal error"
+
+    def test_UserError(self):
+	"""Test the UserError exception."""
+        try:
+            raise SCons.Errors.UserError, "test user error"
+        except SCons.Errors.UserError, e:
+            assert e.args == "test user error"
+
+
+
+if __name__ == "__main__":
+    suite = unittest.makeSuite(ErrorsTestCase, 'test_')
+    if not unittest.TextTestRunner().run(suite).wasSuccessful():
+	sys.exit(1)

File src/engine/SCons/Job.py

+"""SCons.Job
+
+This module defines the Serial and Parallel classes that execute tasks to
+complete a build. The Jobs class provides a higher level interface to start,
+stop, and wait on jobs.
+
+"""
+
+__revision__ = "Job.py __REVISION__ __DATE__ __DEVELOPER__"
+
+class Jobs:
+    """An instance of this class initializes N jobs, and provides
+    methods for starting, stopping, and waiting on all N jobs.
+    """
+    
+    def __init__(self, num, taskmaster):
+        """
+        Create 'num' jobs using the given taskmaster.
+
+        If 'num' is 1 or less, a single serial job will be used;
+        otherwise 'num' parallel jobs will be used.
+        """
+
+        if num > 1:
+            self.jobs = []
+            for i in range(num):
+                self.jobs.append(Parallel(taskmaster, self))
+        else:
+            self.jobs = [Serial(taskmaster)]
+
+    def start(self):
+        """start the jobs"""
+
+        for job in self.jobs:
+            job.start()
+
+    def wait(self):
+        """ wait for the jobs started with start() to finish"""
+
+        for job in self.jobs:
+            job.wait()
+
+    def stop(self):
+        """
+        stop the jobs started with start()
+
+        This function does not wait for the jobs to finish.
+        """
+
+        for job in self.jobs:
+            job.stop()
+    
+class Serial:
+    """This class is used to execute tasks in series, and is more efficient
+    than Parallel, but is only appropriate for non-parallel builds. Only
+    one instance of this class should be in existence at a time.
+
+    This class is not thread safe.
+    """
+
+    def __init__(self, taskmaster):
+        """Create a new serial job given a taskmaster. 
+
+        The taskmaster's next_task() method should return the next task
+        that needs to be executed, or None if there are no more tasks. The
+        taskmaster's executed() method will be called for each task when it
+        is successfully executed or failed() will be called if it failed to
+        execute (e.g. execute() raised an exception). The taskmaster's
+        is_blocked() method will not be called.  """
+        
+        self.taskmaster = taskmaster
+
+    def start(self):
+        
+        """Start the job. This will begin pulling tasks from the taskmaster
+        and executing them, and return when there are no more tasks. If a task
+        fails to execute (i.e. execute() raises an exception), then the job will
+        stop."""
+        
+        while 1:
+            task = self.taskmaster.next_task()
+
+            if task is None:
+                break
+
+            try:
+                task.execute()
+            except:
+                self.taskmaster.failed(task)
+                return
+            else:
+                self.taskmaster.executed(task)
+
+    def stop(self):
+        """Serial jobs are always finished when start() returns, so there
+        is nothing to do here"""
+        
+        pass
+
+    def wait(self):
+        """Serial jobs are always finished when start() returns, so there
+        is nothing to do here"""
+        pass
+
+
+# This will hold a condition variable once the first Parallel job
+# is created.
+cv = None
+
+class Parallel:
+    """This class is used to execute tasks in parallel, and is less
+    efficient than Serial, but is appropriate for parallel builds. Create
+    an instance of this class for each job or thread you want.
+
+    This class is thread safe.
+    """
+
+
+    def __init__(self, taskmaster, jobs):
+        """Create a new parallel job given a taskmaster, and a Jobs instance.
+        Multiple jobs will be using the taskmaster in parallel, but all
+        method calls to taskmaster methods are serialized by the jobs
+        themselves.
+
+        The taskmaster's next_task() method should return the next task
+        that needs to be executed, or None if there are no more tasks. The
+        taskmaster's executed() method will be called for each task when it
+        is successfully executed or failed() will be called if the task
+        failed to execute (i.e. execute() raised an exception).  The
+        taskmaster's is_blocked() method should return true iff there are
+        more tasks, but they can't be executed until one or more other
+        tasks have been executed. next_task() will be called iff
+        is_blocked() returned false.
+
+        Note: calls to taskmaster are serialized, but calls to execute() on
+        distinct tasks are not serialized, because that is the whole point
+        of parallel jobs: they can execute multiple tasks
+        simultaneously. """
+
+        global cv
+        
+        # import threading here so that everything in the Job module
+        # but the Parallel class will work if the interpreter doesn't
+        # support threads
+        import threading
+        
+        self.taskmaster = taskmaster
+        self.jobs = jobs
+        self.thread = threading.Thread(None, self.__run)
+        self.stop_running = 0
+
+        if cv is None:
+            cv = threading.Condition()
+
+    def start(self):
+        """Start the job. This will spawn a thread that will begin pulling
+        tasks from the task master and executing them. This method returns
+        immediately and doesn't wait for the jobs to be executed.
+
+        If a task fails to execute (i.e. execute() raises an exception),
+        all jobs will be stopped.
+
+        To stop the job, call stop().
+        To wait for the job to finish, call wait().
+        """
+        self.thread.start()
+
+    def stop(self):
+        """Stop the job. This will cause the job to finish after the
+        currently executing task is done. A job that has been stopped can
+        not be restarted.
+
+        To wait for the job to finish, call wait().
+        """
+
+        cv.acquire()
+        self.stop_running = 1
+        # wake up the sleeping jobs so this job will end as soon as possible:
+        cv.notifyAll() 
+        cv.release()
+        
+    def wait(self):
+        """Wait for the job to finish. A job is finished when either there
+        are no more tasks or the job has been stopped and it is no longer
+        executing a task.
+
+        This method should only be called after start() has been called.
+
+        To stop the job, call stop().
+        """
+        self.thread.join()
+
+    def __run(self):
+        """private method that actually executes the tasks"""
+
+        cv.acquire()
+
+        try:
+
+            while 1:
+                while self.taskmaster.is_blocked() and not self.stop_running:
+                    cv.wait(None)
+
+                # check this before calling next_task(), because
+                # this job may have been stopped because of a build
+                # failure:
+                if self.stop_running:
+                    break
+                    
+                task = self.taskmaster.next_task()
+
+                if task == None:
+                    break
+
+                cv.release()
+                try:
+                    try:
+                        task.execute()
+                    finally:
+                        cv.acquire()
+                except:
+                    self.taskmaster.failed(task)
+                    # stop all jobs since there was a failure:
+                    # (this will wake up any waiting jobs, so
+                    #  it isn't necessary to explicitly wake them
+                    #  here)
+                    self.jobs.stop() 
+                else:
+                    self.taskmaster.executed(task)
+                    
+                    if not self.taskmaster.is_blocked():
+                        cv.notifyAll()
+                    
+        finally:
+            cv.release()
+
+
+
+
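+# The docstrings above describe the taskmaster interface the job classes
+# expect (next_task, executed, failed, is_blocked).  A minimal sketch of a
+# conforming taskmaster, with made-up task and class names, driven through
+# a single serial job (Python 2, to match the code in this commit):
+#
+#     import SCons.Job
+#
+#     class EchoTask:
+#         "Hypothetical task: just records that it was executed."
+#         def __init__(self, name):
+#             self.name = name
+#             self.done = 0
+#         def execute(self):
+#             self.done = 1
+#
+#     class ListTaskmaster:
+#         "Hands out one EchoTask per name, in order, and never blocks."
+#         def __init__(self, names):
+#             self.pending = map(EchoTask, names)
+#         def next_task(self):
+#             if self.pending:
+#                 return self.pending.pop(0)
+#             return None
+#         def executed(self, task):
+#             pass
+#         def failed(self, task):
+#             pass
+#         def is_blocked(self):
+#             return 0
+#
+#     tm = ListTaskmaster(['a.o', 'b.o', 'prog'])
+#     jobs = SCons.Job.Jobs(1, tm)    # num <= 1, so a single Serial job is used
+#     jobs.start()
+#     jobs.wait()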

File src/engine/SCons/JobTests.py

+__revision__ = "JobTests.py __REVISION__ __DATE__ __DEVELOPER__"
+
+import unittest
+import random
+import math
+import SCons.Job
+import sys
+
+# a large number
+num_sines = 10000
+
+# how many parallel jobs to perform for the test
+num_jobs = 11
+
+# how many tasks to perform for the test
+num_tasks = num_jobs*5
+
+class DummyLock:
+    "fake lock class to use if threads are not supported"
+    def acquire(self):
+        pass
+
+    def release(self):
+        pass
+
+class NoThreadsException:
+    "raised by the ParallelTestCase if threads are not supported"
+
+    def __str__(self):
+        return "the interpreter doesn't support threads"
+
+class Task:
+    """A dummy task class for testing purposes."""
+
+    def __init__(self, i, taskmaster):
+        self.i = i
+        self.taskmaster = taskmaster
+        self.was_executed = 0
+        
+    def execute(self):
+        self.taskmaster.guard.acquire()
+        self.taskmaster.begin_list.append(self.i)
+        self.taskmaster.guard.release()
+
+        # do something that will take some random amount of time:
+        for i in range(random.randrange(0, num_sines, 1)):
+            x = math.sin(i)
+
+        self.was_executed = 1
+
+        self.taskmaster.guard.acquire()
+        self.taskmaster.end_list.append(self.i)
+        self.taskmaster.guard.release()
+
+class ExceptionTask:
+    """A dummy task class for testing purposes."""
+
+    def __init__(self, i, taskmaster):
+        pass
+        
+    def execute(self):
+        raise "exception"
+
+class Taskmaster:
+    """A dummy taskmaster class for testing the job classes."""
+
+    def __init__(self, n, test_case, Task):
+        """n is the number of dummy tasks to perform."""
+
+        self.test_case = test_case
+        self.num_tasks = n
+        self.num_iterated = 0
+        self.num_executed = 0
+        self.num_failed = 0
+        self.Task = Task
+        # 'guard' guards 'task_begin_list' and 'task_end_list'
+        try:
+            import threading
+            self.guard = threading.Lock()
+        except:
+            self.guard = DummyLock()
+
+        # keep track of the order tasks are begun in
+        self.begin_list = []
+
+        # keep track of the order tasks are completed in
+        self.end_list = []
+
+
+    def next_task(self):
+        if self.all_tasks_are_iterated():
+            return None
+        else:
+            self.num_iterated = self.num_iterated + 1
+            return self.Task(self.num_iterated, self)
+
+    def all_tasks_are_executed(self):
+        return self.num_executed == self.num_tasks
+
+    def all_tasks_are_iterated(self):
+        return self.num_iterated == self.num_tasks
+
+    def executed(self, task):
+        self.num_executed = self.num_executed + 1
+
+        self.test_case.failUnless(task.was_executed,
+                                  "the task wasn't really executed")
+        self.test_case.failUnless(task.__class__ is Task,
+                                  "the task wasn't really a Task instance")
+
+    def failed(self, task):
+        self.num_failed = self.num_failed + 1
+    
+    def is_blocked(self):
+        # simulate blocking tasks
+        return self.num_iterated - self.num_executed >= max(num_jobs/2, 2)
+
+    def tasks_were_serial(self):
+        "analyze the task order to see if they were serial"
+        serial = 1 # assume the tasks were serial
+        for i in range(num_tasks):
+            serial = serial and (self.begin_list[i]
+                                 == self.end_list[i]
+                                 == (i + 1))
+        return serial
+
+class ParallelTestCase(unittest.TestCase):
+    def runTest(self):
+        "test parallel jobs"
+        
+        try:
+            import threading
+        except:
+            raise NoThreadsException()
+
+        taskmaster = Taskmaster(num_tasks, self, Task)
+        jobs = SCons.Job.Jobs(num_jobs, taskmaster)
+        jobs.start()
+        jobs.wait()
+
+        self.failUnless(not taskmaster.tasks_were_serial(),
+                        "the tasks were not executed in parallel")
+        self.failUnless(taskmaster.all_tasks_are_executed(),
+                        "all the tests were not executed")
+        self.failUnless(taskmaster.all_tasks_are_iterated(),
+                        "all the tests were not iterated over")
+        self.failIf(taskmaster.num_failed,
+                    "some task(s) failed to execute") 
+
+class SerialTestCase(unittest.TestCase):
+    def runTest(self):
+        "test a serial job"
+
+        taskmaster = Taskmaster(num_tasks, self, Task)
+        jobs = SCons.Job.Jobs(1, taskmaster)
+        jobs.start()
+        jobs.wait()
+
+        self.failUnless(taskmaster.tasks_were_serial(),
+                        "the tasks were not executed in series")
+        self.failUnless(taskmaster.all_tasks_are_executed(),
+                        "all the tests were not executed")
+        self.failUnless(taskmaster.all_tasks_are_iterated(),
+                        "all the tests were not iterated over")
+        self.failIf(taskmaster.num_failed,
+                    "some task(s) failed to execute") 
+
+class SerialExceptionTestCase(unittest.TestCase):
+    def runTest(self):
+        "test a serial job with tasks that raise exceptions"
+
+        taskmaster = Taskmaster(num_tasks, self, ExceptionTask)
+        jobs = SCons.Job.Jobs(1, taskmaster)
+        jobs.start()
+        jobs.wait()
+
+        self.failIf(taskmaster.num_executed,
+                    "a task was executed")
+        self.failUnless(taskmaster.num_iterated == 1,
+                    "exactly one task should have been iterated")
+        self.failUnless(taskmaster.num_failed == 1,
+                    "exactly one task should have failed")
+
+class ParallelExceptionTestCase(unittest.TestCase):
+    def runTest(self):
+        "test parallel jobs with tasks that raise exceptions"
+
+        taskmaster = Taskmaster(num_tasks, self, ExceptionTask)
+        jobs = SCons.Job.Jobs(num_jobs, taskmaster)
+        jobs.start()
+        jobs.wait()
+
+        self.failIf(taskmaster.num_executed,
+                    "a task was executed")
+        self.failUnless(taskmaster.num_iterated >= 1,
+                    "one or more task should have been iterated")
+        self.failUnless(taskmaster.num_failed >= 1,
+                    "one or more tasks should have failed") 
+
+
+def suite():
+    suite = unittest.TestSuite()
+    suite.addTest(ParallelTestCase())
+    suite.addTest(SerialTestCase())
+    suite.addTest(SerialExceptionTestCase())
+    suite.addTest(ParallelExceptionTestCase())
+    return suite
+
+if __name__ == "__main__":
+    runner = unittest.TextTestRunner()
+    result = runner.run(suite())
+    if (len(result.failures) == 0
+        and len(result.errors) == 1
+        and type(result.errors[0][0]) == SerialTestCase
+        and type(result.errors[0][1][0]) == NoThreadsException):
+        sys.exit(2)
+    elif not result.wasSuccessful():
+        sys.exit(1)
+
+            
+
+        
+    
+    
+    
+    
+
+

File src/engine/SCons/Node/.aeignore

+*,D
+*.pyc
+.*.swp
+.consign

File src/engine/SCons/Node/FS.py

+"""SCons.Node.FS
+
+File system nodes.
+
+"""
+
+__revision__ = "Node/FS.py __REVISION__ __DATE__ __DEVELOPER__"
+
+
+
+import os
+import os.path
+import SCons.Node
+
+
+
+Top = None
+Root = {}
+
+
+
+def init(path = None):
+    """Initialize the Node.FS subsystem.
+
+    The supplied path is the top of the source tree, where we
+    expect to find the top-level build file.  If no path is
+    supplied, the current directory is the default.
+    """
+    global Top
+    if path == None:
+	path = os.getcwd()
+    Top = lookup(Dir, path, directory = None)
+    Top.path = '.'
+
+def lookup(fsclass, name, directory = Top):
+    """Look up a file system node for a path name.  If the path
+    name is relative, it will be looked up relative to the
+    specified directory node, or to the top-level directory
+    if no node was specified.  An initial '#' specifies that
+    the name will be looked up relative to the top-level directory,
+    regardless of the specified directory argument.  Returns the
+    existing or newly-created node for the specified path name.
+    The node returned will be of the specified fsclass (Dir or
+    File).
+    """
+    global Top
+    head, tail = os.path.split(name)
+    if not tail:
+	drive, path = os.path.splitdrive(head)
+	if not Root.has_key(drive):
+	    Root[drive] = Dir(head, None)
+	    Root[drive].abspath = head
+	    Root[drive].path = head
+	return Root[drive]
+    if tail[0] == '#':
+	directory = Top
+	tail = tail[1:]
+    elif directory is None:
+	directory = Top
+    if head:
+	directory = lookup(Dir, head, directory)
+    try:
+	self = directory.entries[tail]
+    except AttributeError:
+	# There was no "entries" attribute on the directory,
+	# which essentially implies that it was a file.
+	# Raise it as a more descriptive exception.
+	raise TypeError, directory
+    except KeyError:
+	# There was no entry for "tail," so create the new
+	# node and link it in to the existing structure.
+	self = fsclass(tail, directory)
+	self.name = tail
+	if self.path[0:2] == "./":
+	    self.path = self.path[2:]
+	directory.entries[tail] = self
+    except:
+	raise
+    if self.__class__.__name__ != fsclass.__name__:
+	# Here, we found an existing node for this path,
+	# but it was the wrong type (a File when we were
+	# looking for a Dir, or vice versa).
+	raise TypeError, self
+    return self
+
+
+
+# XXX TODO?
+# Annotate with the creator
+# is_under
+# rel_path
+# srcpath / srcdir
+# link / is_linked
+# linked_targets
+# is_accessible
+
+class Dir(SCons.Node.Node):
+    """A class for directories in a file system.
+    """
+
+    def __init__(self, name, directory):
+	self.entries = {}
+	self.entries['.'] = self
+	self.entries['..'] = directory
+	if not directory is None:
+	    self.abspath = os.path.join(directory.abspath, name, '')
+	    self.path = os.path.join(directory.path, name, '')
+
+    def up(self):
+	return self.entries['..']
+
+
+# XXX TODO?
+# rfile
+# precious
+# no_rfile
+# rpath
+# rsrcpath
+# source_exists
+# derived_exists
+# is_on_rpath
+# local
+# base_suf
+# suffix
+# addsuffix
+# accessible
+# ignore
+# build
+# bind
+# is_under
+# relpath
+
+class File(SCons.Node.Node):
+    """A class for files in a file system.
+    """
+
+    def __init__(self, name, directory):
+	self.abspath = os.path.join(directory.abspath, name)
+	self.path = os.path.join(directory.path, name)

File src/engine/SCons/Node/FS/.aeignore

+*,D
+*.pyc
+.*.swp
+.consign

File src/engine/SCons/Node/FSTests.py

+__revision__ = "Node/FSTests.py __REVISION__ __DATE__ __DEVELOPER__"
+
+import os
+import sys
+import unittest
+
+import SCons.Node.FS
+
+
+
+built_it = None
+
+class Builder:
+    def execute(self, target = None, source = None):
+	global built_it
+	built_it = 1
+
+
+
+class FSTestCase(unittest.TestCase):
+    def runTest(self):
+	"""Test FS (file system) Node operations
+	
+	This test case handles all of the file system node
+	tests in one environment, so we don't have to set up a
+	complicated directory structure for each test individually.
+	"""
+	from TestCmd import TestCmd
+
+	test = TestCmd(workdir = '')
+	test.subdir('sub', ['sub', 'dir'])
+
+	wp = test.workpath('')
+	sub = test.workpath('sub', '')
+	sub_dir = test.workpath('sub', 'dir', '')
+	sub_dir_foo = test.workpath('sub', 'dir', 'foo', '')
+	sub_dir_foo_bar = test.workpath('sub', 'dir', 'foo', 'bar', '')
+	sub_foo = test.workpath('sub', 'foo', '')
+
+	os.chdir(sub_dir)
+
+	SCons.Node.FS.init()
+
+	def Dir_test(lpath, path, abspath, up_path):
+	    dir = SCons.Node.FS.lookup(SCons.Node.FS.Dir, lpath)
+    	    assert(dir.path == path)
+	    assert(dir.abspath == abspath)
+	    assert(dir.up().path == up_path)
+
+	Dir_test('foo',		'foo/',		sub_dir_foo,		'.')
+	Dir_test('foo/bar',	'foo/bar/',	sub_dir_foo_bar,	'foo/')
+	Dir_test('/foo',	'/foo/',	'/foo/',		'/')
+	Dir_test('/foo/bar',	'/foo/bar/',	'/foo/bar/',		'/foo/')
+	Dir_test('..',		sub,		sub,			wp)
+	Dir_test('foo/..',	'.',		sub_dir,		sub)
+	Dir_test('../foo',	sub_foo,	sub_foo,		sub)
+	Dir_test('.',		'.',		sub_dir,		sub)
+	Dir_test('./.',		'.',		sub_dir,		sub)
+	Dir_test('foo/./bar',	'foo/bar/',	sub_dir_foo_bar,	'foo/')
+
+	d1 = SCons.Node.FS.lookup(SCons.Node.FS.Dir, 'd1')
+
+	f1 = SCons.Node.FS.lookup(SCons.Node.FS.File, 'f1', directory = d1)
+
+	assert(f1.path == 'd1/f1')
+
+	try:
+	    f2 = SCons.Node.FS.lookup(SCons.Node.FS.File, 'f1/f2', directory = d1)
+	except TypeError, x:
+	    node = x.args[0]
+	    assert(node.path == 'd1/f1')
+	    assert(node.__class__.__name__ == 'File')
+	except:
+	    raise
+
+	try:
+	    dir = SCons.Node.FS.lookup(SCons.Node.FS.Dir, 'd1/f1')
+	except TypeError, x:
+	    node = x.args[0]
+	    assert(node.path == 'd1/f1')
+	    assert(node.__class__.__name__ == 'File')
+	except:
+	    raise
+
+	# Test for sub-classing of node building.
+	global built_it
+
+	built_it = None
+	assert not built_it
+	d1.path = "d"		# XXX FAKE SUBCLASS ATTRIBUTE
+	d1.sources = "d"	# XXX FAKE SUBCLASS ATTRIBUTE
+	d1.builder_set(Builder())
+	d1.build()
+	assert built_it
+
+	built_it = None
+	assert not built_it
+	f1.path = "f"		# XXX FAKE SUBCLASS ATTRIBUTE
+	f1.sources = "f"	# XXX FAKE SUBCLASS ATTRIBUTE
+	f1.builder_set(Builder())
+	f1.build()
+	assert built_it
+
+
+if __name__ == "__main__":
+    suite = unittest.TestSuite()
+    suite.addTest(FSTestCase())
+    if not unittest.TextTestRunner().run(suite).wasSuccessful():
+	sys.exit(1)

File src/engine/SCons/Node/NodeTests.py

+__revision__ = "Node/NodeTests.py __REVISION__ __DATE__ __DEVELOPER__"
+
+import os
+import sys
+import unittest
+
+import SCons.Node
+
+
+
+built_it = None
+
+class Builder:
+    def execute(self, target = None, source = None):
+	global built_it
+	built_it = 1
+
+
+
+class NodeTestCase(unittest.TestCase):
+
+    def test_build(self):
+	"""Test building a node
+	"""
+	node = SCons.Node.Node()
+	node.builder_set(Builder())
+	node.path = "xxx"	# XXX FAKE SUBCLASS ATTRIBUTE
+	node.sources = "yyy"	# XXX FAKE SUBCLASS ATTRIBUTE
+	node.build()
+	assert built_it
+
+    def test_builder_set(self):
+	"""Test setting a Node's Builder
+	"""
+	node = SCons.Node.Node()
+	b = Builder()
+	node.builder_set(b)
+	assert node.builder == b
+
+
+
+if __name__ == "__main__":
+    suite = unittest.makeSuite(NodeTestCase, 'test_')
+    if not unittest.TextTestRunner().run(suite).wasSuccessful():
+	sys.exit(1)

File src/engine/SCons/Node/__init__.py

+"""SCons.Node
+
+The Node package for the SCons software construction utility.
+
+"""
+
+__revision__ = "Node/__init__.py __REVISION__ __DATE__ __DEVELOPER__"
+
+
+
+class Node:
+    """The base Node class, for entities that we know how to
+    build, or use to build other Nodes.
+    """
+    def build(self):
+	self.builder.execute(target = self.path, source = self.sources)
+
+    def builder_set(self, builder):
+	self.builder = builder
+
+    def env_set(self, env):
+	self.env = env

File src/engine/SCons/Scanner/.aeignore

+*,D
+*.pyc
+.*.swp
+.consign

File src/engine/SCons/Scanner/C.py

+"""SCons.Scanner.C
+
+This module implements the dependency scanner for C/C++ code.
+
+"""
+
+__revision__ = "Scanner/C.py __REVISION__ __DATE__ __DEVELOPER__"
+
+
+import SCons.Scanner
+import re
+import os.path
+
+angle_re = re.compile('^[ \t]*#[ \t]*include[ \t]+<([\\w./\\\\]+)>', re.M)
+quote_re = re.compile('^[ \t]*#[ \t]*include[ \t]+"([\\w./\\\\]+)"', re.M)
+
+def CScan():
+    "Return a Scanner instance for scanning C/C++ source files"
+    return SCons.Scanner.Scanner(scan)
+
+def find_files(filenames, paths):
+    """
+    find_files([str], [str]) -> [str]
+
+    filenames - a list of filenames to find
+    paths - a list of paths to search in
+
+    returns - the fullnames of the files
+
+    Only the first fullname found is returned for each filename, and any
+    files that aren't found are ignored.
+    """
+    fullnames = []
+    for filename in filenames:
+        for path in paths:
+            fullname = os.path.join(path, filename)
+            if os.path.exists(fullname):
+                fullnames.append(fullname)
+                break
+
+    return fullnames
+
+def scan(filename, env):
+    """
+    scan(str, Environment) -> [str]
+
+    the C/C++ dependency scanner function
+
+    This function is intentionally simple. There are two rules it
+    follows:
+    
+    1) #include <foo.h> - search for foo.h in CPPPATH followed by the
+        directory 'filename' is in
+    2) #include \"foo.h\" - search for foo.h in the directory 'filename' is
+       in followed by CPPPATH
+
+    These rules approximate the behaviour of most C/C++ compilers.
+
+    This scanner also ignores #ifdef and other preprocessor conditionals, so
+    it may find more dependencies than there really are, but it never misses
+    dependencies.
+    """
+
+    if hasattr(env, "CPPPATH"):
+        paths = env.CPPPATH
+    else:
+        paths = []
+        
+    file = open(filename)
+    contents = file.read()
+    file.close()
+
+    angle_includes = angle_re.findall(contents)
+    quote_includes = quote_re.findall(contents)
+
+    source_dir = os.path.dirname(filename)
+    
+    deps = (find_files(angle_includes, paths + [source_dir])
+            + find_files(quote_includes, [source_dir] + paths))
+
+    return deps
+
+    
+    
+    
+
+    

File src/engine/SCons/Scanner/CTests.py

+__revision__ = "Scanner/CTests.py __REVISION__ __DATE__ __DEVELOPER__"
+
+import TestCmd
+import SCons.Scanner.C
+import unittest
+import sys
+
+test = TestCmd.TestCmd(workdir = '')
+
+# create some source files and headers:
+
+test.write('f1.cpp',"""
+#include \"f1.h\"
+#include <f2.h>
+
+int main()
+{
+   return 0;
+}
+""")
+
+test.write('f2.cpp',"""
+#include \"d1/f1.h\"
+#include <d2/f1.h>
+#include \"f1.h\"
+#include <f4.h>
+
+int main()
+{
+   return 0;
+}
+""")
+
+test.write('f3.cpp',"""
+#include \t "f1.h"
+   \t #include "f2.h"
+#   \t include "f3.h"
+
+#include \t <d1/f1.h>
+   \t #include <d1/f2.h>
+#   \t include <d1/f3.h>
+
+// #include "never.h"
+
+const char* x = "#include <never.h>"
+
+int main()
+{
+   return 0;
+}
+""")
+
+
+# for Emacs -> "
+
+test.subdir('d1', ['d1', 'd2'])
+
+headers = ['f1.h','f2.h', 'f3.h', 'never.h',
+           'd1/f1.h', 'd1/f2.h', 'd1/f3.h',
+           'd1/d2/f1.h', 'd1/d2/f2.h', 'd1/d2/f3.h', 'd1/d2/f4.h']
+
+for h in headers:
+    test.write(h, " ")
+
+# define some helpers:
+
+class DummyEnvironment:
+    pass
+
+def deps_match(deps, headers):
+    expect = map(test.workpath, headers)
+    deps.sort(); expect.sort()  # sort() returns None, so compare after sorting
+    return deps == expect
+
+# define some tests:
+
+class CScannerTestCase1(unittest.TestCase):
+    def runTest(self):
+        env = DummyEnvironment
+        s = SCons.Scanner.C.CScan()
+        deps = s.scan(test.workpath('f1.cpp'), env)
+        self.failUnless(deps_match(deps, ['f1.h', 'f2.h']))
+
+class CScannerTestCase2(unittest.TestCase):
+    def runTest(self):
+        env = DummyEnvironment
+        env.CPPPATH = [test.workpath("d1")]
+        s = SCons.Scanner.C.CScan()
+        deps = s.scan(test.workpath('f1.cpp'), env)
+        headers = ['f1.h', 'd1/f2.h']
+        self.failUnless(deps_match(deps, headers)) 
+
+class CScannerTestCase3(unittest.TestCase):
+    def runTest(self):
+        env = DummyEnvironment
+        env.CPPPATH = [test.workpath("d1")]
+        s = SCons.Scanner.C.CScan()
+        deps = s.scan(test.workpath('f2.cpp'), env)
+        headers = ['f1.h', 'd1/f1.h', 'd1/d2/f1.h']
+        self.failUnless(deps_match(deps, headers))
+                  
+
+class CScannerTestCase4(unittest.TestCase):
+    def runTest(self):
+        env = DummyEnvironment
+        env.CPPPATH = [test.workpath("d1"), test.workpath("d1/d2")]
+        s = SCons.Scanner.C.CScan()
+        deps = s.scan(test.workpath('f2.cpp'), env)
+        headers = ['f1.h', 'd1/f1.h', 'd1/d2/f1.h', 'd1/d2/f4.h']
+        self.failUnless(deps_match(deps, headers))
+        
+class CScannerTestCase5(unittest.TestCase):
+    def runTest(self):
+        env = DummyEnvironment
+        s = SCons.Scanner.C.CScan()
+        deps = s.scan(test.workpath('f3.cpp'), env)
+        headers =  ['f1.h', 'f2.h', 'f3.h', 'd1/f1.h', 'd1/f2.h', 'd1/f3.h']
+        self.failUnless(deps_match(deps, headers))
+
+def suite():
+    suite = unittest.TestSuite()
+    suite.addTest(CScannerTestCase1())
+    suite.addTest(CScannerTestCase2())
+    suite.addTest(CScannerTestCase3())
+    suite.addTest(CScannerTestCase4())
+    suite.addTest(CScannerTestCase5())
+    return suite
+
+if __name__ == "__main__":
+    runner = unittest.TextTestRunner()
+    result = runner.run(suite())
+    if not result.wasSuccessful():
+        sys.exit(1)

File src/engine/SCons/Scanner/ScannerTests.py

+__revision__ = "Scanner/ScannerTests.py __REVISION__ __DATE__ __DEVELOPER__"
+
+import unittest
+import SCons.Scanner
+import sys
+
+class ScannerTestBase:
+    
+    def func(self, filename, env, *args):
+        self.filename = filename
+        self.env = env
+
+        if len(args) > 0: