Anselm Kruis committed 9623515 Merge

Merged the umbrella feature branch.


Files changed (47)

.hgignore

 *.bak
 
 syntax: regexp
-^stackless.*\.tar\.bz2
-^stackless.*export$
-syntax: regexp
 ^build$
 syntax: regexp
 ^dist$
 syntax: regexp
-^MANIFEST$
+^MANIFEST$
+syntax: regexp
+^C4_linux-x86_64/dist$
+syntax: regexp
+^C4_linux-x86_64/stackless_installer_C4_linux_x86_64\.egg-info$
+syntax: regexp
+^C2_win32/dist$
+syntax: regexp
+^C2_win32/stackless_installer_C2_win32\.egg-info$
.pydevproject

-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<?eclipse-pydev version="1.0"?>
-
-<pydev_project>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">python2.7</pydev_property>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
-</pydev_project>
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?>
+
+<pydev_project>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">python2.7</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
+<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
+<path>/slp-installer</path>
+</pydev_pathproperty>
+</pydev_project>

C2_win32_2.7/MANIFEST.in

+include *.py
+include README.rst
+recursive-include slpInstaller *.py
+graft slpInstaller/win32

C2_win32_2.7/README.rst

+Installer for Stackless Python
+==============================
+
+Windows 32bit
+-------------
+
+This installer adds two additional executables, slpython.exe and slpythonw.exe,
+to your Python installation. It does not interfere with your regular CPython installation.
+
+
+Sorry, currently for Python 2.7 only.
+
+Hg repository: https://bitbucket.org/akruis/slp-installer
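
Once install-stackless has run, the new executable should be able to
import the stackless module. A minimal check, assuming slpython.exe ended
up next to your python.exe (the script name is hypothetical):

    # check_stackless.py -- run with: slpython.exe check_stackless.py
    import stackless

    def hello():
        print("hello from a tasklet")

    stackless.tasklet(hello)()   # create a tasklet bound to hello
    stackless.run()              # run the scheduler until all tasklets finish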

C2_win32_2.7/setup.py

+#
+# Copyright 2013 Anselm Kruis
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+"""
+Installer for Stackless Python (Windows)
+
+This distribution adds Stackless Python executables to an existing
+CPython installation.
+
+The distribution is a normal Python egg. It provides two scripts:
+
+* *install-stackless*: installs the Stackless executables beside the
+  regular CPython executables.
+* *uninstall-stackless*: removes the Stackless executables.
+
+The distribution requires the package setuptools during installation.
+"""
+
+
+from setuptools import setup
+
+from slpInstaller import UCS, PLATFORM, INSTALLER_VERSION_STR, STACKLESS_VERSION
+
+
+setup(
+    name='stackless_installer_C%d_%s' % (UCS, PLATFORM.replace('-', '_')),
+    version=INSTALLER_VERSION_STR,
+    description='Installer for Stackless Python %s ucs%d' % (PLATFORM, UCS),
+    author='Christian Tismer',
+    author_email='tismer@stackless.com',
+    maintainer='Anselm Kruis',
+    maintainer_email='a.kruis@science-computing.de',
+    url='http://www.stackless.com',
+    packages=['slpInstaller'],
+    include_package_data=True,
+    entry_points={
+        'console_scripts': [
+            'install-stackless = slpInstaller:main_install',
+            'uninstall-stackless = slpInstaller:main_uninstall',
+        ],
+    },
+    zip_safe=True,
+    long_description=open("README.rst").read(),
+    classifiers=[
+        "License :: OSI Approved :: Python Software Foundation License",  # Stackless Python files
+        "License :: OSI Approved :: Apache Software License",  # for this installer
+        "Programming Language :: Python",
+        "Programming Language :: Python :: " + ".".join(map(str, STACKLESS_VERSION[:2])),
+        "Programming Language :: Python :: Implementation :: Stackless",
+        "Operating System :: Microsoft :: Windows",  # this is the win32 variant
+        "Development Status :: 5 - Production/Stable",
+        "Intended Audience :: Developers",
+        "Topic :: Software Development :: Libraries :: Python Modules",
+    ],
+    keywords='stackless',
+    license='Python Software Foundation License (Stackless-Python), Apache License, Version 2.0 (Installer)',
+    platforms=PLATFORM,
+    )
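
The two console_scripts entry points above are what become the
install-stackless and uninstall-stackless commands. At install time
setuptools generates a small wrapper script for each one; roughly, each
wrapper is equivalent to the following sketch (the distribution name is
illustrative):

    # Sketch of the generated 'install-stackless' wrapper; the real
    # script is written by setuptools during installation.
    import sys
    from pkg_resources import load_entry_point

    if __name__ == '__main__':
        sys.exit(load_entry_point('stackless-installer-C2-win32',
                                  'console_scripts', 'install-stackless')())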

C2_win32_2.7/slpInstaller/Lib/copy_reg.py

+"""Helper to provide extensibility for pickle/cPickle.
+
+This is only useful to add pickle support for extension types defined in
+C, not for instances of user-defined classes.
+"""
+
+from types import ClassType as _ClassType
+
+__all__ = ["pickle", "constructor",
+           "add_extension", "remove_extension", "clear_extension_cache"]
+
+dispatch_table = {}
+
+def pickle(ob_type, pickle_function, constructor_ob=None):
+    if type(ob_type) is _ClassType:
+        raise TypeError("copy_reg is not intended for use with classes")
+
+    if not hasattr(pickle_function, '__call__'):
+        raise TypeError("reduction functions must be callable")
+    dispatch_table[ob_type] = pickle_function
+
+    # The constructor_ob function is a vestige of safe for unpickling.
+    # There is no reason for the caller to pass it anymore.
+    if constructor_ob is not None:
+        constructor(constructor_ob)
+
+def constructor(object):
+    if not hasattr(object, '__call__'):
+        raise TypeError("constructors must be callable")
+
+# Example: provide pickling support for complex numbers.
+
+try:
+    complex
+except NameError:
+    pass
+else:
+
+    def pickle_complex(c):
+        return complex, (c.real, c.imag)
+
+    pickle(complex, pickle_complex, complex)
+
+# Support for pickling new-style objects
+
+def _reconstructor(cls, base, state):
+    if base is object:
+        obj = object.__new__(cls)
+    else:
+        obj = base.__new__(cls, state)
+        if base.__init__ != object.__init__:
+            base.__init__(obj, state)
+    return obj
+
+_HEAPTYPE = 1<<9
+
+# Python code for object.__reduce_ex__ for protocols 0 and 1
+
+def _reduce_ex(self, proto):
+    assert proto < 2
+    for base in self.__class__.__mro__:
+        if hasattr(base, '__flags__') and not base.__flags__ & _HEAPTYPE:
+            break
+    else:
+        base = object # not really reachable
+    if base is object:
+        state = None
+    else:
+        if base is self.__class__:
+            raise TypeError, "can't pickle %s objects" % base.__name__
+        ## Stackless addition BEGIN
+        # if base is only supported by our shadow types in copy_reg,
+        # we need to substitute here:
+        reducer = dispatch_table.get(base)
+        if reducer and reducer.__module__ == "stackless._wrap":
+            base = reducer(self)[0]
+        ## Stackless addition END
+        state = base(self)
+    args = (self.__class__, base, state)
+    try:
+        getstate = self.__getstate__
+    except AttributeError:
+        if getattr(self, "__slots__", None):
+            raise TypeError("a class that defines __slots__ without "
+                            "defining __getstate__ cannot be pickled")
+        try:
+            dict = self.__dict__
+        except AttributeError:
+            dict = None
+    else:
+        dict = getstate()
+    if dict:
+        return _reconstructor, args, dict
+    else:
+        return _reconstructor, args
+
+# Helper for __reduce_ex__ protocol 2
+
+def __newobj__(cls, *args):
+    return cls.__new__(cls, *args)
+
+def _slotnames(cls):
+    """Return a list of slot names for a given class.
+
+    This needs to find slots defined by the class and its bases, so we
+    can't simply return the __slots__ attribute.  We must walk down
+    the Method Resolution Order and concatenate the __slots__ of each
+    class found there.  (This assumes classes don't modify their
+    __slots__ attribute to misrepresent their slots after the class is
+    defined.)
+    """
+
+    # Get the value from a cache in the class if possible
+    names = cls.__dict__.get("__slotnames__")
+    if names is not None:
+        return names
+
+    # Not cached -- calculate the value
+    names = []
+    if not hasattr(cls, "__slots__"):
+        # This class has no slots
+        pass
+    else:
+        # Slots found -- gather slot names from all base classes
+        for c in cls.__mro__:
+            if "__slots__" in c.__dict__:
+                slots = c.__dict__['__slots__']
+                # if class has a single slot, it can be given as a string
+                if isinstance(slots, basestring):
+                    slots = (slots,)
+                for name in slots:
+                    # special descriptors
+                    if name in ("__dict__", "__weakref__"):
+                        continue
+                    # mangled names
+                    elif name.startswith('__') and not name.endswith('__'):
+                        names.append('_%s%s' % (c.__name__, name))
+                    else:
+                        names.append(name)
+
+    # Cache the outcome in the class if at all possible
+    try:
+        cls.__slotnames__ = names
+    except:
+        pass # But don't die if we can't
+
+    return names
+
+# A registry of extension codes.  This is an ad-hoc compression
+# mechanism.  Whenever a global reference to <module>, <name> is about
+# to be pickled, the (<module>, <name>) tuple is looked up here to see
+# if it is a registered extension code for it.  Extension codes are
+# universal, so that the meaning of a pickle does not depend on
+# context.  (There are also some codes reserved for local use that
+# don't have this restriction.)  Codes are positive ints; 0 is
+# reserved.
+
+_extension_registry = {}                # key -> code
+_inverted_registry = {}                 # code -> key
+_extension_cache = {}                   # code -> object
+# Don't ever rebind those names:  cPickle grabs a reference to them when
+# it's initialized, and won't see a rebinding.
+
+def add_extension(module, name, code):
+    """Register an extension code."""
+    code = int(code)
+    if not 1 <= code <= 0x7fffffff:
+        raise ValueError, "code out of range"
+    key = (module, name)
+    if (_extension_registry.get(key) == code and
+        _inverted_registry.get(code) == key):
+        return # Redundant registrations are benign
+    if key in _extension_registry:
+        raise ValueError("key %s is already registered with code %s" %
+                         (key, _extension_registry[key]))
+    if code in _inverted_registry:
+        raise ValueError("code %s is already in use for key %s" %
+                         (code, _inverted_registry[code]))
+    _extension_registry[key] = code
+    _inverted_registry[code] = key
+
+def remove_extension(module, name, code):
+    """Unregister an extension code.  For testing only."""
+    key = (module, name)
+    if (_extension_registry.get(key) != code or
+        _inverted_registry.get(code) != key):
+        raise ValueError("key %s is not registered with code %s" %
+                         (key, code))
+    del _extension_registry[key]
+    del _inverted_registry[code]
+    if code in _extension_cache:
+        del _extension_cache[code]
+
+def clear_extension_cache():
+    _extension_cache.clear()
+
+# Standard extension code assignments
+
+# Reserved ranges
+
+# First  Last Count  Purpose
+#     1   127   127  Reserved for Python standard library
+#   128   191    64  Reserved for Zope
+#   192   239    48  Reserved for 3rd parties
+#   240   255    16  Reserved for private use (will never be assigned)
+#   256   Inf   Inf  Reserved for future assignment
+
+# Extension codes are assigned by the Python Software Foundation.
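
The dispatch_table filled by pickle() above is consulted by Pickler.save()
(see pickle.py below) before any __reduce_ex__ lookup. A minimal sketch of
registering a reducer, using a plain Python class as a stand-in for a C
extension type:

    import copy_reg

    class Point(object):            # stand-in for an extension type
        def __init__(self, x, y):
            self.x, self.y = x, y

    def reduce_point(p):
        # return (callable, args); unpickling calls Point(x, y)
        return Point, (p.x, p.y)

    copy_reg.pickle(Point, reduce_point)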

C2_win32_2.7/slpInstaller/Lib/distutils/__init__.py

+def _setup_path():
+    global __path__
+    from pkgutil import extend_path
+    from traceback import print_exc
+    from sys import exc_info
+    path = __path__
+    __path__ = extend_path(__path__, __name__)
+    additionalDirs = __path__[:]
+    for p in path:
+        try:
+            additionalDirs.remove(p)
+        except ValueError:
+            print_exc()
+            pass
+    from imp import find_module
+    try:
+        (file, pathname, description) = find_module("__init__", additionalDirs)
+    except ImportError:
+        print_exc()
+        pass
+    else:
+        try:
+            try:
+                exec file in globals()
+            except ImportError:
+                raise
+            except Exception:
+                einfo = exc_info()
+                raise ImportError, "Failed to exec file '%s' with exception: %s" % (pathname, einfo[1]), einfo[2]
+            else:
+                i = __path__.index(path[0])
+                if i > 0:
+                    # bring our dir to the front again. Required for virtualenv
+                    del __path__[i]
+                    __path__.insert(0, path[0])
+        finally:
+            file.close()
+        
+_setup_path()
+del _setup_path
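
The _setup_path() shim above makes this private distutils package
transparent: pkgutil.extend_path() appends every sys.path directory that
also contains a distutils package, and the stdlib __init__ found there is
exec'd into this namespace. In outline, extend_path() behaves like the
following sketch (the real function also honours *.pkg files):

    import os, sys

    def extend_path_sketch(path, name):
        # append each sys.path entry that holds a package <entry>/<name>
        result = list(path)
        for entry in sys.path:
            candidate = os.path.join(entry, name)
            init_py = os.path.join(candidate, '__init__.py')
            if os.path.isfile(init_py) and candidate not in result:
                result.append(candidate)
        return result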

C2_win32_2.7/slpInstaller/Lib/distutils/command/__init__.py

+def _setup_path():
+    global __path__
+    from pkgutil import extend_path
+    from traceback import print_exc
+    from sys import exc_info
+    path = __path__
+    __path__ = extend_path(__path__, __name__)
+    additionalDirs = __path__[:]
+    for p in path:
+        try:
+            additionalDirs.remove(p)
+        except ValueError:
+            print_exc()
+            pass
+    from imp import find_module
+    try:
+        (file, pathname, description) = find_module("__init__", additionalDirs)
+    except ImportError:
+        print_exc()
+        pass
+    else:
+        try:
+            try:
+                exec file in globals()
+            except ImportError:
+                raise
+            except Exception:
+                einfo = exc_info()
+                raise ImportError, "Failed to exec file '%s' with exception: %s" % (pathname, einfo[1]), einfo[2]
+        finally:
+            file.close()
+        
+_setup_path()
+del _setup_path

C2_win32_2.7/slpInstaller/Lib/distutils/command/build_ext.py

+"""distutils.command.build_ext
+
+Implements the Distutils 'build_ext' command, for building extension
+modules (currently limited to C extensions, should accommodate C++
+extensions ASAP)."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id$"
+
+import sys, os, string, re
+from types import *
+from site import USER_BASE, USER_SITE
+from distutils.core import Command
+from distutils.errors import *
+from distutils.sysconfig import customize_compiler, get_python_version
+from distutils.dep_util import newer_group
+from distutils.extension import Extension
+from distutils.util import get_platform
+from distutils import log
+
+if os.name == 'nt':
+    from distutils.msvccompiler import get_build_version
+    MSVC_VERSION = int(get_build_version())
+
+# An extension name is just a dot-separated list of Python NAMEs (ie.
+# the same as a fully-qualified module name).
+extension_name_re = re.compile \
+    (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
+
+
+def show_compilers ():
+    from distutils.ccompiler import show_compilers
+    show_compilers()
+
+
+class build_ext (Command):
+
+    description = "build C/C++ extensions (compile/link to build directory)"
+
+    # XXX thoughts on how to deal with complex command-line options like
+    # these, i.e. how to make it so fancy_getopt can suck them off the
+    # command line and make it look like setup.py defined the appropriate
+    # lists of tuples of what-have-you.
+    #   - each command needs a callback to process its command-line options
+    #   - Command.__init__() needs access to its share of the whole
+    #     command line (must ultimately come from
+    #     Distribution.parse_command_line())
+    #   - it then calls the current command class' option-parsing
+    #     callback to deal with weird options like -D, which have to
+    #     parse the option text and churn out some custom data
+    #     structure
+    #   - that data structure (in this case, a list of 2-tuples)
+    #     will then be present in the command object by the time
+    #     we get to finalize_options() (i.e. the constructor
+    #     takes care of both command-line and client options
+    #     in between initialize_options() and finalize_options())
+
+    sep_by = " (separated by '%s')" % os.pathsep
+    user_options = [
+        ('build-lib=', 'b',
+         "directory for compiled extension modules"),
+        ('build-temp=', 't',
+         "directory for temporary files (build by-products)"),
+        ('plat-name=', 'p',
+         "platform name to cross-compile for, if supported "
+         "(default: %s)" % get_platform()),
+        ('inplace', 'i',
+         "ignore build-lib and put compiled extensions into the source " +
+         "directory alongside your pure Python modules"),
+        ('include-dirs=', 'I',
+         "list of directories to search for header files" + sep_by),
+        ('define=', 'D',
+         "C preprocessor macros to define"),
+        ('undef=', 'U',
+         "C preprocessor macros to undefine"),
+        ('libraries=', 'l',
+         "external C libraries to link with"),
+        ('library-dirs=', 'L',
+         "directories to search for external C libraries" + sep_by),
+        ('rpath=', 'R',
+         "directories to search for shared C libraries at runtime"),
+        ('link-objects=', 'O',
+         "extra explicit link objects to include in the link"),
+        ('debug', 'g',
+         "compile/link with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ('swig-cpp', None,
+         "make SWIG create C++ files (default is C)"),
+        ('swig-opts=', None,
+         "list of SWIG command line options"),
+        ('swig=', None,
+         "path to the SWIG executable"),
+        ('user', None,
+         "add user include, library and rpath"),
+        ]
+
+    boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options (self):
+        self.extensions = None
+        self.build_lib = None
+        self.plat_name = None
+        self.build_temp = None
+        self.inplace = 0
+        self.package = None
+
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.libraries = None
+        self.library_dirs = None
+        self.rpath = None
+        self.link_objects = None
+        self.debug = None
+        self.force = None
+        self.compiler = None
+        self.swig = None
+        self.swig_cpp = None
+        self.swig_opts = None
+        self.user = None
+
+    def finalize_options(self):
+        from distutils import sysconfig
+
+        self.set_undefined_options('build',
+                                   ('build_lib', 'build_lib'),
+                                   ('build_temp', 'build_temp'),
+                                   ('compiler', 'compiler'),
+                                   ('debug', 'debug'),
+                                   ('force', 'force'),
+                                   ('plat_name', 'plat_name'),
+                                   )
+
+        if self.package is None:
+            self.package = self.distribution.ext_package
+
+        self.extensions = self.distribution.ext_modules
+
+        # Make sure Python's include directories (for Python.h, pyconfig.h,
+        # etc.) are in the include search path.
+        py_include = sysconfig.get_python_inc()
+        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        if isinstance(self.include_dirs, str):
+            self.include_dirs = self.include_dirs.split(os.pathsep)
+
+        # Put the Python "system" include dir at the end, so that
+        # any local include dirs take precedence.
+        self.include_dirs.append(py_include)
+        if plat_py_include != py_include:
+            self.include_dirs.append(plat_py_include)
+
+        # First assume we are running from a build tree, not an installed Python.
+        slpy_include = os.path.join(sysconfig.PREFIX, "Stackless")
+        if os.path.exists(slpy_include):
+            self.include_dirs.append(slpy_include)
+        else:
+            # Fall back on the installed include directory.
+            slpy_include = os.path.join(py_include, "Stackless")
+            if os.path.exists(slpy_include):
+                self.include_dirs.append(slpy_include)
+
+        if isinstance(self.libraries, str):
+            self.libraries = [self.libraries]
+
+        # Life is easier if we're not forever checking for None, so
+        # simplify these options to empty lists if unset
+        if self.libraries is None:
+            self.libraries = []
+        if self.library_dirs is None:
+            self.library_dirs = []
+        elif type(self.library_dirs) is StringType:
+            self.library_dirs = string.split(self.library_dirs, os.pathsep)
+
+        if self.rpath is None:
+            self.rpath = []
+        elif type(self.rpath) is StringType:
+            self.rpath = string.split(self.rpath, os.pathsep)
+
+        # for extensions under windows use different directories
+        # for Release and Debug builds.
+        # also Python's library directory must be appended to library_dirs
+        if os.name == 'nt':
+            # the 'libs' directory is for binary installs - we assume that
+            # must be the *native* platform.  But we don't really support
+            # cross-compiling via a binary install anyway, so we let it go.
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+            if self.debug:
+                self.build_temp = os.path.join(self.build_temp, "Debug")
+            else:
+                self.build_temp = os.path.join(self.build_temp, "Release")
+
+            # Append the source distribution include and library directories,
+            # this allows distutils on windows to work in the source tree
+            self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
+            if MSVC_VERSION == 9:
+                # Use the .lib files for the correct architecture
+                if self.plat_name == 'win32':
+                    suffix = ''
+                else:
+                    # win-amd64 or win-ia64
+                    suffix = self.plat_name[4:]
+                new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
+                if suffix:
+                    new_lib = os.path.join(new_lib, suffix)
+                self.library_dirs.append(new_lib)
+
+            elif MSVC_VERSION == 8:
+                self.library_dirs.append(os.path.join(sys.exec_prefix,
+                                         'PC', 'VS8.0'))
+            elif MSVC_VERSION == 7:
+                self.library_dirs.append(os.path.join(sys.exec_prefix,
+                                         'PC', 'VS7.1'))
+            else:
+                self.library_dirs.append(os.path.join(sys.exec_prefix,
+                                         'PC', 'VC6'))
+
+        # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
+        # import libraries in its "Config" subdirectory
+        if os.name == 'os2':
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
+
+        # for extensions under Cygwin and AtheOS Python's library directory must be
+        # appended to library_dirs
+        if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
+            if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+                # building third party extensions
+                self.library_dirs.append(os.path.join(sys.prefix, "lib",
+                                                      "python" + get_python_version(),
+                                                      "config"))
+            else:
+                # building python standard extensions
+                self.library_dirs.append('.')
+
+        # for extensions under Linux or Solaris with a shared Python library,
+        # Python's library directory must be appended to library_dirs
+        sysconfig.get_config_var('Py_ENABLE_SHARED')
+        if ((sys.platform.startswith('linux') or sys.platform.startswith('gnu')
+             or sys.platform.startswith('sunos'))
+            and sysconfig.get_config_var('Py_ENABLE_SHARED')):
+            if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+                # building third party extensions
+                self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
+            else:
+                # building python standard extensions
+                self.library_dirs.append('.')
+
+        # The argument parsing will result in self.define being a string, but
+        # it has to be a list of 2-tuples.  All the preprocessor symbols
+        # specified by the 'define' option will be set to '1'.  Multiple
+        # symbols can be separated with commas.
+
+        if self.define:
+            defines = self.define.split(',')
+            self.define = map(lambda symbol: (symbol, '1'), defines)
+
+        # The option for macros to undefine is also a string from the
+        # option parsing, but has to be a list.  Multiple symbols can also
+        # be separated with commas here.
+        if self.undef:
+            self.undef = self.undef.split(',')
+
+        if self.swig_opts is None:
+            self.swig_opts = []
+        else:
+            self.swig_opts = self.swig_opts.split(' ')
+
+        # Finally add the user include and library directories if requested
+        if self.user:
+            user_include = os.path.join(USER_BASE, "include")
+            user_lib = os.path.join(USER_BASE, "lib")
+            if os.path.isdir(user_include):
+                self.include_dirs.append(user_include)
+            if os.path.isdir(user_lib):
+                self.library_dirs.append(user_lib)
+                self.rpath.append(user_lib)
+
+    def run(self):
+        from distutils.ccompiler import new_compiler
+
+        # 'self.extensions', as supplied by setup.py, is a list of
+        # Extension instances.  See the documentation for Extension (in
+        # distutils.extension) for details.
+        #
+        # For backwards compatibility with Distutils 0.8.2 and earlier, we
+        # also allow the 'extensions' list to be a list of tuples:
+        #    (ext_name, build_info)
+        # where build_info is a dictionary containing everything that
+        # Extension instances do except the name, with a few things being
+        # differently named.  We convert these 2-tuples to Extension
+        # instances as needed.
+
+        if not self.extensions:
+            return
+
+        # If we were asked to build any C/C++ libraries, make sure that the
+        # directory where we put them is in the library search path for
+        # linking extensions.
+        if self.distribution.has_c_libraries():
+            build_clib = self.get_finalized_command('build_clib')
+            self.libraries.extend(build_clib.get_library_names() or [])
+            self.library_dirs.append(build_clib.build_clib)
+
+        # Setup the CCompiler object that we'll use to do all the
+        # compiling and linking
+        self.compiler = new_compiler(compiler=self.compiler,
+                                     verbose=self.verbose,
+                                     dry_run=self.dry_run,
+                                     force=self.force)
+        customize_compiler(self.compiler)
+        # If we are cross-compiling, init the compiler now (if we are not
+        # cross-compiling, init would not hurt, but people may rely on
+        # late initialization of compiler even if they shouldn't...)
+        if os.name == 'nt' and self.plat_name != get_platform():
+            self.compiler.initialize(self.plat_name)
+
+        # And make sure that any compile/link-related options (which might
+        # come from the command-line or from the setup script) are set in
+        # that CCompiler object -- that way, they automatically apply to
+        # all compiling and linking done here.
+        if self.include_dirs is not None:
+            self.compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for (name, value) in self.define:
+                self.compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                self.compiler.undefine_macro(macro)
+        if self.libraries is not None:
+            self.compiler.set_libraries(self.libraries)
+        if self.library_dirs is not None:
+            self.compiler.set_library_dirs(self.library_dirs)
+        if self.rpath is not None:
+            self.compiler.set_runtime_library_dirs(self.rpath)
+        if self.link_objects is not None:
+            self.compiler.set_link_objects(self.link_objects)
+
+        # Now actually compile and link everything.
+        self.build_extensions()
+
+    def check_extensions_list(self, extensions):
+        """Ensure that the list of extensions (presumably provided as a
+        command option 'extensions') is valid, i.e. it is a list of
+        Extension objects.  We also support the old-style list of 2-tuples,
+        where the tuples are (ext_name, build_info), which are converted to
+        Extension instances here.
+
+        Raise DistutilsSetupError if the structure is invalid anywhere;
+        just returns otherwise.
+        """
+        if not isinstance(extensions, list):
+            raise DistutilsSetupError, \
+                  "'ext_modules' option must be a list of Extension instances"
+
+        for i, ext in enumerate(extensions):
+            if isinstance(ext, Extension):
+                continue                # OK! (assume type-checking done
+                                        # by Extension constructor)
+
+            if not isinstance(ext, tuple) or len(ext) != 2:
+                raise DistutilsSetupError, \
+                      ("each element of 'ext_modules' option must be an "
+                       "Extension instance or 2-tuple")
+
+            ext_name, build_info = ext
+
+            log.warn(("old-style (ext_name, build_info) tuple found in "
+                      "ext_modules for extension '%s'"
+                      "-- please convert to Extension instance" % ext_name))
+
+            if not (isinstance(ext_name, str) and
+                    extension_name_re.match(ext_name)):
+                raise DistutilsSetupError, \
+                      ("first element of each tuple in 'ext_modules' "
+                       "must be the extension name (a string)")
+
+            if not isinstance(build_info, dict):
+                raise DistutilsSetupError, \
+                      ("second element of each tuple in 'ext_modules' "
+                       "must be a dictionary (build info)")
+
+            # OK, the (ext_name, build_info) dict is type-safe: convert it
+            # to an Extension instance.
+            ext = Extension(ext_name, build_info['sources'])
+
+            # Easy stuff: one-to-one mapping from dict elements to
+            # instance attributes.
+            for key in ('include_dirs', 'library_dirs', 'libraries',
+                        'extra_objects', 'extra_compile_args',
+                        'extra_link_args'):
+                val = build_info.get(key)
+                if val is not None:
+                    setattr(ext, key, val)
+
+            # Medium-easy stuff: same syntax/semantics, different names.
+            ext.runtime_library_dirs = build_info.get('rpath')
+            if 'def_file' in build_info:
+                log.warn("'def_file' element of build info dict "
+                         "no longer supported")
+
+            # Non-trivial stuff: 'macros' split into 'define_macros'
+            # and 'undef_macros'.
+            macros = build_info.get('macros')
+            if macros:
+                ext.define_macros = []
+                ext.undef_macros = []
+                for macro in macros:
+                    if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
+                        raise DistutilsSetupError, \
+                              ("'macros' element of build info dict "
+                               "must be 1- or 2-tuple")
+                    if len(macro) == 1:
+                        ext.undef_macros.append(macro[0])
+                    elif len(macro) == 2:
+                        ext.define_macros.append(macro)
+
+            extensions[i] = ext
+
+    def get_source_files(self):
+        self.check_extensions_list(self.extensions)
+        filenames = []
+
+        # Wouldn't it be neat if we knew the names of header files too...
+        for ext in self.extensions:
+            filenames.extend(ext.sources)
+
+        return filenames
+
+    def get_outputs(self):
+        # Sanity check the 'extensions' list -- can't assume this is being
+        # done in the same run as a 'build_extensions()' call (in fact, we
+        # can probably assume that it *isn't*!).
+        self.check_extensions_list(self.extensions)
+
+        # And build the list of output (built) filenames.  Note that this
+        # ignores the 'inplace' flag, and assumes everything goes in the
+        # "build" tree.
+        outputs = []
+        for ext in self.extensions:
+            outputs.append(self.get_ext_fullpath(ext.name))
+        return outputs
+
+    def build_extensions(self):
+        # First, sanity-check the 'extensions' list
+        self.check_extensions_list(self.extensions)
+
+        for ext in self.extensions:
+            self.build_extension(ext)
+
+    def build_extension(self, ext):
+        sources = ext.sources
+        if sources is None or type(sources) not in (ListType, TupleType):
+            raise DistutilsSetupError, \
+                  ("in 'ext_modules' option (extension '%s'), " +
+                   "'sources' must be present and must be " +
+                   "a list of source filenames") % ext.name
+        sources = list(sources)
+
+        ext_path = self.get_ext_fullpath(ext.name)
+        depends = sources + ext.depends
+        if not (self.force or newer_group(depends, ext_path, 'newer')):
+            log.debug("skipping '%s' extension (up-to-date)", ext.name)
+            return
+        else:
+            log.info("building '%s' extension", ext.name)
+
+        # First, scan the sources for SWIG definition files (.i), run
+        # SWIG on 'em to create .c files, and modify the sources list
+        # accordingly.
+        sources = self.swig_sources(sources, ext)
+
+        # Next, compile the source code to object files.
+
+        # XXX not honouring 'define_macros' or 'undef_macros' -- the
+        # CCompiler API needs to change to accommodate this, and I
+        # want to do one thing at a time!
+
+        # Two possible sources for extra compiler arguments:
+        #   - 'extra_compile_args' in Extension object
+        #   - CFLAGS environment variable (not particularly
+        #     elegant, but people seem to expect it and I
+        #     guess it's useful)
+        # The environment variable should take precedence, and
+        # any sensible compiler will give precedence to later
+        # command line args.  Hence we combine them in order:
+        extra_args = ext.extra_compile_args or []
+
+        macros = ext.define_macros[:]
+        for undef in ext.undef_macros:
+            macros.append((undef,))
+
+        objects = self.compiler.compile(sources,
+                                         output_dir=self.build_temp,
+                                         macros=macros,
+                                         include_dirs=ext.include_dirs,
+                                         debug=self.debug,
+                                         extra_postargs=extra_args,
+                                         depends=ext.depends)
+
+        # XXX -- this is a Vile HACK!
+        #
+        # The setup.py script for Python on Unix needs to be able to
+        # get this list so it can perform all the clean up needed to
+        # avoid keeping object files around when cleaning out a failed
+        # build of an extension module.  Since Distutils does not
+        # track dependencies, we have to get rid of intermediates to
+        # ensure all the intermediates will be properly re-built.
+        #
+        self._built_objects = objects[:]
+
+        # Now link the object files together into a "shared object" --
+        # of course, first we have to figure out all the other things
+        # that go into the mix.
+        if ext.extra_objects:
+            objects.extend(ext.extra_objects)
+        extra_args = ext.extra_link_args or []
+
+        # Detect target language, if not provided
+        language = ext.language or self.compiler.detect_language(sources)
+
+        self.compiler.link_shared_object(
+            objects, ext_path,
+            libraries=self.get_libraries(ext),
+            library_dirs=ext.library_dirs,
+            runtime_library_dirs=ext.runtime_library_dirs,
+            extra_postargs=extra_args,
+            export_symbols=self.get_export_symbols(ext),
+            debug=self.debug,
+            build_temp=self.build_temp,
+            target_lang=language)
+
+
+    def swig_sources (self, sources, extension):
+
+        """Walk the list of source files in 'sources', looking for SWIG
+        interface (.i) files.  Run SWIG on all that are found, and
+        return a modified 'sources' list with SWIG source files replaced
+        by the generated C (or C++) files.
+        """
+
+        new_sources = []
+        swig_sources = []
+        swig_targets = {}
+
+        # XXX this drops generated C/C++ files into the source tree, which
+        # is fine for developers who want to distribute the generated
+        # source -- but there should be an option to put SWIG output in
+        # the temp dir.
+
+        if self.swig_cpp:
+            log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")
+
+        if self.swig_cpp or ('-c++' in self.swig_opts) or \
+           ('-c++' in extension.swig_opts):
+            target_ext = '.cpp'
+        else:
+            target_ext = '.c'
+
+        for source in sources:
+            (base, ext) = os.path.splitext(source)
+            if ext == ".i":             # SWIG interface file
+                new_sources.append(base + '_wrap' + target_ext)
+                swig_sources.append(source)
+                swig_targets[source] = new_sources[-1]
+            else:
+                new_sources.append(source)
+
+        if not swig_sources:
+            return new_sources
+
+        swig = self.swig or self.find_swig()
+        swig_cmd = [swig, "-python"]
+        swig_cmd.extend(self.swig_opts)
+        if self.swig_cpp:
+            swig_cmd.append("-c++")
+
+        # Do not override commandline arguments
+        if not self.swig_opts:
+            for o in extension.swig_opts:
+                swig_cmd.append(o)
+
+        for source in swig_sources:
+            target = swig_targets[source]
+            log.info("swigging %s to %s", source, target)
+            self.spawn(swig_cmd + ["-o", target, source])
+
+        return new_sources
+
+    # swig_sources ()
+
+    def find_swig (self):
+        """Return the name of the SWIG executable.  On Unix, this is
+        just "swig" -- it should be in the PATH.  Tries a bit harder on
+        Windows.
+        """
+
+        if os.name == "posix":
+            return "swig"
+        elif os.name == "nt":
+
+            # Look for SWIG in its standard installation directory on
+            # Windows (or so I presume!).  If we find it there, great;
+            # if not, act like Unix and assume it's in the PATH.
+            for vers in ("1.3", "1.2", "1.1"):
+                fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+                if os.path.isfile(fn):
+                    return fn
+            else:
+                return "swig.exe"
+
+        elif os.name == "os2":
+            # assume swig available in the PATH.
+            return "swig.exe"
+
+        else:
+            raise DistutilsPlatformError, \
+                  ("I don't know how to find (much less run) SWIG "
+                   "on platform '%s'") % os.name
+
+    # find_swig ()
+
+    # -- Name generators -----------------------------------------------
+    # (extension names, filenames, whatever)
+    def get_ext_fullpath(self, ext_name):
+        """Returns the path of the filename for a given extension.
+
+        The file is located in `build_lib` or directly in the package
+        (inplace option).
+        """
+        # makes sure the extension name is only using dots
+        all_dots = string.maketrans('/'+os.sep, '..')
+        ext_name = ext_name.translate(all_dots)
+
+        fullname = self.get_ext_fullname(ext_name)
+        modpath = fullname.split('.')
+        filename = self.get_ext_filename(ext_name)
+        filename = os.path.split(filename)[-1]
+
+        if not self.inplace:
+            # no further work needed
+            # returning :
+            #   build_dir/package/path/filename
+            filename = os.path.join(*modpath[:-1]+[filename])
+            return os.path.join(self.build_lib, filename)
+
+        # the inplace option requires to find the package directory
+        # using the build_py command for that
+        package = '.'.join(modpath[0:-1])
+        build_py = self.get_finalized_command('build_py')
+        package_dir = os.path.abspath(build_py.get_package_dir(package))
+
+        # returning
+        #   package_dir/filename
+        return os.path.join(package_dir, filename)
+
+    def get_ext_fullname(self, ext_name):
+        """Returns the fullname of a given extension name.
+
+        Adds the `package.` prefix"""
+        if self.package is None:
+            return ext_name
+        else:
+            return self.package + '.' + ext_name
+
+    def get_ext_filename(self, ext_name):
+        r"""Convert the name of an extension (eg. "foo.bar") into the name
+        of the file from which it will be loaded (eg. "foo/bar.so", or
+        "foo\bar.pyd").
+        """
+        from distutils.sysconfig import get_config_var
+        ext_path = string.split(ext_name, '.')
+        # OS/2 has an 8 character module (extension) limit :-(
+        if os.name == "os2":
+            ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
+        # extensions in debug_mode are named 'module_d.pyd' under windows
+        so_ext = get_config_var('SO')
+        if os.name == 'nt' and self.debug:
+            return os.path.join(*ext_path) + '_d' + so_ext
+        return os.path.join(*ext_path) + so_ext
+
+    def get_export_symbols (self, ext):
+        """Return the list of symbols that a shared extension has to
+        export.  This either uses 'ext.export_symbols' or, if it's not
+        provided, "init" + module_name.  Only relevant on Windows, where
+        the .pyd file (DLL) must export the module "init" function.
+        """
+        initfunc_name = "init" + ext.name.split('.')[-1]
+        if initfunc_name not in ext.export_symbols:
+            ext.export_symbols.append(initfunc_name)
+        return ext.export_symbols
+
+    def get_libraries (self, ext):
+        """Return the list of libraries to link against when building a
+        shared extension.  On most platforms, this is just 'ext.libraries';
+        on Windows and OS/2, we add the Python library (eg. python20.dll).
+        """
+        # The python library is always needed on Windows.  For MSVC, this
+        # is redundant, since the library is mentioned in a pragma in
+        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
+        # to need it mentioned explicitly, though, so that's what we do.
+        # Append '_d' to the python import library on debug builds.
+        if sys.platform == "win32":
+            from distutils.msvccompiler import MSVCCompiler
+            if not isinstance(self.compiler, MSVCCompiler):
+                template = "python%d%d"
+                if self.debug:
+                    template = template + '_d'
+                pythonlib = (template %
+                       (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+                # don't extend ext.libraries, it may be shared with other
+                # extensions, it is a reference to the original list
+                return ext.libraries + [pythonlib]
+            else:
+                return ext.libraries
+        elif sys.platform == "os2emx":
+            # EMX/GCC requires the python library explicitly, and I
+            # believe VACPP does as well (though not confirmed) - AIM Apr01
+            template = "python%d%d"
+            # debug versions of the main DLL aren't supported, at least
+            # not at this time - AIM Apr01
+            #if self.debug:
+            #    template = template + '_d'
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib]
+        elif sys.platform[:6] == "cygwin":
+            template = "python%d.%d"
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib]
+        elif sys.platform[:6] == "atheos":
+            from distutils import sysconfig
+
+            template = "python%d.%d"
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # Get SHLIBS from Makefile
+            extra = []
+            for lib in sysconfig.get_config_var('SHLIBS').split():
+                if lib.startswith('-l'):
+                    extra.append(lib[2:])
+                else:
+                    extra.append(lib)
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib, "m"] + extra
+
+        elif sys.platform == 'darwin':
+            # Don't use the default code below
+            return ext.libraries
+        elif sys.platform[:3] == 'aix':
+            # Don't use the default code below
+            return ext.libraries
+        else:
+            from distutils import sysconfig
+            if sysconfig.get_config_var('Py_ENABLE_SHARED'):
+                template = "python%d.%d"
+                pythonlib = (template %
+                             (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+                return ext.libraries + [pythonlib]
+            else:
+                return ext.libraries
+
+# class build_ext
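
The Stackless-specific change in this file is in finalize_options(): it
appends <prefix>/Stackless (or, on an installed Python, the Stackless
subdirectory of the include directory) to the header search path, so
extension builds find the Stackless headers without touching their
setup.py. A minimal setup.py that this build_ext would compile (demo.c is
a hypothetical source file):

    from distutils.core import setup, Extension

    setup(
        name='demo',
        version='0.1',
        # no include_dirs needed here: the patched build_ext adds the
        # Stackless header directory automatically
        ext_modules=[Extension('demo', sources=['demo.c'])],
    )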

C2_win32_2.7/slpInstaller/Lib/pickle.py

+"""Create portable serialized representations of Python objects.
+
+See module cPickle for a (much) faster implementation.
+See module copy_reg for a mechanism for registering custom picklers.
+See module pickletools source for extensive comments.
+
+Classes:
+
+    Pickler
+    Unpickler
+
+Functions:
+
+    dump(object, file)
+    dumps(object) -> string
+    load(file) -> object
+    loads(string) -> object
+
+Misc variables:
+
+    __version__
+    format_version
+    compatible_formats
+
+"""
+
+__version__ = "$Revision$"       # Code version
+
+from types import *
+from copy_reg import dispatch_table
+from copy_reg import _extension_registry, _inverted_registry, _extension_cache
+import marshal
+import sys
+import struct
+import re
+
+__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
+           "Unpickler", "dump", "dumps", "load", "loads"]
+
+# These are purely informational; no code uses these.
+format_version = "2.0"                  # File format version we write
+compatible_formats = ["1.0",            # Original protocol 0
+                      "1.1",            # Protocol 0 with INST added
+                      "1.2",            # Original protocol 1
+                      "1.3",            # Protocol 1 with BINFLOAT added
+                      "2.0",            # Protocol 2
+                      ]                 # Old format versions we can read
+
+# Keep in synch with cPickle.  This is the highest protocol number we
+# know how to read.
+HIGHEST_PROTOCOL = 2
+
+# Why use struct.pack() for pickling but marshal.loads() for
+# unpickling?  struct.pack() is 40% faster than marshal.dumps(), but
+# marshal.loads() is twice as fast as struct.unpack()!
+mloads = marshal.loads
+
+class PickleError(Exception):
+    """A common base class for the other pickling exceptions."""
+    pass
+
+class PicklingError(PickleError):
+    """This exception is raised when an unpicklable object is passed to the
+    dump() method.
+
+    """
+    pass
+
+class UnpicklingError(PickleError):
+    """This exception is raised when there is a problem unpickling an object,
+    such as a security violation.
+
+    Note that other exceptions may also be raised during unpickling, including
+    (but not necessarily limited to) AttributeError, EOFError, ImportError,
+    and IndexError.
+
+    """
+    pass
+
+# An instance of _Stop is raised by Unpickler.load_stop() in response to
+# the STOP opcode, passing the object that is the result of unpickling.
+class _Stop(Exception):
+    def __init__(self, value):
+        self.value = value
+
+# Jython has PyStringMap; it's a dict subclass with string keys
+try:
+    from org.python.core import PyStringMap
+except ImportError:
+    PyStringMap = None
+
+# UnicodeType may or may not be exported (normally imported from types)
+try:
+    UnicodeType
+except NameError:
+    UnicodeType = None
+
+# Pickle opcodes.  See pickletools.py for extensive docs.  The listing
+# here is in kind-of alphabetical order of 1-character pickle code.
+# pickletools groups them by purpose.
+
+MARK            = '('   # push special markobject on stack
+STOP            = '.'   # every pickle ends with STOP
+POP             = '0'   # discard topmost stack item
+POP_MARK        = '1'   # discard stack top through topmost markobject
+DUP             = '2'   # duplicate top stack item
+FLOAT           = 'F'   # push float object; decimal string argument
+INT             = 'I'   # push integer or bool; decimal string argument
+BININT          = 'J'   # push four-byte signed int
+BININT1         = 'K'   # push 1-byte unsigned int
+LONG            = 'L'   # push long; decimal string argument
+BININT2         = 'M'   # push 2-byte unsigned int
+NONE            = 'N'   # push None
+PERSID          = 'P'   # push persistent object; id is taken from string arg
+BINPERSID       = 'Q'   #  "       "         "  ;  "  "   "     "  stack
+REDUCE          = 'R'   # apply callable to argtuple, both on stack
+STRING          = 'S'   # push string; NL-terminated string argument
+BINSTRING       = 'T'   # push string; counted binary string argument
+SHORT_BINSTRING = 'U'   #  "     "   ;    "      "       "      " < 256 bytes
+UNICODE         = 'V'   # push Unicode string; raw-unicode-escaped'd argument
+BINUNICODE      = 'X'   #   "     "       "  ; counted UTF-8 string argument
+APPEND          = 'a'   # append stack top to list below it
+BUILD           = 'b'   # call __setstate__ or __dict__.update()
+GLOBAL          = 'c'   # push self.find_class(modname, name); 2 string args
+DICT            = 'd'   # build a dict from stack items
+EMPTY_DICT      = '}'   # push empty dict
+APPENDS         = 'e'   # extend list on stack by topmost stack slice
+GET             = 'g'   # push item from memo on stack; index is string arg
+BINGET          = 'h'   #   "    "    "    "   "   "  ;   "    " 1-byte arg
+INST            = 'i'   # build & push class instance
+LONG_BINGET     = 'j'   # push item from memo on stack; index is 4-byte arg
+LIST            = 'l'   # build list from topmost stack items
+EMPTY_LIST      = ']'   # push empty list
+OBJ             = 'o'   # build & push class instance
+PUT             = 'p'   # store stack top in memo; index is string arg
+BINPUT          = 'q'   #   "     "    "   "   " ;   "    " 1-byte arg
+LONG_BINPUT     = 'r'   #   "     "    "   "   " ;   "    " 4-byte arg
+SETITEM         = 's'   # add key+value pair to dict
+TUPLE           = 't'   # build tuple from topmost stack items
+EMPTY_TUPLE     = ')'   # push empty tuple
+SETITEMS        = 'u'   # modify dict by adding topmost key+value pairs
+BINFLOAT        = 'G'   # push float; arg is 8-byte float encoding
+
+TRUE            = 'I01\n'  # not an opcode; see INT docs in pickletools.py
+FALSE           = 'I00\n'  # not an opcode; see INT docs in pickletools.py
+
+# Protocol 2
+
+PROTO           = '\x80'  # identify pickle protocol
+NEWOBJ          = '\x81'  # build object by applying cls.__new__ to argtuple
+EXT1            = '\x82'  # push object from extension registry; 1-byte index
+EXT2            = '\x83'  # ditto, but 2-byte index
+EXT4            = '\x84'  # ditto, but 4-byte index
+TUPLE1          = '\x85'  # build 1-tuple from stack top
+TUPLE2          = '\x86'  # build 2-tuple from two topmost stack items
+TUPLE3          = '\x87'  # build 3-tuple from three topmost stack items
+NEWTRUE         = '\x88'  # push True
+NEWFALSE        = '\x89'  # push False
+LONG1           = '\x8a'  # push long from < 256 bytes
+LONG4           = '\x8b'  # push really big long
+
+_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]
+
+
+__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)])
+del x
+
+
+# Pickling machinery
+
+class Pickler:
+
+    def __init__(self, file, protocol=None):
+        """This takes a file-like object for writing a pickle data stream.
+
+        The optional protocol argument tells the pickler to use the
+        given protocol; supported protocols are 0, 1, 2.  The default
+        protocol is 0, to be backwards compatible.  (Protocol 0 is the
+        only protocol that can be written to a file opened in text
+        mode and read back successfully.  When using a protocol higher
+        than 0, make sure the file is opened in binary mode, both when
+        pickling and unpickling.)
+
+        Protocol 1 is more efficient than protocol 0; protocol 2 is
+        more efficient than protocol 1.
+
+        Specifying a negative protocol version selects the highest
+        protocol version supported.  The higher the protocol used, the
+        more recent the version of Python needed to read the pickle
+        produced.
+
+        The file parameter must have a write() method that accepts a single
+        string argument.  It can thus be an open file object, a StringIO
+        object, or any other custom object that meets this interface.
+
+        """
+        if protocol is None:
+            protocol = 0
+        if protocol < 0:
+            protocol = HIGHEST_PROTOCOL
+        elif not 0 <= protocol <= HIGHEST_PROTOCOL:
+            raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
+        self.write = file.write
+        self.memo = {}
+        self.proto = int(protocol)
+        self.bin = protocol >= 1
+        self.fast = 0
+        ## Stackless addition BEGIN
+        try:
+            from stackless import _pickle_moduledict
+        except ImportError:
+            _pickle_moduledict = lambda self, obj: None
+        self._pickle_moduledict = _pickle_moduledict
+        ## Stackless addition END
+
+    def clear_memo(self):
+        """Clears the pickler's "memo".
+
+        The memo is the data structure that remembers which objects the
+        pickler has already seen, so that shared or recursive objects are
+        pickled by reference and not by value.  This method is useful when
+        re-using picklers.
+
+        """
+        self.memo.clear()
+
+    def dump(self, obj):
+        """Write a pickled representation of obj to the open file."""
+        if self.proto >= 2:
+            self.write(PROTO + chr(self.proto))
+        self.save(obj)
+        self.write(STOP)
+
+    def memoize(self, obj):
+        """Store an object in the memo."""
+
+        # The Pickler memo is a dictionary mapping object ids to 2-tuples
+        # that contain the Unpickler memo key and the object being memoized.
+        # The memo key is written to the pickle and will become
+        # the key in the Unpickler's memo.  The object is stored in the
+        # Pickler memo so that transient objects are kept alive during
+        # pickling.
+
+        # The use of the Unpickler memo length as the memo key is just a
+        # convention.  The only requirement is that the memo values be unique.
+        # But there appears no advantage to any other scheme, and this
+        # scheme allows the Unpickler memo to be implemented as a plain (but
+        # growable) array, indexed by memo key.
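+        # For instance, after memoizing two distinct objects a and b:
+        #     self.memo == {id(a): (0, a), id(b): (1, b)}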
+        if self.fast:
+            return
+        assert id(obj) not in self.memo
+        memo_len = len(self.memo)
+        self.write(self.put(memo_len))
+        self.memo[id(obj)] = memo_len, obj
+
+    # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
+    def put(self, i, pack=struct.pack):
+        if self.bin:
+            if i < 256:
+                return BINPUT + chr(i)
+            else:
+                return LONG_BINPUT + pack("<i", i)
+
+        return PUT + repr(i) + '\n'
+
+    # Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
+    def get(self, i, pack=struct.pack):
+        if self.bin:
+            if i < 256:
+                return BINGET + chr(i)
+            else:
+                return LONG_BINGET + pack("<i", i)
+
+        return GET + repr(i) + '\n'
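+
+    # For example (illustrative): put(1) yields 'q\x01' (BINPUT) in binary
+    # mode and 'p1\n' (PUT) in text mode; get(1) yields 'h\x01' (BINGET)
+    # and 'g1\n' (GET) respectively.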
+
+    def save(self, obj):
+        # Check for persistent id (defined by a subclass)
+        pid = self.persistent_id(obj)
+        if pid:
+            self.save_pers(pid)
+            return
+
+        # Check the memo
+        x = self.memo.get(id(obj))
+        if x:
+            self.write(self.get(x[0]))
+            return
+
+        # Check the type dispatch table
+        t = type(obj)
+        f = self.dispatch.get(t)
+        if f:
+            f(self, obj) # Call unbound method with explicit self
+            return
+
+        # Check for a class with a custom metaclass; treat as regular class
+        try:
+            issc = issubclass(t, TypeType)
+        except TypeError: # t is not a class (old Boost; see SF #502085)
+            issc = 0
+        if issc:
+            self.save_global(obj)
+            return
+
+        # Check copy_reg.dispatch_table
+        reduce = dispatch_table.get(t)
+        if reduce:
+            rv = reduce(obj)
+        else:
+            # Check for a __reduce_ex__ method, fall back to __reduce__
+            reduce = getattr(obj, "__reduce_ex__", None)
+            if reduce:
+                rv = reduce(self.proto)
+            else:
+                reduce = getattr(obj, "__reduce__", None)
+                if reduce:
+                    rv = reduce()
+                else:
+                    raise PicklingError("Can't pickle %r object: %r" %
+                                        (t.__name__, obj))
+
+        # Check for string returned by reduce(), meaning "save as global"
+        if type(rv) is StringType:
+            self.save_global(obj, rv)
+            return
+
+        # Assert that reduce() returned a tuple
+        if type(rv) is not TupleType:
+            raise PicklingError("%s must return string or tuple" % reduce)
+
+        # Assert that it returned an appropriately sized tuple
+        l = len(rv)
+        if not (2 <= l <= 5):
+            raise PicklingError("Tuple returned by %s must have "
+                                "two to five elements" % reduce)
+
+        # Save the reduce() output and finally memoize the object
+        self.save_reduce(obj=obj, *rv)
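+
+        # A typical tuple handled above looks like (illustrative sketch;
+        # Spam stands for a hypothetical new-style class):
+        #     (copy_reg._reconstructor, (Spam, object, None), obj.__dict__)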
+
+    def persistent_id(self, obj):
+        # This exists so a subclass can override it
+        return None
+
+    def save_pers(self, pid):
+        # Save a persistent id reference
+        if self.bin:
+            self.save(pid)
+            self.write(BINPERSID)
+        else:
+            self.write(PERSID + str(pid) + '\n')
+
+    def save_reduce(self, func, args, state=None,
+                    listitems=None, dictitems=None, obj=None):
+        # This API is called by some subclasses
+
+        # Assert that args is a tuple or None
+        if not isinstance(args, TupleType):
+            raise PicklingError("args from reduce() should be a tuple")
+
+        # Assert that func is callable
+        if not hasattr(func, '__call__'):
+            raise PicklingError("func from reduce should be callable")
+
+        save = self.save
+        write = self.write
+
+        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
+        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
+            # A __reduce__ implementation can direct protocol 2 to
+            # use the more efficient NEWOBJ opcode, while still
+            # allowing protocol 0 and 1 to work normally.  For this to
+            # work, the function returned by __reduce__ should be
+            # called __newobj__, and its first argument should be a
+            # new-style class.  The implementation for __newobj__
+            # should be as follows, although pickle has no way to
+            # verify this:
+            #
+            # def __newobj__(cls, *args):
+            #     return cls.__new__(cls, *args)
+            #
+            # Protocols 0 and 1 will pickle a reference to __newobj__,
+            # while protocol 2 (and above) will pickle a reference to
+            # cls, the remaining args tuple, and the NEWOBJ code,
+            # which calls cls.__new__(cls, *args) at unpickling time
+            # (see load_newobj below).  If __reduce__ returns a
+            # three-tuple, the state from the third tuple item will be
+            # pickled regardless of the protocol, calling __setstate__
+            # at unpickling time (see load_build below).
+            #
+            # Note that no standard __newobj__ implementation exists;
+            # you have to provide your own.  This is to enforce
+            # compatibility with Python 2.2 (pickles written using
+            # protocol 0 or 1 in Python 2.3 should be unpicklable by
+            # Python 2.2).
+            cls = args[0]
+            if not hasattr(cls, "__new__"):
+                raise PicklingError(
+                    "args[0] from __newobj__ args has no __new__")
+            if obj is not None and cls is not obj.__class__:
+                raise PicklingError(
+                    "args[0] from __newobj__ args has the wrong class")
+            args = args[1:]
+            save(cls)
+            save(args)
+            write(NEWOBJ)
+        else:
+            save(func)
+            save(args)
+            write(REDUCE)
+
+        if obj is not None:
+            self.memoize(obj)
+
+        # More new special cases (that work with older protocols as
+        # well): when __reduce__ returns a tuple with 4 or 5 items,
+        # the 4th and 5th item should be iterators that provide list
+        # items and dict items (as (key, value) tuples), or None.
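+        #
+        # Illustrative: a list subclass could return
+        #     (cls, (), self.__dict__, iter(self), None)
+        # from __reduce__ so its items stream through _batch_appends.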
+
+        if listitems is not None:
+            self._batch_appends(listitems)
+
+        if dictitems is not None:
+            self._batch_setitems(dictitems)
+
+        if state is not None:
+            save(state)
+            write(BUILD)
+
+    # Methods below this point are dispatched through the dispatch table
+
+    dispatch = {}
+
+    def save_none(self, obj):
+        self.write(NONE)
+    dispatch[NoneType] = save_none
+
+    def save_bool(self, obj):
+        if self.proto >= 2:
+            self.write(obj and NEWTRUE or NEWFALSE)
+        else:
+            self.write(obj and TRUE or FALSE)
+    dispatch[bool] = save_bool
+
+    def save_int(self, obj, pack=struct.pack):
+        if self.bin:
+            # If the int is small enough to fit in a signed 4-byte 2's-comp
+            # format, we can store it more efficiently than the general
+            # case.
+            # First one- and two-byte unsigned ints:
+            if obj >= 0:
+                if obj <= 0xff:
+                    self.write(BININT1 + chr(obj))
+                    return
+                if obj <= 0xffff:
+                    self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8))
+                    return
+            # Next check for 4-byte signed ints:
+            high_bits = obj >> 31  # note that Python shift sign-extends
+            if high_bits == 0 or high_bits == -1:
+                # All high bits are copies of bit 2**31, so the value
+                # fits in a 4-byte signed int.
+                self.write(BININT + pack("<i", obj))
+                return
+        # Text pickle, or int too big to fit in signed 4-byte format.
+        self.write(INT + repr(obj) + '\n')
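+        # Illustrative byte-level examples: 255 -> 'K\xff' (BININT1),
+        # 0x1234 -> 'M\x34\x12' (BININT2), -1 -> 'J\xff\xff\xff\xff' (BININT).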
+    dispatch[IntType] = save_int
+
+    def save_long(self, obj, pack=struct.pack):
+        if self.proto >= 2:
+            bytes = encode_long(obj)
+            n = len(bytes)
+            if n < 256:
+                self.write(LONG1 + chr(n) + bytes)
+            else:
+                self.write(LONG4 + pack("<i", n) + bytes)
+            return
+        self.write(LONG + repr(obj) + '\n')
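+        # Illustrative: encode_long(255L) == '\xff\x00', so protocol 2
+        # writes LONG1 + '\x02' + '\xff\x00' for 255L.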
+    dispatch[LongType] = save_long
+
+    def save_float(self, obj, pack=struct.pack):
+        if self.bin:
+            self.write(BINFLOAT + pack('>d', obj))
+        else:
+            self.write(FLOAT + repr(obj) + '\n')
+    dispatch[FloatType] = save_float
+
+    def save_string(self, obj, pack=struct.pack):
+        if self.bin:
+            n = len(obj)
+            if n < 256:
+                self.write(SHORT_BINSTRING + chr(n) + obj)
+            else:
+                self.write(BINSTRING + pack("<i", n) + obj)
+        else:
+            self.write(STRING + repr(obj) + '\n')
+        self.memoize(obj)
+    dispatch[StringType] = save_string
+
+    def save_unicode(self, obj, pack=struct.pack):
+        if self.bin:
+            encoding = obj.encode('utf-8')
+            n = len(encoding)
+            self.write(BINUNICODE + pack("<i", n) + encoding)
+        else:
+            obj = obj.replace("\\", "\\u005c")
+            obj = obj.replace("\n", "\\u000a")
+            self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n')
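+            # E.g. u'a\nb' goes out as 'Va\\u000ab\n' (illustrative), so
+            # the embedded newline cannot end the text-mode line early.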
+        self.memoize(obj)
+    dispatch[UnicodeType] = save_unicode
+
+    if StringType is UnicodeType:
+        # This is true for Jython
+        def save_string(self, obj, pack=struct.pack):
+            unicode = obj.isunicode()
+
+            if self.bin:
+                if unicode:
+                    obj = obj.encode("utf-8")
+                l = len(obj)
+                if l < 256 and not unicode:
+                    self.write(SHORT_BINSTRING + chr(l) + obj)
+                else:
+                    s = pack("<i", l)
+                    if unicode:
+                        self.write(BINUNICODE + s + obj)
+                    else:
+                        self.write(BINSTRING + s + obj)
+            else:
+                if unicode:
+                    obj = obj.replace("\\", "\\u005c")
+                    obj = obj.replace("\n", "\\u000a")
+                    obj = obj.encode('raw-unicode-escape')
+                    self.write(UNICODE + obj + '\n')
+                else:
+                    self.write(STRING + repr(obj) + '\n')
+            self.memoize(obj)
+        dispatch[StringType] = save_string
+
+    def save_tuple(self, obj):
+        write = self.write
+        proto = self.proto
+
+        n = len(obj)
+        if n == 0:
+            if proto:
+                write(EMPTY_TUPLE)
+            else:
+                write(MARK + TUPLE)
+            return
+
+        save = self.save
+        memo = self.memo
+        if n <= 3 and proto >= 2:
+            for element in obj:
+                save(element)
+            # Subtle.  Same as in the big comment below.
+            if id(obj) in memo:
+                get = self.get(memo[id(obj)][0])
+                write(POP * n + get)
+            else:
+                write(_tuplesize2code[n])
+                self.memoize(obj)
+            return
+
+        # proto 0 or 1 and the tuple isn't empty, or proto >= 2 and the
+        # tuple has more than 3 elements.
+        write(MARK)
+        for element in obj:
+            save(element)
+
+        if id(obj) in memo:
+            # Subtle.  obj was not in memo when we entered save_tuple(), so
+            # the process of saving the tuple's elements must have saved
+            # the tuple itself:  the tuple is recursive.  The proper action
+            # now is to throw away everything we put on the stack, and
+            # simply GET the tuple (it's already constructed).  This check
+            # could have been done in the "for element" loop instead, but
+            # recursive tuples are a rare thing.
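+            # (A tuple becomes recursive only indirectly; illustrative:
+            #  t = ([],); t[0].append(t) makes t reachable from t[0].)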
+            get = self.get(memo[id(obj)][0])
+            if proto:
+                write(POP_MARK + get)
+            else:   # proto 0 -- POP_MARK not available
+                write(POP * (n+1) + get)
+            return
+
+        # No recursion.
+        self.write(TUPLE)
+        self.memoize(obj)
+
+    dispatch[TupleType] = save_tuple
+
+    # save_empty_tuple() isn't used by anything in Python 2.3.  However, I
+    # found a Pickler subclass in Zope3 that calls it, so it isn't safe
+    # to remove it.
+    def save_empty_tuple(self, obj):
+        self.write(EMPTY_TUPLE)
+
+    def save_list(self, obj):
+        write = self.write
+
+        if self.bin:
+            write(EMPTY_LIST)
+        else:   # proto 0 -- can't use EMPTY_LIST
+            write(MARK + LIST)
+
+        self.memoize(obj)
+        self._batch_appends(iter(obj))
+
+    dispatch[ListType] = save_list
+
+    # Keep in synch with cPickle's BATCHSIZE.  Nothing will break if it gets
+    # out of synch, though.
+    _BATCHSIZE = 1000
+
+    def _batch_appends(self, items):
+        # Helper to batch up APPENDS sequences
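+        # In binary mode each full batch goes out as
+        #     MARK item_1 ... item_k APPENDS      (k <= _BATCHSIZE)
+        # and a lone trailing item as a single APPEND.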
+        save = self.save
+        write = self.write
+
+        if not self.bin:
+            for x in items:
+                save(x)
+                write(APPEND)
+            return
+
+        r = xrange(self._BATCHSIZE)
+        while items is not None:
+            tmp = []
+            for i in r:
+                try:
+                    x = items.next()
+                    tmp.append(x)
+                except StopIteration:
+                    items = None
+                    break
+            n = len(tmp)
+            if n > 1:
+                write(MARK)
+                for x in tmp:
+                    save(x)
+                write(APPENDS)
+            elif n:
+                save(tmp[0])
+                write(APPEND)
+            # else tmp is empty, and we're done
+
+    def save_dict(self, obj):
+        ## Stackless addition BEGIN
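+        # _pickle_moduledict (a Stackless helper) is expected to return a
+        # reduce-style (callable, args) tuple when obj is the __dict__ of a
+        # module -- so module dicts pickle as references -- and None
+        # otherwise.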
+        modict_saver = self._pickle_moduledict(self, obj)
+        if modict_saver is not None:
+            return self.save_reduce(*modict_saver)
+        ## Stackless addition END
+        write = self.write
+
+        if self.bin:
+            write(EMPTY_DICT)
+        else:   # proto 0 -- can't use EMPTY_DICT
+            write(MARK + DICT)
+
+        self.memoize(obj)
+        self._batch_setitems(obj.iteritems())
+
+    dispatch[DictionaryType] = save_dict
+    if PyStringMap is not None:
+        dispatch[PyStringMap] = save_dict
+
+    def _batch_setitems(self, items):
+        # Helper to batch up SETITEMS sequences; batching requires proto >= 1
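+        # (proto 0 emits one SETITEM per pair instead).  Mirroring
+        # _batch_appends, each full batch is written as
+        #     MARK k_1 v_1 ... k_n v_n SETITEMS
+        # and a lone trailing pair as a single SETITEM.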
+        save = self.save
+        write = self.write
+
+        if not self.bin:
+            for k, v in items:
+                save(k)
+                save(v)
+                write(SETITEM)
+            return
+
+        r = xrange(self._BATCHSIZE)
+        while items is not None:
+            tmp = []
+            for i in r:
+                try:
+                    tmp.append(items.next())
+                except StopIteration:
+                    items = None
+                    break
+            n = len(tmp)
+            if n > 1:
+                write(MARK)
+                for k, v in tmp:
+                    save(k)
+                    save(v)
+                write(SETITEMS)
+            elif n:
+                k, v = tmp[0]
+                save(k)
+                save(v)
+                write(SETITEM)
+            # else tmp is empty, and we're done
+
+    def save_inst(self, obj):
+        cls = obj.__class__
+
+        memo  = self.memo
+        write = self.write
+        save  = self.save
+
+        if hasattr(obj, '__getinitargs__'):
+            args = obj.__getinitargs__()
+        len(args) # XXX Assert it's a sequence; len() raises TypeError if not
+            _keep_alive(args, memo)
+        else:
+            args = ()
+
+        write(MARK)
+
+        if self.bin:
+            save(cls)
+            for arg in args:
+                save(arg)
+            write(OBJ)
+        else:
+            for arg in args:
+                save(arg)
+            write(INST + cls.__module__ + '\n' + cls.__name__ + '\n')
+
+        self.memoize(obj)
+
+        try:
+            getstate = obj.__getstate__
+        except AttributeError:
+            stuff = obj.__dict__
+        else:
+            stuff = getstate()
+            _keep_alive(stuff, memo)
+        save(stuff)
+        write(BUILD)
+
+    dispatch[InstanceType] = save_inst
+
+    def save_global(self, obj, name=None, pack=struct.pack):
+        write = self.write
+        memo = self.memo
+
+        if name is None:
+            name = obj.__name__
+
+        module = getattr(obj, "__module__", None)
+        if module is None:
+            module = whichmodule(obj, name)
+
+        try:
+            __import__(module)
+            mod = sys.modules[module]