Commits

Brett Cannon  committed bbab0db Merge

merge

  • Parent commits b3db006, 9c09ba7


Files changed (127)

File Doc/includes/sqlite3/shared_cache.py

 import sqlite3
 
 # The shared cache is only available in SQLite versions 3.3.3 or later
-# See the SQLite documentaton for details.
+# See the SQLite documentation for details.
 
 sqlite3.enable_shared_cache(True)

File Doc/library/collections.rst

    :options: +NORMALIZE_WHITESPACE
 
    >>> # Basic example
-   >>> Point = namedtuple('Point', 'x y')
+   >>> Point = namedtuple('Point', ['x', 'y'])
    >>> p = Point(x=10, y=11)
 
    >>> # Example using the verbose option to print the class definition
 a fixed-width print format:
 
     >>> class Point(namedtuple('Point', 'x y')):
-    ...     __slots__ = ()
-    ...     @property
-    ...     def hypot(self):
-    ...         return (self.x ** 2 + self.y ** 2) ** 0.5
-    ...     def __str__(self):
-    ...         return 'Point: x=%6.3f  y=%6.3f  hypot=%6.3f' % (self.x, self.y, self.hypot)
+            __slots__ = ()
+            @property
+            def hypot(self):
+                return (self.x ** 2 + self.y ** 2) ** 0.5
+            def __str__(self):
+                return 'Point: x=%6.3f  y=%6.3f  hypot=%6.3f' % (self.x, self.y, self.hypot)
 
     >>> for p in Point(3, 4), Point(14, 5/7):
-    ...     print(p)
+            print(p)
     Point: x= 3.000  y= 4.000  hypot= 5.000
     Point: x=14.000  y= 0.714  hypot=14.018
 
     >>> Status.open, Status.pending, Status.closed
     (0, 1, 2)
     >>> class Status:
-    ...     open, pending, closed = range(3)
+            open, pending, closed = range(3)
 
 .. seealso::
 

File Doc/library/inspect.rst

    that raise AttributeError). It can also return descriptors objects
    instead of instance members.
 
+   If the instance `__dict__` is shadowed by another member (for example a
+   property) then this function will be unable to find instance members.
+
    .. versionadded:: 3.2
 
-The only known case that can cause `getattr_static` to trigger code execution,
-and cause it to return incorrect results (or even break), is where a class uses
-:data:`~object.__slots__` and provides a `__dict__` member using a property or
-descriptor. If you find other cases please report them so they can be fixed
-or documented.
-
 `getattr_static` does not resolve descriptors, for example slot descriptors or
 getset descriptors on objects implemented in C. The descriptor object
 is returned instead of the underlying attribute.
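
A minimal sketch of the shadowed-``__dict__`` behaviour described above (the
Shadowed class is made up for illustration):

    import inspect

    class Shadowed:
        a = 3
        @property
        def __dict__(self):            # shadows the real instance __dict__
            return {}

    obj = Shadowed()
    obj.a = 4
    print(obj.a)                             # 4 -- normal lookup still works
    print(inspect.getattr_static(obj, 'a'))  # 3 -- instance member not found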

File Doc/library/subprocess.rst

 
    *stdin*, *stdout* and *stderr* specify the executed programs' standard input,
    standard output and standard error file handles, respectively.  Valid values
-   are :data:`PIPE`, an existing file descriptor (a positive integer), an
-   existing :term:`file object`, and ``None``.  :data:`PIPE` indicates that a
-   new pipe to the child should be created.  With ``None``, no redirection will
-   occur; the child's file handles will be inherited from the parent.  Additionally,
-   *stderr* can be :data:`STDOUT`, which indicates that the stderr data from the
-   applications should be captured into the same file handle as for stdout.
+   are :data:`PIPE`, :data:`DEVNULL`, an existing file descriptor (a positive
+   integer), an existing :term:`file object`, and ``None``.  :data:`PIPE`
+   indicates that a new pipe to the child should be created.  :data:`DEVNULL`
+   indicates that the special file :data:`os.devnull` will be used. With ``None``,
+   no redirection will occur; the child's file handles will be inherited from
+   the parent.  Additionally, *stderr* can be :data:`STDOUT`, which indicates
+   that the stderr data from the applications should be captured into the same
+   file handle as for stdout.
 
    If *preexec_fn* is set to a callable object, this object will be called in the
    child process just before the child is executed.
       Added context manager support.
 
 
+.. data:: DEVNULL
+
+   Special value that can be used as the *stdin*, *stdout* or *stderr* argument
+   to :class:`Popen` and indicates that the special file :data:`os.devnull`
+   will be used.
+
+   .. versionadded:: 3.3
+
+
 .. data:: PIPE
 
    Special value that can be used as the *stdin*, *stdout* or *stderr* argument
 :func:`call` and :meth:`Popen.communicate` will raise :exc:`TimeoutExpired` if
 the timeout expires before the process exits.
 
-Exceptions defined in this module all inherit from :ext:`SubprocessError`.
+Exceptions defined in this module all inherit from :exc:`SubprocessError`.
 
    .. versionadded:: 3.3
       The :exc:`SubprocessError` base class was added.
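
A small usage sketch of the new :data:`DEVNULL` value (the ``ls`` command is
only an example):

    import subprocess

    # Run a command and discard everything it writes to stdout and stderr.
    subprocess.check_call(['ls', '-l'],
                          stdout=subprocess.DEVNULL,
                          stderr=subprocess.DEVNULL)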

File Include/abstract.h

       arbitrary data.
 
       0 is returned on success.  buffer and buffer_len are only
-      set in case no error occurrs.  Otherwise, -1 is returned and
+      set in case no error occurs.  Otherwise, -1 is returned and
       an exception set.
        */
 
       writable memory location in buffer of size buffer_len.
 
       0 is returned on success.  buffer and buffer_len are only
-      set in case no error occurrs. Otherwise, -1 is returned and
+      set in case no error occurs. Otherwise, -1 is returned and
       an exception set.
        */
 

File Include/pymacconfig.h

 #    endif
 
 #    if defined(__LP64__)
-     /* MacOSX 10.4 (the first release to suppport 64-bit code
+     /* MacOSX 10.4 (the first release to support 64-bit code
       * at all) only supports 64-bit in the UNIX layer.
       * Therefore surpress the toolbox-glue in 64-bit mode.
       */

File Lib/binhex.py

 
 def getfileinfo(name):
     finfo = FInfo()
-    fp = io.open(name, 'rb')
-    # Quick check for textfile
-    data = fp.read(512)
-    if 0 not in data:
-        finfo.Type = 'TEXT'
-    fp.seek(0, 2)
-    dsize = fp.tell()
-    fp.close()
+    with io.open(name, 'rb') as fp:
+        # Quick check for textfile
+        data = fp.read(512)
+        if 0 not in data:
+            finfo.Type = 'TEXT'
+        fp.seek(0, 2)
+        dsize = fp.tell()
     dir, file = os.path.split(name)
     file = file.replace(':', '-', 1)
     return file, finfo, dsize, 0
 class BinHex:
     def __init__(self, name_finfo_dlen_rlen, ofp):
         name, finfo, dlen, rlen = name_finfo_dlen_rlen
+        close_on_error = False
         if isinstance(ofp, str):
             ofname = ofp
             ofp = io.open(ofname, 'wb')
-        ofp.write(b'(This file must be converted with BinHex 4.0)\r\r:')
-        hqxer = _Hqxcoderengine(ofp)
-        self.ofp = _Rlecoderengine(hqxer)
-        self.crc = 0
-        if finfo is None:
-            finfo = FInfo()
-        self.dlen = dlen
-        self.rlen = rlen
-        self._writeinfo(name, finfo)
-        self.state = _DID_HEADER
+            close_on_error = True
+        try:
+            ofp.write(b'(This file must be converted with BinHex 4.0)\r\r:')
+            hqxer = _Hqxcoderengine(ofp)
+            self.ofp = _Rlecoderengine(hqxer)
+            self.crc = 0
+            if finfo is None:
+                finfo = FInfo()
+            self.dlen = dlen
+            self.rlen = rlen
+            self._writeinfo(name, finfo)
+            self.state = _DID_HEADER
+        except:
+            if close_on_error:
+                ofp.close()
+            raise
 
     def _writeinfo(self, name, finfo):
         nl = len(name)

File Lib/csv.py

 an all or nothing approach, so we allow for small variations in this
         number.
           1) build a table of the frequency of each character on every line.
-          2) build a table of freqencies of this frequency (meta-frequency?),
+          2) build a table of frequencies of this frequency (meta-frequency?),
              e.g.  'x occurred 5 times in 10 rows, 6 times in 1000 rows,
              7 times in 2 rows'
           3) use the mode of the meta-frequency to determine the /expected/
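
A rough sketch of the frequency / meta-frequency idea in that docstring (the
helper below is illustrative, not the actual implementation):

    from collections import Counter

    def expected_frequency(lines, char):
        # 1) how often the character occurs on each line
        per_line = [line.count(char) for line in lines]
        # 2) frequency of those frequencies, e.g. {5: 10, 6: 1000, 7: 2}
        meta = Counter(per_line)
        # 3) the mode of the meta-frequency is the /expected/ count per line
        count, rows = meta.most_common(1)[0]
        return count, rows

    print(expected_frequency(["a,b,c", "d,e,f", "g,h,i,j"], ","))  # (2, 2)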

File Lib/ctypes/test/test_arrays.py

             values = [ia[i] for i in range(len(init))]
             self.assertEqual(values, [0] * len(init))
 
-            # Too many in itializers should be caught
+            # Too many initializers should be caught
             self.assertRaises(IndexError, int_array, *range(alen*2))
 
         CharArray = ARRAY(c_char, 3)

File Lib/ctypes/test/test_init.py

         self.assertEqual((y.x.a, y.x.b), (0, 0))
         self.assertEqual(y.x.new_was_called, False)
 
-        # But explicitely creating an X structure calls __new__ and __init__, of course.
+        # But explicitly creating an X structure calls __new__ and __init__, of course.
         x = X()
         self.assertEqual((x.a, x.b), (9, 12))
         self.assertEqual(x.new_was_called, True)

File Lib/ctypes/test/test_numbers.py

     def test_int_from_address(self):
         from array import array
         for t in signed_types + unsigned_types:
-            # the array module doesn't suppport all format codes
+            # the array module doesn't support all format codes
             # (no 'q' or 'Q')
             try:
                 array(t._type_)

File Lib/ctypes/test/test_win32.py

             # ValueError: Procedure probably called with not enough arguments (4 bytes missing)
             self.assertRaises(ValueError, IsWindow)
 
-            # This one should succeeed...
+            # This one should succeed...
             self.assertEqual(0, IsWindow(0))
 
             # ValueError: Procedure probably called with too many arguments (8 bytes in excess)

File Lib/difflib.py

             line = line.replace(' ','\0')
             # expand tabs into spaces
             line = line.expandtabs(self._tabsize)
-            # relace spaces from expanded tabs back into tab characters
+            # replace spaces from expanded tabs back into tab characters
             # (we'll replace them with markup after we do differencing)
             line = line.replace(' ','\t')
             return line.replace('\0',' ').rstrip('\n')

File Lib/distutils/cmd.py

                                   not self.force, dry_run=self.dry_run)
 
     def move_file (self, src, dst, level=1):
-        """Move a file respectin dry-run flag."""
+        """Move a file respecting dry-run flag."""
         return file_util.move_file(src, dst, dry_run=self.dry_run)
 
     def spawn(self, cmd, search_path=1, level=1):

File Lib/distutils/cygwinccompiler.py

             self.dll_libraries = get_msvcr()
 
     def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
-        """Compiles the source by spawing GCC and windres if needed."""
+        """Compiles the source by spawning GCC and windres if needed."""
         if ext == '.rc' or ext == '.res':
             # gcc needs '.res' and '.rc' compiled to object files !!!
             try:

File Lib/distutils/tests/test_clean.py

             self.assertTrue(not os.path.exists(path),
                          '%s was not removed' % path)
 
-        # let's run the command again (should spit warnings but suceed)
+        # let's run the command again (should spit warnings but succeed)
         cmd.all = 1
         cmd.ensure_finalized()
         cmd.run()

File Lib/distutils/tests/test_install.py

         if sys.version < '2.6':
             return
 
-        # preparing the environement for the test
+        # preparing the environment for the test
         self.old_user_base = site.USER_BASE
         self.old_user_site = site.USER_SITE
         self.tmpdir = self.mkdtemp()

File Lib/distutils/tests/test_sdist.py

         # adding a file
         self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')
 
-        # make sure build_py is reinitinialized, like a fresh run
+        # make sure build_py is reinitialized, like a fresh run
         build_py = dist.get_command_obj('build_py')
         build_py.finalized = False
         build_py.ensure_finalized()

File Lib/doctest.py

         # Process each example.
         for examplenum, example in enumerate(test.examples):
 
-            # If REPORT_ONLY_FIRST_FAILURE is set, then supress
+            # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
             # reporting after the first failure.
             quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
                      failures > 0)
            caller can catch the errors and initiate post-mortem debugging.
 
            The DocTestCase provides a debug method that raises
-           UnexpectedException errors if there is an unexepcted
+           UnexpectedException errors if there is an unexpected
            exception:
 
              >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',

File Lib/email/encoders.py

     ]
 
 
-from base64 import b64encode as _bencode
+from base64 import encodebytes as _bencode
 from quopri import encodestring as _encodestring
 
 
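
The switch matters because base64.encodebytes inserts newlines so that
encoded bodies stay within the 76-character limit exercised by the new test
in Lib/email/test/test_email.py below, while b64encode returns one unbroken
line.  A quick sketch with arbitrary data:

    import base64

    data = bytes(range(256)) * 2
    unbroken = base64.b64encode(data)       # single long line, no newlines
    wrapped = base64.encodebytes(data)      # wrapped at 76 characters
    print(b'\n' in unbroken)                                   # False
    print(max(len(line) for line in wrapped.splitlines()))     # 76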

File Lib/email/header.py

 # For use with .match()
 fcre = re.compile(r'[\041-\176]+:$')
 
-# Find a header embeded in a putative header value.  Used to check for
+# Find a header embedded in a putative header value.  Used to check for
 # header injection attack.
 _embeded_header = re.compile(r'\n[^ \t]+:')
 
                                     self._continuation_ws, splitchars)
         for string, charset in self._chunks:
             lines = string.splitlines()
-            formatter.feed(lines[0], charset)
+            formatter.feed(lines[0] if lines else '', charset)
             for line in lines[1:]:
                 formatter.newline()
                 if charset.header_encoding is not None:

File Lib/email/message.py

 def _splitparam(param):
     # Split header parameters.  BAW: this may be too simple.  It isn't
     # strictly RFC 2045 (section 5.1) compliant, but it catches most headers
-    # found in the wild.  We may eventually need a full fledged parser
-    # eventually.
-    a, sep, b = param.partition(';')
+    # found in the wild.  We may eventually need a full fledged parser.
+    # RDM: we might have a Header here; for now just stringify it.
+    a, sep, b = str(param).partition(';')
     if not sep:
         return a.strip(), None
     return a.strip(), b.strip()
         return param
 
 def _parseparam(s):
+    # RDM This might be a Header, so for now stringify it.
+    s = ';' + str(s)
     plist = []
     while s[:1] == ';':
         s = s[1:]
         if i is not None and not isinstance(self._payload, list):
             raise TypeError('Expected list, got %s' % type(self._payload))
         payload = self._payload
-        cte = self.get('content-transfer-encoding', '').lower()
+        # cte might be a Header, so for now stringify it.
+        cte = str(self.get('content-transfer-encoding', '')).lower()
         # payload may be bytes here.
         if isinstance(payload, str):
             if _has_surrogates(payload):
         if value is missing:
             return failobj
         params = []
-        for p in _parseparam(';' + value):
+        for p in _parseparam(value):
             try:
                 name, val = p.split('=', 1)
                 name = name.strip()

File Lib/email/test/test_email.py

         msg['Dummy'] = 'dummy\nX-Injected-Header: test'
         self.assertRaises(errors.HeaderParseError, msg.as_string)
 
-
 # Test the email.encoders module
 class TestEncoders(unittest.TestCase):
+
+    def test_EncodersEncode_base64(self):
+        with openfile('PyBanner048.gif', 'rb') as fp:
+            bindata = fp.read()
+        mimed = email.mime.image.MIMEImage(bindata)
+        base64ed = mimed.get_payload()
+        # the transfer-encoded body lines should all be <=76 characters
+        lines = base64ed.split('\n')
+        self.assertLessEqual(max([ len(x) for x in lines ]), 76)
+
     def test_encode_empty_payload(self):
         eq = self.assertEqual
         msg = Message()
 
     def test_body(self):
         eq = self.assertEqual
-        bytes = b'\xfa\xfb\xfc\xfd\xfe\xff'
-        msg = MIMEApplication(bytes)
-        eq(msg.get_payload(), '+vv8/f7/')
-        eq(msg.get_payload(decode=True), bytes)
+        bytesdata = b'\xfa\xfb\xfc\xfd\xfe\xff'
+        msg = MIMEApplication(bytesdata)
+        # whitespace in the cte encoded block is RFC-irrelevant.
+        eq(msg.get_payload().strip(), '+vv8/f7/')
+        eq(msg.get_payload(decode=True), bytesdata)
 
 
 
                               ['foo@bar.com',
                                'g\uFFFD\uFFFDst'])
 
+    def test_get_content_type_with_8bit(self):
+        msg = email.message_from_bytes(textwrap.dedent("""\
+            Content-Type: text/pl\xA7in; charset=utf-8
+            """).encode('latin-1'))
+        self.assertEqual(msg.get_content_type(), "text/pl\uFFFDin")
+        self.assertEqual(msg.get_content_maintype(), "text")
+        self.assertEqual(msg.get_content_subtype(), "pl\uFFFDin")
+
+    def test_get_params_with_8bit(self):
+        msg = email.message_from_bytes(
+            'X-Header: foo=\xa7ne; b\xa7r=two; baz=three\n'.encode('latin-1'))
+        self.assertEqual(msg.get_params(header='x-header'),
+           [('foo', '\uFFFDne'), ('b\uFFFDr', 'two'), ('baz', 'three')])
+        self.assertEqual(msg.get_param('Foo', header='x-header'), '\uFFFdne')
+        # XXX: someday you might be able to get 'b\xa7r', for now you can't.
+        self.assertEqual(msg.get_param('b\xa7r', header='x-header'), None)
+
+    def test_get_rfc2231_params_with_8bit(self):
+        msg = email.message_from_bytes(textwrap.dedent("""\
+            Content-Type: text/plain; charset=us-ascii;
+             title*=us-ascii'en'This%20is%20not%20f\xa7n"""
+             ).encode('latin-1'))
+        self.assertEqual(msg.get_param('title'),
+            ('us-ascii', 'en', 'This is not f\uFFFDn'))
+
+    def test_set_rfc2231_params_with_8bit(self):
+        msg = email.message_from_bytes(textwrap.dedent("""\
+            Content-Type: text/plain; charset=us-ascii;
+             title*=us-ascii'en'This%20is%20not%20f\xa7n"""
+             ).encode('latin-1'))
+        msg.set_param('title', 'test')
+        self.assertEqual(msg.get_param('title'), 'test')
+
+    def test_del_rfc2231_params_with_8bit(self):
+        msg = email.message_from_bytes(textwrap.dedent("""\
+            Content-Type: text/plain; charset=us-ascii;
+             title*=us-ascii'en'This%20is%20not%20f\xa7n"""
+             ).encode('latin-1'))
+        msg.del_param('title')
+        self.assertEqual(msg.get_param('title'), None)
+        self.assertEqual(msg.get_content_maintype(), 'text')
+
+    def test_get_payload_with_8bit_cte_header(self):
+        msg = email.message_from_bytes(textwrap.dedent("""\
+            Content-Transfer-Encoding: b\xa7se64
+            Content-Type: text/plain; charset=latin-1
+
+            payload
+            """).encode('latin-1'))
+        self.assertEqual(msg.get_payload(), 'payload\n')
+        self.assertEqual(msg.get_payload(decode=True), b'payload\n')
+
     non_latin_bin_msg = textwrap.dedent("""\
         From: foo@bar.com
         To: báz
         h = Header('文', charset='shift_jis')
         self.assertEqual(h.encode(), '=?iso-2022-jp?b?GyRCSjgbKEI=?=')
 
+    def test_flatten_header_with_no_value(self):
+        # Issue 11401 (regression from email 4.x)  Note that the space after
+        # the header doesn't reflect the input, but this is also the way
+        # email 4.x behaved.  At some point it would be nice to fix that.
+        msg = email.message_from_string("EmptyHeader:")
+        self.assertEqual(str(msg), "EmptyHeader: \n\n")
+
 
 
 # Test RFC 2231 header parameters (en/de)coding

File Lib/http/server.py

 
 # Default error message template
 DEFAULT_ERROR_MESSAGE = """\
-<head>
-<title>Error response</title>
-</head>
-<body>
-<h1>Error response</h1>
-<p>Error code %(code)d.
-<p>Message: %(message)s.
-<p>Error code explanation: %(code)s = %(explain)s.
-</body>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+    <head>
+        <meta http-equiv="Content-Type" content="text/html;charset=utf-8" />
+        <title>Error response</title>
+    </head>
+    <body>
+        <h1>Error response</h1>
+        <p>Error code: %(code)d</p>
+        <p>Message: %(message)s.</p>
+        <p>Error code explanation: %(code)s - %(explain)s.</p>
+    </body>
+</html>
 """
 
 DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8"

File Lib/idlelib/FormatParagraph.py

             # If the block ends in a \n, we dont want the comment
             # prefix inserted after it. (Im not sure it makes sense to
             # reformat a comment block that isnt made of complete
-            # lines, but whatever!)  Can't think of a clean soltution,
+            # lines, but whatever!)  Can't think of a clean solution,
             # so we hack away
             block_suffix = ""
             if not newdata[-1]:

File Lib/idlelib/extend.txt

 
 An IDLE extension class is instantiated with a single argument,
 `editwin', an EditorWindow instance. The extension cannot assume much
-about this argument, but it is guarateed to have the following instance
+about this argument, but it is guaranteed to have the following instance
 variables:
 
     text	a Text instance (a widget)

File Lib/idlelib/macosxSupport.py

 
 def addOpenEventSupport(root, flist):
     """
-    This ensures that the application will respont to open AppleEvents, which
-    makes is feaseable to use IDLE as the default application for python files.
+    This ensures that the application will respond to open AppleEvents, which
+    makes is feasible to use IDLE as the default application for python files.
     """
     def doOpenFile(*args):
         for fn in args:

File Lib/inspect.py

         instance_dict = object.__getattribute__(obj, "__dict__")
     except AttributeError:
         pass
-    return instance_dict.get(attr, _sentinel)
+    return dict.get(instance_dict, attr, _sentinel)
 
 
 def _check_class(klass, attr):
     for entry in _static_getmro(klass):
-        try:
-            return entry.__dict__[attr]
-        except KeyError:
-            pass
+        if not _shadowed_dict(type(entry)):
+            try:
+                return entry.__dict__[attr]
+            except KeyError:
+                pass
     return _sentinel
 
 def _is_type(obj):
         return False
     return True
 
+def _shadowed_dict(klass):
+    dict_attr = type.__dict__["__dict__"]
+    for entry in _static_getmro(klass):
+        try:
+            class_dict = dict_attr.__get__(entry)["__dict__"]
+        except KeyError:
+            pass
+        else:
+            if not (type(class_dict) is types.GetSetDescriptorType and
+                    class_dict.__name__ == "__dict__" and
+                    class_dict.__objclass__ is entry):
+                return True
+    return False
 
 def getattr_static(obj, attr, default=_sentinel):
     """Retrieve attributes without triggering dynamic lookup via the
     """
     instance_result = _sentinel
     if not _is_type(obj):
-        instance_result = _check_instance(obj, attr)
         klass = type(obj)
+        if not _shadowed_dict(klass):
+            instance_result = _check_instance(obj, attr)
     else:
         klass = obj
 

File Lib/lib2to3/fixes/fix_metaclass.py

     """
     for node in cls_node.children:
         if node.type == syms.suite:
-            # already in the prefered format, do nothing
+            # already in the preferred format, do nothing
             return
 
     # !%@#! oneliners have no suite node, we have to fake one up

File Lib/lib2to3/pgen2/conv.py

         self.finish_off()
 
     def parse_graminit_h(self, filename):
-        """Parse the .h file writen by pgen.  (Internal)
+        """Parse the .h file written by pgen.  (Internal)
 
         This file is a sequence of #define statements defining the
         nonterminals of the grammar as numbers.  We build two tables
         return True
 
     def parse_graminit_c(self, filename):
-        """Parse the .c file writen by pgen.  (Internal)
+        """Parse the .c file written by pgen.  (Internal)
 
         The file looks as follows.  The first two lines are always this:
 

File Lib/lib2to3/pytree.py

             content: optional sequence of subsequences of patterns;
                      if absent, matches one node;
                      if present, each subsequence is an alternative [*]
-            min: optinal minumum number of times to match, default 0
-            max: optional maximum number of times tro match, default HUGE
+            min: optional minimum number of times to match, default 0
+            max: optional maximum number of times to match, default HUGE
             name: optional name assigned to this match
 
         [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is

File Lib/lib2to3/tests/data/py2_test_grammar.py

         ### simple_stmt: small_stmt (';' small_stmt)* [';']
         x = 1; pass; del x
         def foo():
-            # verify statments that end with semi-colons
+            # verify statements that end with semi-colons
             x = 1; pass; del x;
         foo()
 

File Lib/lib2to3/tests/data/py3_test_grammar.py

         ### simple_stmt: small_stmt (';' small_stmt)* [';']
         x = 1; pass; del x
         def foo():
-            # verify statments that end with semi-colons
+            # verify statements that end with semi-colons
             x = 1; pass; del x;
         foo()
 

File Lib/multiprocessing/__init__.py

         except (ValueError, KeyError):
             num = 0
     elif 'bsd' in sys.platform or sys.platform == 'darwin':
+        comm = '/sbin/sysctl -n hw.ncpu'
+        if sys.platform == 'darwin':
+            comm = '/usr' + comm
         try:
-            with os.popen('sysctl -n hw.ncpu') as p:
+            with os.popen(comm) as p:
                 num = int(p.read())
         except ValueError:
             num = 0

File Lib/ntpath.py

 #       - $varname is accepted.
 #       - %varname% is accepted.
 #       - varnames can be made out of letters, digits and the characters '_-'
-#         (though is not verifed in the ${varname} and %varname% cases)
+#         (though is not verified in the ${varname} and %varname% cases)
 # XXX With COMMAND.COM you can use any characters in a variable name,
 # XXX except '^|<>='.
 

File Lib/pickletools.py

       proto=0,
       doc="""Read an object from the memo and push it on the stack.
 
-      The index of the memo object to push is given by the newline-teriminated
+      The index of the memo object to push is given by the newline-terminated
       decimal string following.  BINGET and LONG_BINGET are space-optimized
       versions.
       """),

File Lib/platform.py

             info = pipe.read()
             if pipe.close():
                 raise os.error('command failed')
-            # XXX How can I supress shell errors from being written
+            # XXX How can I suppress shell errors from being written
             #     to stderr ?
         except os.error as why:
             #print 'Command %s failed: %s' % (cmd,why)

File Lib/pstats.py

File contents unchanged.

File Lib/shutil.py

         except KeyError:
             raise ValueError("Unknown unpack format '{0}'".format(format))
 
-        func = format_info[0]
-        func(filename, extract_dir, **dict(format_info[1]))
+        func = format_info[1]
+        func(filename, extract_dir, **dict(format_info[2]))
     else:
         # we need to look at the registered unpackers supported extensions
         format = _find_unpack_format(filename)
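
Each registered unpack format maps a name to (extensions, function,
extra_args, description), so the callable lives at index 1 and its keyword
arguments at index 2, as the fix above reflects.  A hedged sketch with a
made-up '.spam' format:

    import shutil

    def unpack_spam(filename, extract_dir, flavor='plain'):
        # hypothetical handler; a real one would actually extract the archive
        print('unpacking', filename, 'into', extract_dir, 'as', flavor)

    shutil.register_unpack_format('spam', ['.spam'], unpack_spam,
                                  extra_args=[('flavor', 'fancy')],
                                  description='illustrative format')
    shutil.unpack_archive('leftovers.spam', 'out')
    # -> unpack_spam('leftovers.spam', 'out', flavor='fancy')
    shutil.unregister_unpack_format('spam')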

File Lib/subprocess.py

     """This exception is raised when the timeout expires while waiting for a
     child process.
     """
-    def __init__(self, cmd, output=None):
+    def __init__(self, cmd, timeout, output=None):
         self.cmd = cmd
+        self.timeout = timeout
         self.output = output
 
     def __str__(self):
             return fds
 
 __all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
-           "getoutput", "check_output", "CalledProcessError"]
+           "getoutput", "check_output", "CalledProcessError", "DEVNULL"]
 
 if mswindows:
     from _subprocess import CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP
 
 PIPE = -1
 STDOUT = -2
+DEVNULL = -3
 
 
 def _eintr_retry_call(func, *args):
     except TimeoutExpired:
         process.kill()
         output, unused_err = process.communicate()
-        raise TimeoutExpired(process.args, output=output)
+        raise TimeoutExpired(process.args, timeout, output=output)
     retcode = process.poll()
     if retcode:
         raise CalledProcessError(retcode, process.args, output=output)
             # Child is still running, keep us alive until we can wait on it.
             _active.append(self)
 
+    def _get_devnull(self):
+        if not hasattr(self, '_devnull'):
+            self._devnull = os.open(os.devnull, os.O_RDWR)
+        return self._devnull
 
     def communicate(self, input=None, timeout=None):
         """Interact with process: Send data to stdin.  Read data from
             return (stdout, stderr)
 
         try:
-            stdout, stderr = self._communicate(input, endtime)
+            stdout, stderr = self._communicate(input, endtime, timeout)
         finally:
             self._communication_started = True
 
             return endtime - time.time()
 
 
-    def _check_timeout(self, endtime):
+    def _check_timeout(self, endtime, orig_timeout):
         """Convenience for checking if a timeout has expired."""
         if endtime is None:
             return
         if time.time() > endtime:
-            raise TimeoutExpired(self.args)
+            raise TimeoutExpired(self.args, orig_timeout)
 
 
     if mswindows:
                     p2cread, _ = _subprocess.CreatePipe(None, 0)
             elif stdin == PIPE:
                 p2cread, p2cwrite = _subprocess.CreatePipe(None, 0)
+            elif stdin == DEVNULL:
+                p2cread = msvcrt.get_osfhandle(self._get_devnull())
             elif isinstance(stdin, int):
                 p2cread = msvcrt.get_osfhandle(stdin)
             else:
                     _, c2pwrite = _subprocess.CreatePipe(None, 0)
             elif stdout == PIPE:
                 c2pread, c2pwrite = _subprocess.CreatePipe(None, 0)
+            elif stdout == DEVNULL:
+                c2pwrite = msvcrt.get_osfhandle(self._get_devnull())
             elif isinstance(stdout, int):
                 c2pwrite = msvcrt.get_osfhandle(stdout)
             else:
                 errread, errwrite = _subprocess.CreatePipe(None, 0)
             elif stderr == STDOUT:
                 errwrite = c2pwrite
+            elif stderr == DEVNULL:
+                errwrite = msvcrt.get_osfhandle(self._get_devnull())
             elif isinstance(stderr, int):
                 errwrite = msvcrt.get_osfhandle(stderr)
             else:
             except pywintypes.error as e:
                 # Translate pywintypes.error to WindowsError, which is
                 # a subclass of OSError.  FIXME: We should really
-                # translate errno using _sys_errlist (or simliar), but
+                # translate errno using _sys_errlist (or similar), but
                 # how can this be done from Python?
                 raise WindowsError(*e.args)
             finally:
                     c2pwrite.Close()
                 if errwrite != -1:
                     errwrite.Close()
+                if hasattr(self, '_devnull'):
+                    os.close(self._devnull)
 
             # Retain the process handle, but close the thread handle
             self._child_created = True
             return self.returncode
 
 
-        def wait(self, timeout=None):
+        def wait(self, timeout=None, endtime=None):
             """Wait for child process to terminate.  Returns returncode
             attribute."""
+            if endtime is not None:
+                timeout = self._remaining_time(endtime)
             if timeout is None:
                 timeout = _subprocess.INFINITE
             else:
             if self.returncode is None:
                 result = _subprocess.WaitForSingleObject(self._handle, timeout)
                 if result == _subprocess.WAIT_TIMEOUT:
-                    raise TimeoutExpired(self.args)
+                    raise TimeoutExpired(self.args, timeout)
                 self.returncode = _subprocess.GetExitCodeProcess(self._handle)
             return self.returncode
 
             fh.close()
 
 
-        def _communicate(self, input, endtime):
+        def _communicate(self, input, endtime, orig_timeout):
             # Start reader threads feeding into a list hanging off of this
             # object, unless they've already been started.
             if self.stdout and not hasattr(self, "_stdout_buff"):
                 pass
             elif stdin == PIPE:
                 p2cread, p2cwrite = _create_pipe()
+            elif stdin == DEVNULL:
+                p2cread = self._get_devnull()
             elif isinstance(stdin, int):
                 p2cread = stdin
             else:
                 pass
             elif stdout == PIPE:
                 c2pread, c2pwrite = _create_pipe()
+            elif stdout == DEVNULL:
+                c2pwrite = self._get_devnull()
             elif isinstance(stdout, int):
                 c2pwrite = stdout
             else:
                 errread, errwrite = _create_pipe()
             elif stderr == STDOUT:
                 errwrite = c2pwrite
+            elif stderr == DEVNULL:
+                errwrite = self._get_devnull()
             elif isinstance(stderr, int):
                 errwrite = stderr
             else:
                     os.close(c2pwrite)
                 if errwrite != -1 and errread != -1:
                     os.close(errwrite)
+                if hasattr(self, '_devnull'):
+                    os.close(self._devnull)
 
                 # Wait for exec to fail or succeed; possibly raising an
                 # exception (limited in size)
         def wait(self, timeout=None, endtime=None):
             """Wait for child process to terminate.  Returns returncode
             attribute."""
-            # If timeout was passed but not endtime, compute endtime in terms of
-            # timeout.
-            if endtime is None and timeout is not None:
-                endtime = time.time() + timeout
             if self.returncode is not None:
                 return self.returncode
-            elif endtime is not None:
+
+            # endtime is preferred to timeout.  timeout is only used for
+            # printing.
+            if endtime is not None or timeout is not None:
+                if endtime is None:
+                    endtime = time.time() + timeout
+                elif timeout is None:
+                    timeout = self._remaining_time(endtime)
+
+            if endtime is not None:
                 # Enter a busy loop if we have a timeout.  This busy loop was
                 # cribbed from Lib/threading.py in Thread.wait() at r71065.
                 delay = 0.0005 # 500 us -> initial delay of 1 ms
                         break
                     remaining = self._remaining_time(endtime)
                     if remaining <= 0:
-                        raise TimeoutExpired(self.args)
+                        raise TimeoutExpired(self.args, timeout)
                     delay = min(delay * 2, remaining, .05)
                     time.sleep(delay)
             elif self.returncode is None:
             return self.returncode
 
 
-        def _communicate(self, input, endtime):
+        def _communicate(self, input, endtime, orig_timeout):
             if self.stdin and not self._communication_started:
                 # Flush stdio buffer.  This might block, if the user has
                 # been writing to .stdin in an uncontrolled fashion.
                     self.stdin.close()
 
             if _has_poll:
-                stdout, stderr = self._communicate_with_poll(input, endtime)
+                stdout, stderr = self._communicate_with_poll(input, endtime,
+                                                             orig_timeout)
             else:
-                stdout, stderr = self._communicate_with_select(input, endtime)
+                stdout, stderr = self._communicate_with_select(input, endtime,
+                                                               orig_timeout)
 
             self.wait(timeout=self._remaining_time(endtime))
 
             return (stdout, stderr)
 
 
-        def _communicate_with_poll(self, input, endtime):
+        def _communicate_with_poll(self, input, endtime, orig_timeout):
             stdout = None # Return
             stderr = None # Return
 
                     if e.args[0] == errno.EINTR:
                         continue
                     raise
-                self._check_timeout(endtime)
+                self._check_timeout(endtime, orig_timeout)
 
                 # XXX Rewrite these to use non-blocking I/O on the
                 # file objects; they are no longer using C stdio!
             return (stdout, stderr)
 
 
-        def _communicate_with_select(self, input, endtime):
+        def _communicate_with_select(self, input, endtime, orig_timeout):
             if not self._communication_started:
                 self._read_set = []
                 self._write_set = []
                 # According to the docs, returning three empty lists indicates
                 # that the timeout expired.
                 if not (rlist or wlist or xlist):
-                    raise TimeoutExpired(self.args)
+                    raise TimeoutExpired(self.args, orig_timeout)
                 # We also check what time it is ourselves for good measure.
-                self._check_timeout(endtime)
+                self._check_timeout(endtime, orig_timeout)
 
                 # XXX Rewrite these to use non-blocking I/O on the
                 # file objects; they are no longer using C stdio!
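
With these changes the expired timeout travels on the exception, so callers
can report how long they actually waited.  A small sketch (the ``sleep``
command is just an example):

    import subprocess

    try:
        subprocess.call(['sleep', '10'], timeout=2)
    except subprocess.TimeoutExpired as exc:
        print('%r did not finish within %s seconds' % (exc.cmd, exc.timeout))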

File Lib/test/crashers/README

 Once the crash is fixed, the test case should be moved into an appropriate test
 (even if it was originally from the test suite).  This ensures the regression
 doesn't happen again.  And if it does, it should be easier to track down.
+
+Also see Lib/test_crashers.py which exercises the crashers in this directory.
+In particular, make sure to add any new infinite loop crashers to the black
+list so it doesn't try to run them.

File Lib/test/crashers/compiler_recursion.py

 # e.g. '1*'*10**5+'1' will die in compiler_visit_expr
 
 # The exact limit to destroy the stack will vary by platform
-# but 100k should do the trick most places
-compile('()'*10**5, '?', 'exec')
+# but 1M should do the trick most places
+compile('()'*10**6, '?', 'exec')

File Lib/test/datetimetester.py

             self.assertEqual(dt, there_and_back)
 
         # Because we have a redundant spelling when DST begins, there is
-        # (unforunately) an hour when DST ends that can't be spelled at all in
+        # (unfortunately) an hour when DST ends that can't be spelled at all in
         # local time.  When DST ends, the clock jumps from 1:59 back to 1:00
         # again.  The hour 1:MM DST has no spelling then:  1:MM is taken to be
         # standard time.  1:MM DST == 0:MM EST, but 0:MM is taken to be

File Lib/test/pyclbr_input.py

 
     # XXX: This causes test_pyclbr.py to fail, but only because the
     #      introspection-based is_method() code in the test can't
-    #      distinguish between this and a geniune method function like m().
+    #      distinguish between this and a genuine method function like m().
     #      The pyclbr.py module gets this right as it parses the text.
     #
     #f = f

File Lib/test/test_binhex.py

     def setUp(self):
         self.fname1 = support.TESTFN + "1"
         self.fname2 = support.TESTFN + "2"
+        self.fname3 = support.TESTFN + "very_long_filename__very_long_filename__very_long_filename__very_long_filename__"
 
     def tearDown(self):
         support.unlink(self.fname1)
         support.unlink(self.fname2)
+        support.unlink(self.fname3)
 
     DATA = b'Jack is my hero'
 
 
         self.assertEqual(self.DATA, finish)
 
+    def test_binhex_error_on_long_filename(self):
+        """
+        The testcase fails if no exception is raised when a filename parameter provided to binhex.binhex()
+        is too long, or if the exception raised in binhex.binhex() is not an instance of binhex.Error.
+        """
+        f3 = open(self.fname3, 'wb')
+        f3.close()
+
+        self.assertRaises(binhex.Error, binhex.binhex, self.fname3, self.fname2)
 
 def test_main():
     support.run_unittest(BinHexTestCase)

File Lib/test/test_capi.py

                 context.event.set()
 
     def test_pendingcalls_non_threaded(self):
-        #again, just using the main thread, likely they will all be dispathced at
+        #again, just using the main thread, likely they will all be dispatched at
         #once.  It is ok to ask for too many, because we loop until we find a slot.
         #the loop can be interrupted to dispatch.
         #there are only 32 dispatch slots, so we go for twice that!

File Lib/test/test_crashers.py

+# Tests that the crashers in the Lib/test/crashers directory actually
+# do crash the interpreter as expected
+#
+# If a crasher is fixed, it should be moved elsewhere in the test suite to
+# ensure it continues to work correctly.
+
+import unittest
+import glob
+import os.path
+import test.support
+from test.script_helper import assert_python_failure
+
+CRASHER_DIR = os.path.join(os.path.dirname(__file__), "crashers")
+CRASHER_FILES = os.path.join(CRASHER_DIR, "*.py")
+
+infinite_loops = ["infinite_loop_re.py", "nasty_eq_vs_dict.py"]
+
+class CrasherTest(unittest.TestCase):
+
+    @test.support.cpython_only
+    def test_crashers_crash(self):
+        for fname in glob.glob(CRASHER_FILES):
+            if os.path.basename(fname) in infinite_loops:
+                continue
+            # Some "crashers" only trigger an exception rather than a
+            # segfault. Consider that an acceptable outcome.
+            if test.support.verbose:
+                print("Checking crasher:", fname)
+            assert_python_failure(fname)
+
+
+def test_main():
+    test.support.run_unittest(CrasherTest)
+    test.support.reap_children()
+
+if __name__ == "__main__":
+    test_main()

File Lib/test/test_datetime.py

File contents unchanged.

File Lib/test/test_decimal.py

                 try:
                     t = self.eval_line(line)
                 except DecimalException as exception:
-                    #Exception raised where there shoudn't have been one.
+                    #Exception raised where there shouldn't have been one.
                     self.fail('Exception "'+exception.__class__.__name__ + '" raised on line '+line)
 
         return

File Lib/test/test_descr.py

         except TypeError:
             pass
         else:
-            self.fail("Carlo Verre __setattr__ suceeded!")
+            self.fail("Carlo Verre __setattr__ succeeded!")
         try:
             object.__delattr__(str, "lower")
         except TypeError:

File Lib/test/test_doctest.py

         ?     +              ++    ^
     TestResults(failed=1, attempted=1)
 
-The REPORT_ONLY_FIRST_FAILURE supresses result output after the first
+The REPORT_ONLY_FIRST_FAILURE suppresses result output after the first
 failing example:
 
     >>> def f(x):
         2
     TestResults(failed=3, attempted=5)
 
-However, output from `report_start` is not supressed:
+However, output from `report_start` is not suppressed:
 
     >>> doctest.DocTestRunner(verbose=True, optionflags=flags).run(test)
     ... # doctest: +ELLIPSIS
     >>> doctest.master = None  # Reset master.
 
 (Note: we'll be clearing doctest.master after each call to
-`doctest.testfile`, to supress warnings about multiple tests with the
+`doctest.testfile`, to suppress warnings about multiple tests with the
 same name.)
 
 Globals may be specified with the `globs` and `extraglobs` parameters:
     TestResults(failed=0, attempted=2)
     >>> doctest.master = None  # Reset master.
 
-Verbosity can be increased with the optional `verbose` paremter:
+Verbosity can be increased with the optional `verbose` parameter:
 
     >>> doctest.testfile('test_doctest.txt', globs=globs, verbose=True)
     Trying:
     TestResults(failed=1, attempted=2)
     >>> doctest.master = None  # Reset master.
 
-The summary report may be supressed with the optional `report`
+The summary report may be suppressed with the optional `report`
 parameter:
 
     >>> doctest.testfile('test_doctest.txt', report=False)

File Lib/test/test_extcall.py

     >>> Foo.method(1, *[2, 3])
     5
 
-A PyCFunction that takes only positional parameters shoud allow an
+A PyCFunction that takes only positional parameters should allow an
 empty keyword dictionary to pass without a complaint, but raise a
 TypeError if te dictionary is not empty
 

File Lib/test/test_float.py

     def test_float_with_comma(self):
         # set locale to something that doesn't use '.' for the decimal point
         # float must not accept the locale specific decimal point but
-        # it still has to accept the normal python syntac
+        # it still has to accept the normal python syntax
         import locale
         if not locale.localeconv()['decimal_point'] == ',':
             return
     def assertEqualAndEqualSign(self, a, b):
         # fail unless a == b and a and b have the same sign bit;
         # the only difference from assertEqual is that this test
-        # distingishes -0.0 and 0.0.
+        # distinguishes -0.0 and 0.0.
         self.assertEqual((a, copysign(1.0, a)), (b, copysign(1.0, b)))
 
     @support.requires_IEEE_754

File Lib/test/test_gdb.py

                           " inferior's thread library, thread debugging will"
                           " not be available.\n",
                           '')
+        err = err.replace("warning: Cannot initialize thread debugging"
+                          " library: Debugger service failed\n",
+                          '')
 
         # Ensure no unexpected error messages:
         self.assertEqual(err, '')

File Lib/test/test_grammar.py

         ### simple_stmt: small_stmt (';' small_stmt)* [';']
         x = 1; pass; del x
         def foo():
-            # verify statments that end with semi-colons
+            # verify statements that end with semi-colons
             x = 1; pass; del x;
         foo()
 

File Lib/test/test_httpservers.py

         return False
 
 class BaseHTTPRequestHandlerTestCase(unittest.TestCase):
-    """Test the functionaility of the BaseHTTPServer.
+    """Test the functionality of the BaseHTTPServer.
 
        Test the support for the Expect 100-continue header.
        """

File Lib/test/test_inspect.py

         self.assertEqual(inspect.getattr_static(Something(), 'foo'), 3)
         self.assertEqual(inspect.getattr_static(Something, 'foo'), 3)
 
+    def test_dict_as_property(self):
+        test = self
+        test.called = False
+
+        class Foo(dict):
+            a = 3
+            @property
+            def __dict__(self):
+                test.called = True
+                return {}
+
+        foo = Foo()
+        foo.a = 4
+        self.assertEqual(inspect.getattr_static(foo, 'a'), 3)
+        self.assertFalse(test.called)
+
+    def test_custom_object_dict(self):
+        test = self
+        test.called = False
+
+        class Custom(dict):
+            def get(self, key, default=None):
+                test.called = True
+                super().get(key, default)
+
+        class Foo(object):
+            a = 3
+        foo = Foo()
+        foo.__dict__ = Custom()
+        self.assertEqual(inspect.getattr_static(foo, 'a'), 3)
+        self.assertFalse(test.called)
+
+    def test_metaclass_dict_as_property(self):
+        class Meta(type):
+            @property
+            def __dict__(self):
+                self.executed = True
+
+        class Thing(metaclass=Meta):
+            executed = False
+
+            def __init__(self):
+                self.spam = 42
+
+        instance = Thing()
+        self.assertEqual(inspect.getattr_static(instance, "spam"), 42)
+        self.assertFalse(Thing.executed)
 
 class TestGetGeneratorState(unittest.TestCase):
 

File Lib/test/test_iterlen.py

 
 Some containers become temporarily immutable during iteration.  This includes
 dicts, sets, and collections.deque.  Their implementation is equally simple
-though they need to permantently set their length to zero whenever there is
+though they need to permanently set their length to zero whenever there is
 an attempt to iterate after a length mutation.
 
 The situation slightly more involved whenever an object allows length mutation
-during iteration.  Lists and sequence iterators are dynanamically updatable.
+during iteration.  Lists and sequence iterators are dynamically updatable.
 So, if a list is extended during iteration, the iterator will continue through
 the new items.  If it shrinks to a point before the most recent iteration,
 then no further items are available and the length is reported at zero.

File Lib/test/test_itertools.py

 ...     return chain(iterable, repeat(None))
 
 >>> def ncycles(iterable, n):
-...     "Returns the seqeuence elements n times"
+...     "Returns the sequence elements n times"
 ...     return chain(*repeat(iterable, n))
 
 >>> def dotproduct(vec1, vec2):

File Lib/test/test_marshal.py

         #   >>> type(loads(dumps(Int())))
         #   <type 'int'>
         for typ in (int, float, complex, tuple, list, dict, set, frozenset):
-            # Note: str sublclasses are not tested because they get handled
+            # Note: str subclasses are not tested because they get handled
             # by marshal's routines for objects supporting the buffer API.
             subtyp = type('subtyp', (typ,), {})
             self.assertRaises(ValueError, marshal.dumps, subtyp())

File Lib/test/test_math.py

 
         # the following tests have been commented out since they don't
         # really belong here:  the implementation of ** for floats is
-        # independent of the implemention of math.pow
+        # independent of the implementation of math.pow
         #self.assertEqual(1**NAN, 1)
         #self.assertEqual(1**INF, 1)
         #self.assertEqual(1**NINF, 1)

File Lib/test/test_mmap.py

             m2.close()
             m1.close()
 
-            # Test differnt tag
+            # Test different tag
             m1 = mmap.mmap(-1, len(data1), tagname="foo")
             m1[:] = data1
             m2 = mmap.mmap(-1, len(data2), tagname="boo")

File Lib/test/test_multiprocessing.py

         event = self.Event()
         wait = TimingWrapper(event.wait)
 
-        # Removed temporaily, due to API shear, this does not
+        # Removed temporarily, due to API shear, this does not
         # work with threading._Event objects. is_set == isSet
         self.assertEqual(event.is_set(), False)
 
 
         util.Finalize(None, conn.send, args=('STOP',), exitpriority=-100)
 
-        # call mutliprocessing's cleanup function then exit process without
+        # call multiprocessing's cleanup function then exit process without
         # garbage collecting locals
         util._exit_function()
         conn.close()

File Lib/test/test_pep292.py

File contents unchanged.

File Lib/test/test_pkg.py

         if self.root: # Only clean if the test was actually run
             cleanout(self.root)
 
-        # delete all modules concerning the tested hiearchy
+        # delete all modules concerning the tested hierarchy
         if self.pkgname:
             modules = [name for name in sys.modules
                        if self.pkgname in name.split('.')]

File Lib/test/test_posixpath.py

 import sys
 from posixpath import realpath, abspath, dirname, basename
 
+try:
+    import posix
+except ImportError:
+    posix = None
+
 # An absolute path to a temporary filename for testing. We can't rely on TESTFN
 # being an absolute path, so we need this.
 
 
     def test_islink(self):
         self.assertIs(posixpath.islink(support.TESTFN + "1"), False)
+        self.assertIs(posixpath.lexists(support.TESTFN + "2"), False)
         f = open(support.TESTFN + "1", "wb")
         try:
             f.write(b"foo")
 
     def test_ismount(self):
         self.assertIs(posixpath.ismount("/"), True)
+        self.assertIs(posixpath.ismount(b"/"), True)
+
+    def test_ismount_non_existent(self):
+        # Non-existent mountpoint.
+        self.assertIs(posixpath.ismount(ABSTFN), False)
+        try:
+            os.mkdir(ABSTFN)
+            self.assertIs(posixpath.ismount(ABSTFN), False)
+        finally:
+            safe_rmdir(ABSTFN)
+
+    @unittest.skipUnless(support.can_symlink(),
+                         "Test requires symlink support")
+    def test_ismount_symlinks(self):
+        # Symlinks are never mountpoints.
+        try:
+            os.symlink("/", ABSTFN)
+            self.assertIs(posixpath.ismount(ABSTFN), False)
+        finally:
+            os.unlink(ABSTFN)
+
+    @unittest.skipIf(posix is None, "Test requires posix module")
+    def test_ismount_different_device(self):
+        # Simulate the path being on a different device from its parent by
+        # mocking out st_dev.
+        save_lstat = os.lstat
+        def fake_lstat(path):
+            st_ino = 0
+            st_dev = 0
+            if path == ABSTFN:
+                st_dev = 1
+                st_ino = 1
+            return posix.stat_result((0, st_ino, st_dev, 0, 0, 0, 0, 0, 0, 0))
+        try:
+            os.lstat = fake_lstat
+            self.assertIs(posixpath.ismount(ABSTFN), True)
+        finally:
+            os.lstat = save_lstat
 
     def test_expanduser(self):
         self.assertEqual(posixpath.expanduser("foo"), "foo")
             with support.EnvironmentVarGuard() as env:
                 env['HOME'] = '/'
                 self.assertEqual(posixpath.expanduser("~"), "/")
+                # expanduser should fall back to using the password database
+                del env['HOME']
+                home = pwd.getpwuid(os.getuid()).pw_dir
+                self.assertEqual(posixpath.expanduser("~"), home)
 
     def test_normpath(self):
         self.assertEqual(posixpath.normpath(""), ".")
     @unittest.skipUnless(hasattr(os, "symlink"),
                          "Missing symlink implementation")
     @skip_if_ABSTFN_contains_backslash
+    def test_realpath_relative(self):
+        try:
+            os.symlink(posixpath.relpath(ABSTFN+"1"), ABSTFN)
+            self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
+        finally:
+            support.unlink(ABSTFN)
+
+    @unittest.skipUnless(hasattr(os, "symlink"),
+                         "Missing symlink implementation")
+    @skip_if_ABSTFN_contains_backslash
     def test_realpath_symlink_loops(self):
         # Bug #930024, return the path unchanged if we get into an infinite
         # symlink loop.
         finally:
             os.getcwdb = real_getcwdb
 
+    def test_sameopenfile(self):
+        fname = support.TESTFN + "1"
+        with open(fname, "wb") as a, open(fname, "wb") as b:
+            self.assertTrue(posixpath.sameopenfile(a.fileno(), b.fileno()))
+
 
 class PosixCommonTest(test_genericpath.CommonTest):
     pathmodule = posixpath

File Lib/test/test_print.py

 # A dispatch table all 8 combinations of providing
 #  sep, end, and file
 # I use this machinery so that I'm not just passing default
-#  values to print, I'm eiher passing or not passing in the
+#  values to print, I'm either passing or not passing in the
 #  arguments
 dispatch = {
     (False, False, False):

File Lib/test/test_shutil.py

 import stat
 import os
 import os.path
+import functools
 from test import support
 from test.support import TESTFN
 from os.path import splitdrive
 except ImportError:
     ZIP_SUPPORT = find_executable('zip')
 
+def _fake_rename(*args, **kwargs):
+    # Pretend the destination path is on a different filesystem.
+    raise OSError()
+
+def mock_rename(func):
+    @functools.wraps(func)
+    def wrap(*args, **kwargs):
+        try:
+            builtin_rename = os.rename
+            os.rename = _fake_rename
+            return func(*args, **kwargs)
+        finally:
+            os.rename = builtin_rename
+    return wrap
+
 class TestShutil(unittest.TestCase):
 
     def setUp(self):
         shutil.copytree(src_dir, dst_dir, symlinks=True)
         self.assertIn('test.txt', os.listdir(dst_dir))
 
+    def _copy_file(self, method):
+        fname = 'test.txt'
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, fname])
+        file1 = os.path.join(tmpdir, fname)
+        tmpdir2 = self.mkdtemp()
+        method(file1, tmpdir2)
+        file2 = os.path.join(tmpdir2, fname)
+        return (file1, file2)
+
+    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
+    def test_copy(self):
+        # Ensure that the copied file exists and has the same mode bits.
+        file1, file2 = self._copy_file(shutil.copy)
+        self.assertTrue(os.path.exists(file2))
+        self.assertEqual(os.stat(file1).st_mode, os.stat(file2).st_mode)
+
+    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
+    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.utime')
+    def test_copy2(self):
+        # Ensure that the copied file exists and has the same mode and
+        # modification time bits.
+        file1, file2 = self._copy_file(shutil.copy2)
+        self.assertTrue(os.path.exists(file2))
+        file1_stat = os.stat(file1)
+        file2_stat = os.stat(file2)
+        self.assertEqual(file1_stat.st_mode, file2_stat.st_mode)
+        for attr in 'st_atime', 'st_mtime':
+            # The modification times may be truncated in the new file.
+            self.assertLessEqual(getattr(file1_stat, attr),
+                                 getattr(file2_stat, attr) + 1)
+        if hasattr(os, 'chflags') and hasattr(file1_stat, 'st_flags'):
+            self.assertEqual(getattr(file1_stat, 'st_flags'),
+                             getattr(file2_stat, 'st_flags'))
+
     @unittest.skipUnless(zlib, "requires zlib")
     def test_make_tarball(self):
         # creating something to tar
         self.write_file([tmpdir, 'sub', 'file3'], 'xxx')
 
         tmpdir2 = self.mkdtemp()
+        # force shutil to create the directory
+        os.rmdir(tmpdir2)
         unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
                             "source and target should be on same drive")
 
         self.write_file([tmpdir, 'file2'], 'xxx')
 
         tmpdir2 = self.mkdtemp()
+        # force shutil to create the directory
+        os.rmdir(tmpdir2)
         base_name = os.path.join(tmpdir2, 'archive')
         _make_zipfile(base_name, tmpdir)
 
             diff = self._compare_dirs(tmpdir, tmpdir2)
             self.assertEqual(diff, [])
 
+            # and again, this time with the format specified
+            tmpdir3 = self.mkdtemp()
+            unpack_archive(filename, tmpdir3, format=format)
+            diff = self._compare_dirs(tmpdir, tmpdir3)
+            self.assertEqual(diff, [])
+        self.assertRaises(shutil.ReadError, unpack_archive, TESTFN)
+        self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx')
+
     def test_unpack_registery(self):
 
         formats = get_unpack_formats()
         self.dst_dir = tempfile.mkdtemp()
         self.src_file = os.path.join(self.src_dir, filename)
         self.dst_file = os.path.join(self.dst_dir, filename)
-        # Try to create a dir in the current directory, hoping that it is
-        # not located on the same filesystem as the system tmp dir.
-        try:
-            self.dir_other_fs = tempfile.mkdtemp(
-                dir=os.path.dirname(__file__))
-            self.file_other_fs = os.path.join(self.dir_other_fs,
-                filename)
-        except OSError:
-            self.dir_other_fs = None
         with open(self.src_file, "wb") as f:
             f.write(b"spam")
 
     def tearDown(self):
-        for d in (self.src_dir, self.dst_dir, self.dir_other_fs):
+        for d in (self.src_dir, self.dst_dir):
             try:
                 if d:
                     shutil.rmtree(d)
         # Move a file inside an existing dir on the same filesystem.
         self._check_move_file(self.src_file, self.dst_dir, self.dst_file)
 
+    @mock_rename
     def test_move_file_other_fs(self):
         # Move a file to an existing dir on another filesystem.
-        if not self.dir_other_fs:
-            # skip
-            return
-        self._check_move_file(self.src_file, self.file_other_fs,
-            self.file_other_fs)
+        self.test_move_file()
 
+    @mock_rename
     def test_move_file_to_dir_other_fs(self):
         # Move a file to another location on another filesystem.
-        if not self.dir_other_fs:
-            # skip
-            return
-        self._check_move_file(self.src_file, self.dir_other_fs,
-            self.file_other_fs)
+        self.test_move_file_to_dir()
 
     def test_move_dir(self):
         # Move a dir to another location on the same filesystem.
             except:
                 pass
 
+    @mock_rename
     def test_move_dir_other_fs(self):
         # Move a dir to another location on another filesystem.
-        if not self.dir_other_fs:
-            # skip
-            return
-        dst_dir = tempfile.mktemp(dir=self.dir_other_fs)
-        try:
-            self._check_move_dir(self.src_dir, dst_dir, dst_dir)
-        finally:
-            try:
-                shutil.rmtree(dst_dir)
-            except:
-                pass
+        self.test_move_dir()
 
     def test_move_dir_to_dir(self):
         # Move a dir inside an existing dir on the same filesystem.
         self._check_move_dir(self.src_dir, self.dst_dir,
             os.path.join(self.dst_dir, os.path.basename(self.src_dir)))
 
+    @mock_rename
     def test_move_dir_to_dir_other_fs(self):
         # Move a dir inside an existing dir on another filesystem.
-        if not self.dir_other_fs:
-            # skip
-            return
-        self._check_move_dir(self.src_dir, self.dir_other_fs,
-            os.path.join(self.dir_other_fs, os.path.basename(self.src_dir)))
+        self.test_move_dir_to_dir()
 
     def test_existing_file_inside_dest_dir(self):
         # A file with the same name inside the destination dir already exist