Radix cross Linux

The main Radix cross Linux repository contains the build scripts for packages that provide the most complete and commonly used functionality for desktop machines.

Index: Lib/distutils/command/install.py
===================================================================
--- Lib/distutils/command/install.py	(nonexistent)
+++ Lib/distutils/command/install.py	(revision 5)
@@ -0,0 +1,672 @@
+"""distutils.command.install
+
+Implements the Distutils 'install' command."""
+
+from distutils import log
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id$"
+
+import sys, os, string
+from types import *
+from distutils.core import Command
+from distutils.debug import DEBUG
+from distutils.sysconfig import get_config_vars
+from distutils.errors import DistutilsPlatformError
+from distutils.file_util import write_file
+from distutils.util import convert_path, subst_vars, change_root
+from distutils.util import get_platform
+from distutils.errors import DistutilsOptionError
+from site import USER_BASE
+from site import USER_SITE
+
+
+if sys.version < "2.2":
+    WINDOWS_SCHEME = {
+        'purelib': '$base',
+        'platlib': '$base',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+    }
+else:
+    WINDOWS_SCHEME = {
+        'purelib': '$base/Lib/site-packages',
+        'platlib': '$base/Lib/site-packages',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+    }
+
+INSTALL_SCHEMES = {
+    'unix_prefix': {
+        'purelib': '$base/lib32/python$py_version_short/site-packages',
+        'platlib': '$platbase/lib32/python$py_version_short/site-packages',
+        'headers': '$base/include/python$py_version_short/$dist_name',
+        'scripts': '$base/bin',
+        'data'   : '$base',
+        },
+    'unix_home': {
+        'purelib': '$base/lib32/python',
+        'platlib': '$base/lib32/python',
+        'headers': '$base/include/python/$dist_name',
+        'scripts': '$base/bin',
+        'data'   : '$base',
+        },
+    'unix_user': {
+        'purelib': '$usersite',
+        'platlib': '$usersite',
+        'headers': '$userbase/include/python$py_version_short/$dist_name',
+        'scripts': '$userbase/bin',
+        'data'   : '$userbase',
+        },
+    'nt': WINDOWS_SCHEME,
+    'nt_user': {
+        'purelib': '$usersite',
+        'platlib': '$usersite',
+        'headers': '$userbase/Python$py_version_nodot/Include/$dist_name',
+        'scripts': '$userbase/Scripts',
+        'data'   : '$userbase',
+        },
+    'os2': {
+        'purelib': '$base/Lib/site-packages',
+        'platlib': '$base/Lib/site-packages',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+        },
+    'os2_home': {
+        'purelib': '$usersite',
+        'platlib': '$usersite',
+        'headers': '$userbase/include/python$py_version_short/$dist_name',
+        'scripts': '$userbase/bin',
+        'data'   : '$userbase',
+        },
+    }
+
+# The keys to an installation scheme; if any new types of files are to be
+# installed, be sure to add an entry to every installation scheme above,
+# and to SCHEME_KEYS here.
+SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
+
+
+class install (Command):
+
+    description = "install everything from build directory"
+
+    user_options = [
+        # Select installation scheme and set base director(y|ies)
+        ('prefix=', None,
+         "installation prefix"),
+        ('exec-prefix=', None,
+         "(Unix only) prefix for platform-specific files"),
+        ('home=', None,
+         "(Unix only) home directory to install under"),
+        ('user', None,
+         "install in user site-package '%s'" % USER_SITE),
+
+        # Or, just set the base director(y|ies)
+        ('install-base=', None,
+         "base installation directory (instead of --prefix or --home)"),
+        ('install-platbase=', None,
+         "base installation directory for platform-specific files " +
+         "(instead of --exec-prefix or --home)"),
+        ('root=', None,
+         "install everything relative to this alternate root directory"),
+
+        # Or, explicitly set the installation scheme
+        ('install-purelib=', None,
+         "installation directory for pure Python module distributions"),
+        ('install-platlib=', None,
+         "installation directory for non-pure module distributions"),
+        ('install-lib=', None,
+         "installation directory for all module distributions " +
+         "(overrides --install-purelib and --install-platlib)"),
+
+        ('install-headers=', None,
+         "installation directory for C/C++ headers"),
+        ('install-scripts=', None,
+         "installation directory for Python scripts"),
+        ('install-data=', None,
+         "installation directory for data files"),
+
+        # Byte-compilation options -- see install_lib.py for details, as
+        # these are duplicated from there (but only install_lib does
+        # anything with them).
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+
+        # Miscellaneous control options
+        ('force', 'f',
+         "force installation (overwrite any existing files)"),
+        ('skip-build', None,
+         "skip rebuilding everything (for testing/debugging)"),
+
+        # Where to install documentation (eventually!)
+        #('doc-format=', None, "format of documentation to generate"),
+        #('install-man=', None, "directory for Unix man pages"),
+        #('install-html=', None, "directory for HTML documentation"),
+        #('install-info=', None, "directory for GNU info files"),
+
+        ('record=', None,
+         "filename in which to record list of installed files"),
+        ]
+
+    boolean_options = ['compile', 'force', 'skip-build', 'user']
+    negative_opt = {'no-compile' : 'compile'}
+
+
+    def initialize_options (self):
+
+        # High-level options: these select both an installation base
+        # and scheme.
+        self.prefix = None
+        self.exec_prefix = None
+        self.home = None
+        self.user = 0
+
+        # These select only the installation base; it's up to the user to
+        # specify the installation scheme (currently, that means supplying
+        # the --install-{platlib,purelib,scripts,data} options).
+        self.install_base = None
+        self.install_platbase = None
+        self.root = None
+
+        # These options are the actual installation directories; if not
+        # supplied by the user, they are filled in using the installation
+        # scheme implied by prefix/exec-prefix/home and the contents of
+        # that installation scheme.
+        self.install_purelib = None     # for pure module distributions
+        self.install_platlib = None     # non-pure (dists w/ extensions)
+        self.install_headers = None     # for C/C++ headers
+        self.install_lib = None         # set to either purelib or platlib
+        self.install_scripts = None
+        self.install_data = None
+        self.install_userbase = USER_BASE
+        self.install_usersite = USER_SITE
+
+        self.compile = None
+        self.optimize = None
+
+        # These two are for putting non-packagized distributions into their
+        # own directory and creating a .pth file if it makes sense.
+        # 'extra_path' comes from the setup file; 'install_path_file' can
+        # be turned off if it makes no sense to install a .pth file.  (But
+        # better to install it uselessly than to guess wrong and not
+        # install it when it's necessary and would be used!)  Currently,
+        # 'install_path_file' is always true unless some outsider meddles
+        # with it.
+        self.extra_path = None
+        self.install_path_file = 1
+
+        # 'force' forces installation, even if target files are not
+        # out-of-date.  'skip_build' skips running the "build" command,
+        # handy if you know it's not necessary.  'warn_dir' (which is *not*
+        # a user option, it's just there so the bdist_* commands can turn
+        # it off) determines whether we warn about installing to a
+        # directory not in sys.path.
+        self.force = 0
+        self.skip_build = 0
+        self.warn_dir = 1
+
+        # These are only here as a conduit from the 'build' command to the
+        # 'install_*' commands that do the real work.  ('build_base' isn't
+        # actually used anywhere, but it might be useful in future.)  They
+        # are not user options, because if the user told the install
+        # command where the build directory is, that wouldn't affect the
+        # build command.
+        self.build_base = None
+        self.build_lib = None
+
+        # Not defined yet because we don't know anything about
+        # documentation yet.
+        #self.install_man = None
+        #self.install_html = None
+        #self.install_info = None
+
+        self.record = None
+
+
+    # -- Option finalizing methods -------------------------------------
+    # (This is rather more involved than for most commands,
+    # because this is where the policy for installing third-
+    # party Python modules on various platforms given a wide
+    # array of user input is decided.  Yes, it's quite complex!)
+
+    def finalize_options (self):
+
+        # This method (and its pliant slaves, like 'finalize_unix()',
+        # 'finalize_other()', and 'select_scheme()') is where the default
+        # installation directories for modules, extension modules, and
+        # anything else we care to install from a Python module
+        # distribution are determined.  Thus, this code makes a pretty
+        # important policy
+        # statement about how third-party stuff is added to a Python
+        # installation!  Note that the actual work of installation is done
+        # by the relatively simple 'install_*' commands; they just take
+        # their orders from the installation directory options determined
+        # here.
+
+        # Check for errors/inconsistencies in the options; first, stuff
+        # that's wrong on any platform.
+
+        if ((self.prefix or self.exec_prefix or self.home) and
+            (self.install_base or self.install_platbase)):
+            raise DistutilsOptionError, \
+                  ("must supply either prefix/exec-prefix/home or " +
+                   "install-base/install-platbase -- not both")
+
+        if self.home and (self.prefix or self.exec_prefix):
+            raise DistutilsOptionError, \
+                  "must supply either home or prefix/exec-prefix -- not both"
+
+        if self.user and (self.prefix or self.exec_prefix or self.home or
+                self.install_base or self.install_platbase):
+            raise DistutilsOptionError("can't combine user with prefix, "
+                                       "exec_prefix/home, or install_(plat)base")
+
+        # Next, stuff that's wrong (or dubious) only on certain platforms.
+        if os.name != "posix":
+            if self.exec_prefix:
+                self.warn("exec-prefix option ignored on this platform")
+                self.exec_prefix = None
+
+        # Now the interesting logic -- so interesting that we farm it out
+        # to other methods.  The goal of these methods is to set the final
+        # values for the install_{lib,scripts,data,...}  options, using as
+        # input a heady brew of prefix, exec_prefix, home, install_base,
+        # install_platbase, user-supplied versions of
+        # install_{purelib,platlib,lib,scripts,data,...}, and the
+        # INSTALL_SCHEME dictionary above.  Phew!
+
+        self.dump_dirs("pre-finalize_{unix,other}")
+
+        if os.name == 'posix':
+            self.finalize_unix()
+        else:
+            self.finalize_other()
+
+        self.dump_dirs("post-finalize_{unix,other}()")
+
+        # Expand configuration variables, tilde, etc. in self.install_base
+        # and self.install_platbase -- that way, we can use $base or
+        # $platbase in the other installation directories and not worry
+        # about needing recursive variable expansion (shudder).
+
+        py_version = (string.split(sys.version))[0]
+        (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
+        self.config_vars = {'dist_name': self.distribution.get_name(),
+                            'dist_version': self.distribution.get_version(),
+                            'dist_fullname': self.distribution.get_fullname(),
+                            'py_version': py_version,
+                            'py_version_short': py_version[0:3],
+                            'py_version_nodot': py_version[0] + py_version[2],
+                            'sys_prefix': prefix,
+                            'prefix': prefix,
+                            'sys_exec_prefix': exec_prefix,
+                            'exec_prefix': exec_prefix,
+                            'userbase': self.install_userbase,
+                            'usersite': self.install_usersite,
+                           }
+        self.expand_basedirs()
+
+        self.dump_dirs("post-expand_basedirs()")
+
+        # Now define config vars for the base directories so we can expand
+        # everything else.
+        self.config_vars['base'] = self.install_base
+        self.config_vars['platbase'] = self.install_platbase
+
+        if DEBUG:
+            from pprint import pprint
+            print "config vars:"
+            pprint(self.config_vars)
+
+        # Expand "~" and configuration variables in the installation
+        # directories.
+        self.expand_dirs()
+
+        self.dump_dirs("post-expand_dirs()")
+
+        # Create directories in the home dir:
+        if self.user:
+            self.create_home_path()
+
+        # Pick the actual directory to install all modules to: either
+        # install_purelib or install_platlib, depending on whether this
+        # module distribution is pure or not.  Of course, if the user
+        # already specified install_lib, use their selection.
+        if self.install_lib is None:
+            if self.distribution.ext_modules: # has extensions: non-pure
+                self.install_lib = self.install_platlib
+            else:
+                self.install_lib = self.install_purelib
+
+
+        # Convert directories from Unix /-separated syntax to the local
+        # convention.
+        self.convert_paths('lib', 'purelib', 'platlib',
+                           'scripts', 'data', 'headers',
+                           'userbase', 'usersite')
+
+        # Well, we're not actually fully completely finalized yet: we still
+        # have to deal with 'extra_path', which is the hack for allowing
+        # non-packagized module distributions (hello, Numerical Python!) to
+        # get their own directories.
+        self.handle_extra_path()
+        self.install_libbase = self.install_lib # needed for .pth file
+        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
+
+        # If a new root directory was supplied, make all the installation
+        # dirs relative to it.
+        if self.root is not None:
+            self.change_roots('libbase', 'lib', 'purelib', 'platlib',
+                              'scripts', 'data', 'headers')
+
+        self.dump_dirs("after prepending root")
+
+        # Find out the build directories, ie. where to install from.
+        self.set_undefined_options('build',
+                                   ('build_base', 'build_base'),
+                                   ('build_lib', 'build_lib'))
+
+        # Punt on doc directories for now -- after all, we're punting on
+        # documentation completely!
+
+    # finalize_options ()
+
+
+    def dump_dirs (self, msg):
+        if DEBUG:
+            from distutils.fancy_getopt import longopt_xlate
+            print msg + ":"
+            for opt in self.user_options:
+                opt_name = opt[0]
+                if opt_name[-1] == "=":
+                    opt_name = opt_name[0:-1]
+                if opt_name in self.negative_opt:
+                    opt_name = string.translate(self.negative_opt[opt_name],
+                                                longopt_xlate)
+                    val = not getattr(self, opt_name)
+                else:
+                    opt_name = string.translate(opt_name, longopt_xlate)
+                    val = getattr(self, opt_name)
+                print "  %s: %s" % (opt_name, val)
+
+
+    def finalize_unix (self):
+
+        if self.install_base is not None or self.install_platbase is not None:
+            if ((self.install_lib is None and
+                 self.install_purelib is None and
+                 self.install_platlib is None) or
+                self.install_headers is None or
+                self.install_scripts is None or
+                self.install_data is None):
+                raise DistutilsOptionError, \
+                      ("install-base or install-platbase supplied, but "
+                      "installation scheme is incomplete")
+            return
+
+        if self.user:
+            if self.install_userbase is None:
+                raise DistutilsPlatformError(
+                    "User base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme("unix_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("unix_home")
+        else:
+            if self.prefix is None:
+                if self.exec_prefix is not None:
+                    raise DistutilsOptionError, \
+                          "must not supply exec-prefix without prefix"
+
+                self.prefix = os.path.normpath(sys.prefix)
+                self.exec_prefix = os.path.normpath(sys.exec_prefix)
+
+            else:
+                if self.exec_prefix is None:
+                    self.exec_prefix = self.prefix
+
+            self.install_base = self.prefix
+            self.install_platbase = self.exec_prefix
+            self.select_scheme("unix_prefix")
+
+    # finalize_unix ()
+
+
+    def finalize_other (self):          # Windows and Mac OS for now
+
+        if self.user:
+            if self.install_userbase is None:
+                raise DistutilsPlatformError(
+                    "User base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme(os.name + "_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("unix_home")
+        else:
+            if self.prefix is None:
+                self.prefix = os.path.normpath(sys.prefix)
+
+            self.install_base = self.install_platbase = self.prefix
+            try:
+                self.select_scheme(os.name)
+            except KeyError:
+                raise DistutilsPlatformError, \
+                      "I don't know how to install stuff on '%s'" % os.name
+
+    # finalize_other ()
+
+
+    def select_scheme (self, name):
+        # it's the caller's problem if they supply a bad name!
+        scheme = INSTALL_SCHEMES[name]
+        for key in SCHEME_KEYS:
+            attrname = 'install_' + key
+            if getattr(self, attrname) is None:
+                setattr(self, attrname, scheme[key])
+
+
+    def _expand_attrs (self, attrs):
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                if os.name == 'posix' or os.name == 'nt':
+                    val = os.path.expanduser(val)
+                val = subst_vars(val, self.config_vars)
+                setattr(self, attr, val)
+
+
+    def expand_basedirs (self):
+        self._expand_attrs(['install_base',
+                            'install_platbase',
+                            'root'])
+
+    def expand_dirs (self):
+        self._expand_attrs(['install_purelib',
+                            'install_platlib',
+                            'install_lib',
+                            'install_headers',
+                            'install_scripts',
+                            'install_data',])
+
+
+    def convert_paths (self, *names):
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, convert_path(getattr(self, attr)))
+
+
+    def handle_extra_path (self):
+
+        if self.extra_path is None:
+            self.extra_path = self.distribution.extra_path
+
+        if self.extra_path is not None:
+            if type(self.extra_path) is StringType:
+                self.extra_path = string.split(self.extra_path, ',')
+
+            if len(self.extra_path) == 1:
+                path_file = extra_dirs = self.extra_path[0]
+            elif len(self.extra_path) == 2:
+                (path_file, extra_dirs) = self.extra_path
+            else:
+                raise DistutilsOptionError, \
+                      ("'extra_path' option must be a list, tuple, or "
+                      "comma-separated string with 1 or 2 elements")
+
+            # convert to local form in case Unix notation used (as it
+            # should be in setup scripts)
+            extra_dirs = convert_path(extra_dirs)
+
+        else:
+            path_file = None
+            extra_dirs = ''
+
+        # XXX should we warn if path_file and not extra_dirs? (in which
+        # case the path file would be harmless but pointless)
+        self.path_file = path_file
+        self.extra_dirs = extra_dirs
+
+    # handle_extra_path ()
+
+
+    def change_roots (self, *names):
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, change_root(self.root, getattr(self, attr)))
+
+    def create_home_path(self):
+        """Create directories under ~
+        """
+        if not self.user:
+            return
+        home = convert_path(os.path.expanduser("~"))
+        for name, path in self.config_vars.iteritems():
+            if path.startswith(home) and not os.path.isdir(path):
+                self.debug_print("os.makedirs('%s', 0700)" % path)
+                os.makedirs(path, 0700)
+
+    # -- Command execution methods -------------------------------------
+
+    def run (self):
+
+        # Obviously have to build before we can install
+        if not self.skip_build:
+            self.run_command('build')
+            # If we built for any other platform, we can't install.
+            build_plat = self.distribution.get_command_obj('build').plat_name
+            # check warn_dir - it is a clue that the 'install' is happening
+            # internally, and not to sys.path, so we don't check the platform
+            # matches what we are running.
+            if self.warn_dir and build_plat != get_platform():
+                raise DistutilsPlatformError("Can't install when "
+                                             "cross-compiling")
+
+        # Run all sub-commands (at least those that need to be run)
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        if self.path_file:
+            self.create_path_file()
+
+        # write list of installed files, if requested.
+        if self.record:
+            outputs = self.get_outputs()
+            if self.root:               # strip any package prefix
+                root_len = len(self.root)
+                for counter in xrange(len(outputs)):
+                    outputs[counter] = outputs[counter][root_len:]
+            self.execute(write_file,
+                         (self.record, outputs),
+                         "writing list of installed files to '%s'" %
+                         self.record)
+
+        sys_path = map(os.path.normpath, sys.path)
+        sys_path = map(os.path.normcase, sys_path)
+        install_lib = os.path.normcase(os.path.normpath(self.install_lib))
+        if (self.warn_dir and
+            not (self.path_file and self.install_path_file) and
+            install_lib not in sys_path):
+            log.debug(("modules installed to '%s', which is not in "
+                       "Python's module search path (sys.path) -- "
+                       "you'll have to change the search path yourself"),
+                       self.install_lib)
+
+    # run ()
+
+    def create_path_file (self):
+        filename = os.path.join(self.install_libbase,
+                                self.path_file + ".pth")
+        if self.install_path_file:
+            self.execute(write_file,
+                         (filename, [self.extra_dirs]),
+                         "creating %s" % filename)
+        else:
+            self.warn("path file '%s' not created" % filename)
+
+
+    # -- Reporting methods ---------------------------------------------
+
+    def get_outputs (self):
+        # Assemble the outputs of all the sub-commands.
+        outputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            # Add the contents of cmd.get_outputs(), ensuring
+            # that outputs doesn't contain duplicate entries
+            for filename in cmd.get_outputs():
+                if filename not in outputs:
+                    outputs.append(filename)
+
+        if self.path_file and self.install_path_file:
+            outputs.append(os.path.join(self.install_libbase,
+                                        self.path_file + ".pth"))
+
+        return outputs
+
+    def get_inputs (self):
+        # XXX gee, this looks familiar ;-(
+        inputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            inputs.extend(cmd.get_inputs())
+
+        return inputs
+
+
+    # -- Predicates for sub-command list -------------------------------
+
+    def has_lib (self):
+        """Return true if the current distribution has any Python
+        modules to install."""
+        return (self.distribution.has_pure_modules() or
+                self.distribution.has_ext_modules())
+
+    def has_headers (self):
+        return self.distribution.has_headers()
+
+    def has_scripts (self):
+        return self.distribution.has_scripts()
+
+    def has_data (self):
+        return self.distribution.has_data_files()
+
+
+    # 'sub_commands': a list of commands this command might have to run to
+    # get its work done.  See cmd.py for more info.
+    sub_commands = [('install_lib',     has_lib),
+                    ('install_headers', has_headers),
+                    ('install_scripts', has_scripts),
+                    ('install_data',    has_data),
+                    ('install_egg_info', lambda self:True),
+                   ]
+
+# class install
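
For orientation, the 'unix_prefix' scheme in this patched install.py is what produces the Radix-specific lib32 layout: select_scheme() copies the scheme's template strings into the install_* options, and expand_dirs() then substitutes the $-variables via distutils.util.subst_vars(). Below is a minimal sketch of that expansion, assuming a /usr prefix and Python 2.7 (both values are illustrative assumptions, not taken from the patch):

    from distutils.util import subst_vars

    # Assumed stand-ins for the values finalize_options() derives from the
    # interpreter and the distribution metadata.
    config_vars = {
        'platbase': '/usr',            # assumed --exec-prefix
        'py_version_short': '2.7',     # assumed interpreter version
    }

    template = '$platbase/lib32/python$py_version_short/site-packages'
    print(subst_vars(template, config_vars))
    # expected output: /usr/lib32/python2.7/site-packages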
Index: Lib/distutils/command
===================================================================
--- Lib/distutils/command	(nonexistent)
+++ Lib/distutils/command	(revision 5)

Property changes on: Lib/distutils/command
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,73 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~
Index: Lib/distutils/sysconfig.py
===================================================================
--- Lib/distutils/sysconfig.py	(nonexistent)
+++ Lib/distutils/sysconfig.py	(revision 5)
@@ -0,0 +1,493 @@
+"""Provide access to Python's configuration information.  The specific
+configuration variables available depend heavily on the platform and
+configuration.  The values may be retrieved using
+get_config_var(name), and the list of variables is available via
+get_config_vars().keys().  Additional convenience functions are also
+available.
+
+Written by:   Fred L. Drake, Jr.
+Email:        <fdrake@acm.org>
+"""
+
+__revision__ = "$Id$"
+
+import os
+import re
+import string
+import sys
+
+from distutils.errors import DistutilsPlatformError
+
+# These are needed in a couple of spots, so just compute them once.
+PREFIX = os.path.normpath(sys.prefix)
+EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+
+# Path to the base directory of the project. On Windows the binary may
+# live in project/PCBuild9.  If we're dealing with an x64 Windows build,
+# it'll live in project/PCbuild/amd64.
+if sys.executable:
+    project_base = os.path.dirname(os.path.abspath(sys.executable))
+else:
+    # sys.executable can be empty if argv[0] has been changed and Python is
+    # unable to retrieve the real program name
+    project_base = os.getcwd()
+if os.name == "nt" and "pcbuild" in project_base[-8:].lower():
+    project_base = os.path.abspath(os.path.join(project_base, os.path.pardir))
+# PC/VS7.1
+if os.name == "nt" and "\\pc\\v" in project_base[-10:].lower():
+    project_base = os.path.abspath(os.path.join(project_base, os.path.pardir,
+                                                os.path.pardir))
+# PC/AMD64
+if os.name == "nt" and "\\pcbuild\\amd64" in project_base[-14:].lower():
+    project_base = os.path.abspath(os.path.join(project_base, os.path.pardir,
+                                                os.path.pardir))
+
+# set for cross builds
+if "_PYTHON_PROJECT_BASE" in os.environ:
+    # this is the build directory, at least for posix
+    project_base = os.path.normpath(os.environ["_PYTHON_PROJECT_BASE"])
+
+# python_build: (Boolean) if true, we're either building Python or
+# building an extension with an un-installed Python, so we use
+# different (hard-wired) directories.
+# Setup.local is available for Makefile builds including VPATH builds,
+# Setup.dist is available on Windows
+def _python_build():
+    for fn in ("Setup.dist", "Setup.local"):
+        if os.path.isfile(os.path.join(project_base, "Modules", fn)):
+            return True
+    return False
+python_build = _python_build()
+
+
+def get_python_version():
+    """Return a string containing the major and minor Python version,
+    leaving off the patchlevel.  Sample return values could be '1.5'
+    or '2.2'.
+    """
+    return sys.version[:3]
+
+
+def get_python_inc(plat_specific=0, prefix=None):
+    """Return the directory containing installed Python header files.
+
+    If 'plat_specific' is false (the default), this is the path to the
+    non-platform-specific header files, i.e. Python.h and so on;
+    otherwise, this is the path to platform-specific header files
+    (namely pyconfig.h).
+
+    If 'prefix' is supplied, use it instead of sys.prefix or
+    sys.exec_prefix -- i.e., ignore 'plat_specific'.
+    """
+    if prefix is None:
+        prefix = plat_specific and EXEC_PREFIX or PREFIX
+
+    if os.name == "posix":
+        if python_build:
+            if sys.executable:
+                buildir = os.path.dirname(sys.executable)
+            else:
+                # sys.executable can be empty if argv[0] has been changed
+                # and Python is unable to retrieve the real program name
+                buildir = os.getcwd()
+            if plat_specific:
+                # python.h is located in the buildir
+                inc_dir = buildir
+            else:
+                # the source dir is relative to the buildir
+                srcdir = os.path.abspath(os.path.join(buildir,
+                                         get_config_var('srcdir')))
+                # Include is located in the srcdir
+                inc_dir = os.path.join(srcdir, "Include")
+            return inc_dir
+        return os.path.join(prefix, "include", "python" + get_python_version())
+    elif os.name == "nt":
+        return os.path.join(prefix, "include")
+    elif os.name == "os2":
+        return os.path.join(prefix, "Include")
+    else:
+        raise DistutilsPlatformError(
+            "I don't know where Python installs its C header files "
+            "on platform '%s'" % os.name)
+
+
+def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+    """Return the directory containing the Python library (standard or
+    site additions).
+
+    If 'plat_specific' is true, return the directory containing
+    platform-specific modules, i.e. any module from a non-pure-Python
+    module distribution; otherwise, return the platform-shared library
+    directory.  If 'standard_lib' is true, return the directory
+    containing standard Python library modules; otherwise, return the
+    directory for site-specific modules.
+
+    If 'prefix' is supplied, use it instead of sys.prefix or
+    sys.exec_prefix -- i.e., ignore 'plat_specific'.
+    """
+    if prefix is None:
+        prefix = plat_specific and EXEC_PREFIX or PREFIX
+
+    if os.name == "posix":
+        libpython = os.path.join(prefix,
+                                 "lib32", "python" + get_python_version())
+        if standard_lib:
+            return libpython
+        else:
+            return os.path.join(libpython, "site-packages")
+
+    elif os.name == "nt":
+        if standard_lib:
+            return os.path.join(prefix, "Lib")
+        else:
+            if get_python_version() < "2.2":
+                return prefix
+            else:
+                return os.path.join(prefix, "Lib", "site-packages")
+
+    elif os.name == "os2":
+        if standard_lib:
+            return os.path.join(prefix, "Lib")
+        else:
+            return os.path.join(prefix, "Lib", "site-packages")
+
+    else:
+        raise DistutilsPlatformError(
+            "I don't know where Python installs its library "
+            "on platform '%s'" % os.name)
+
+
+
+def customize_compiler(compiler):
+    """Do any platform-specific customization of a CCompiler instance.
+
+    Mainly needed on Unix, so we can plug in the information that
+    varies across Unices and is stored in Python's Makefile.
+    """
+    if compiler.compiler_type == "unix":
+        if sys.platform == "darwin":
+            # Perform first-time customization of compiler-related
+            # config vars on OS X now that we know we need a compiler.
+            # This is primarily to support Pythons from binary
+            # installers.  The kind and paths to build tools on
+            # the user system may vary significantly from the system
+            # that Python itself was built on.  Also the user OS
+            # version and build tools may not support the same set
+            # of CPU architectures for universal builds.
+            global _config_vars
+            # Use get_config_var() to ensure _config_vars is initialized.
+            if not get_config_var('CUSTOMIZED_OSX_COMPILER'):
+                import _osx_support
+                _osx_support.customize_compiler(_config_vars)
+                _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+
+        (cc, cxx, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \
+            get_config_vars('CC', 'CXX', 'CFLAGS',
+                            'CCSHARED', 'LDSHARED', 'SO', 'AR',
+                            'ARFLAGS')
+
+        if 'CC' in os.environ:
+            newcc = os.environ['CC']
+            if (sys.platform == 'darwin'
+                    and 'LDSHARED' not in os.environ
+                    and ldshared.startswith(cc)):
+                # On OS X, if CC is overridden, use that as the default
+                #       command for LDSHARED as well
+                ldshared = newcc + ldshared[len(cc):]
+            cc = newcc
+        if 'CXX' in os.environ:
+            cxx = os.environ['CXX']
+        if 'LDSHARED' in os.environ:
+            ldshared = os.environ['LDSHARED']
+        if 'CPP' in os.environ:
+            cpp = os.environ['CPP']
+        else:
+            cpp = cc + " -E"           # not always
+        if 'LDFLAGS' in os.environ:
+            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
+        if 'CFLAGS' in os.environ:
+            cflags = cflags + ' ' + os.environ['CFLAGS']
+            ldshared = ldshared + ' ' + os.environ['CFLAGS']
+        if 'CPPFLAGS' in os.environ:
+            cpp = cpp + ' ' + os.environ['CPPFLAGS']
+            cflags = cflags + ' ' + os.environ['CPPFLAGS']
+            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
+        if 'AR' in os.environ:
+            ar = os.environ['AR']
+        if 'ARFLAGS' in os.environ:
+            archiver = ar + ' ' + os.environ['ARFLAGS']
+        else:
+            archiver = ar + ' ' + ar_flags
+
+        cc_cmd = cc + ' ' + cflags
+        compiler.set_executables(
+            preprocessor=cpp,
+            compiler=cc_cmd,
+            compiler_so=cc_cmd + ' ' + ccshared,
+            compiler_cxx=cxx,
+            linker_so=ldshared,
+            linker_exe=cc,
+            archiver=archiver)
+
+        compiler.shared_lib_extension = so_ext
+
+
+def get_config_h_filename():
+    """Return full pathname of installed pyconfig.h file."""
+    if python_build:
+        if os.name == "nt":
+            inc_dir = os.path.join(project_base, "PC")
+        else:
+            inc_dir = project_base
+    else:
+        inc_dir = get_python_inc(plat_specific=1)
+    if get_python_version() < '2.2':
+        config_h = 'config.h'
+    else:
+        # The name of the config.h file changed in 2.2
+        config_h = 'pyconfig.h'
+    return os.path.join(inc_dir, config_h)
+
+
+def get_makefile_filename():
+    """Return full pathname of installed Makefile from the Python build."""
+    if python_build:
+        return os.path.join(project_base, "Makefile")
+    lib_dir = get_python_lib(plat_specific=1, standard_lib=1)
+    return os.path.join(lib_dir, "config", "Makefile")
+
+
+def parse_config_h(fp, g=None):
+    """Parse a config.h-style file.
+
+    A dictionary containing name/value pairs is returned.  If an
+    optional dictionary is passed in as the second argument, it is
+    used instead of a new dictionary.
+    """
+    if g is None:
+        g = {}
+    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
+    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
+    #
+    while 1:
+        line = fp.readline()
+        if not line:
+            break
+        m = define_rx.match(line)
+        if m:
+            n, v = m.group(1, 2)
+            try: v = int(v)
+            except ValueError: pass
+            g[n] = v
+        else:
+            m = undef_rx.match(line)
+            if m:
+                g[m.group(1)] = 0
+    return g
+
+
+# Regexes needed for parsing Makefile (and similar syntaxes,
+# like old-style Setup files).
+_variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
+_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
+_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
+
+def parse_makefile(fn, g=None):
+    """Parse a Makefile-style file.
+
+    A dictionary containing name/value pairs is returned.  If an
+    optional dictionary is passed in as the second argument, it is
+    used instead of a new dictionary.
+    """
+    from distutils.text_file import TextFile
+    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
+
+    if g is None:
+        g = {}
+    done = {}
+    notdone = {}
+
+    while 1:
+        line = fp.readline()
+        if line is None:  # eof
+            break
+        m = _variable_rx.match(line)
+        if m:
+            n, v = m.group(1, 2)
+            v = v.strip()
+            # `$$' is a literal `$' in make
+            tmpv = v.replace('$$', '')
+
+            if "$" in tmpv:
+                notdone[n] = v
+            else:
+                try:
+                    v = int(v)
+                except ValueError:
+                    # insert literal `$'
+                    done[n] = v.replace('$$', '$')
+                else:
+                    done[n] = v
+
+    # do variable interpolation here
+    while notdone:
+        for name in notdone.keys():
+            value = notdone[name]
+            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
+            if m:
+                n = m.group(1)
+                found = True
+                if n in done:
+                    item = str(done[n])
+                elif n in notdone:
+                    # get it on a subsequent round
+                    found = False
+                elif n in os.environ:
+                    # do it like make: fall back to environment
+                    item = os.environ[n]
+                else:
+                    done[n] = item = ""
+                if found:
+                    after = value[m.end():]
+                    value = value[:m.start()] + item + after
+                    if "$" in after:
+                        notdone[name] = value
+                    else:
+                        try: value = int(value)
+                        except ValueError:
+                            done[name] = value.strip()
+                        else:
+                            done[name] = value
+                        del notdone[name]
+            else:
+                # bogus variable reference; just drop it since we can't deal
+                del notdone[name]
+
+    fp.close()
+
+    # strip spurious spaces
+    for k, v in done.items():
+        if isinstance(v, str):
+            done[k] = v.strip()
+
+    # save the results in the global dictionary
+    g.update(done)
+    return g
+
+
+def expand_makefile_vars(s, vars):
+    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
+    'string' according to 'vars' (a dictionary mapping variable names to
+    values).  Variables not present in 'vars' are silently expanded to the
+    empty string.  The variable values in 'vars' should not contain further
+    variable expansions; if 'vars' is the output of 'parse_makefile()',
+    you're fine.  Returns a variable-expanded version of 's'.
+    """
+
+    # This algorithm does multiple expansion, so if vars['foo'] contains
+    # "${bar}", it will expand ${foo} to ${bar}, and then expand
+    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
+    # 'parse_makefile()', which takes care of such expansions eagerly,
+    # according to make's variable expansion semantics.
+
+    while 1:
+        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
+        if m:
+            (beg, end) = m.span()
+            s = s[0:beg] + vars.get(m.group(1)) + s[end:]
+        else:
+            break
+    return s
+
+
+_config_vars = None
+
+def _init_posix():
+    """Initialize the module as appropriate for POSIX systems."""
+    # _sysconfigdata is generated at build time, see the sysconfig module
+    from _sysconfigdata import build_time_vars
+    global _config_vars
+    _config_vars = {}
+    _config_vars.update(build_time_vars)
+
+
+def _init_nt():
+    """Initialize the module as appropriate for NT"""
+    g = {}
+    # set basic install directories
+    g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
+    g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
+
+    # XXX hmmm.. a normal install puts include files here
+    g['INCLUDEPY'] = get_python_inc(plat_specific=0)
+
+    g['SO'] = '.pyd'
+    g['EXE'] = ".exe"
+    g['VERSION'] = get_python_version().replace(".", "")
+    g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable))
+
+    global _config_vars
+    _config_vars = g
+
+
+def _init_os2():
+    """Initialize the module as appropriate for OS/2"""
+    g = {}
+    # set basic install directories
+    g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
+    g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
+
+    # XXX hmmm.. a normal install puts include files here
+    g['INCLUDEPY'] = get_python_inc(plat_specific=0)
+
+    g['SO'] = '.pyd'
+    g['EXE'] = ".exe"
+
+    global _config_vars
+    _config_vars = g
+
+
+def get_config_vars(*args):
+    """With no arguments, return a dictionary of all configuration
+    variables relevant for the current platform.  Generally this includes
+    everything needed to build extensions and install both pure modules and
+    extensions.  On Unix, this means every variable defined in Python's
+    installed Makefile; on Windows and Mac OS it's a much smaller set.
+
+    With arguments, return a list of values that result from looking up
+    each argument in the configuration variable dictionary.
+    """
+    global _config_vars
+    if _config_vars is None:
+        func = globals().get("_init_" + os.name)
+        if func:
+            func()
+        else:
+            _config_vars = {}
+
+        # Normalized versions of prefix and exec_prefix are handy to have;
+        # in fact, these are the standard versions used most places in the
+        # Distutils.
+        _config_vars['prefix'] = PREFIX
+        _config_vars['exec_prefix'] = EXEC_PREFIX
+
+        # OS X platforms require special customization to handle
+        # multi-architecture, multi-os-version installers
+        if sys.platform == 'darwin':
+            import _osx_support
+            _osx_support.customize_config_vars(_config_vars)
+
+    if args:
+        vals = []
+        for name in args:
+            vals.append(_config_vars.get(name))
+        return vals
+    else:
+        return _config_vars
+
+def get_config_var(name):
+    """Return the value of a single variable using the dictionary
+    returned by 'get_config_vars()'.  Equivalent to
+    get_config_vars().get(name)
+    """
+    return get_config_vars().get(name)
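
The sysconfig helpers above are mostly consulted indirectly by the install command, but they can also be called directly. A short usage sketch follows; the commented paths are illustrative assumptions for a glibc target, and the real values depend on how this Python was built:

    from distutils import sysconfig

    print(sysconfig.get_python_version())    # e.g. '2.7'
    print(sysconfig.get_python_inc())        # e.g. '/usr/include/python2.7'
    print(sysconfig.get_python_lib())        # e.g. '/usr/lib32/python2.7/site-packages'
    print(sysconfig.get_config_var('CC'))    # compiler recorded in Python's Makefile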
Index: Lib/distutils
===================================================================
--- Lib/distutils	(nonexistent)
+++ Lib/distutils	(revision 5)

Property changes on: Lib/distutils
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,73 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~
Index: Lib/site.py
===================================================================
--- Lib/site.py	(nonexistent)
+++ Lib/site.py	(revision 5)
@@ -0,0 +1,591 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code.  Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path.  On
+Unix (including Mac OSX), it starts with sys.prefix and
+sys.exec_prefix (if different) and appends
+lib/python<version>/site-packages as well as lib/site-python.
+On other platforms (such as Windows), it tries each of the
+prefixes directly, as well as with lib/site-packages appended.  The
+resulting directories, if they exist, are appended to sys.path, and
+also inspected for path configuration files.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path.  Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once.  Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.5/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth.  Assume foo.pth contains the
+following:
+
+  # foo package configuration
+  foo
+  bar
+  bletch
+
+and bar.pth contains:
+
+  # bar package configuration
+  bar
+
+Then the following directories are added to sys.path, in this order:
+
+  /usr/local/lib/python2.5/site-packages/bar
+  /usr/local/lib/python2.5/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations.  If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+import __builtin__
+import traceback
+
+# Prefixes for site-packages; add additional prefixes like /usr/local here
+PREFIXES = [sys.prefix, sys.exec_prefix]
+# Enable per user site-packages directory
+# set it to False to disable the feature or True to force the feature
+ENABLE_USER_SITE = None
+
+# for distutils.commands.install
+# These values are initialized by the getuserbase() and getusersitepackages()
+# functions, through the main() function when Python starts.
+USER_SITE = None
+USER_BASE = None
+
+
+def makepath(*paths):
+    dir = os.path.join(*paths)
+    try:
+        dir = os.path.abspath(dir)
+    except OSError:
+        pass
+    return dir, os.path.normcase(dir)
+
+
+def abs__file__():
+    """Set all module' __file__ attribute to an absolute path"""
+    for m in sys.modules.values():
+        if hasattr(m, '__loader__'):
+            continue   # don't mess with a PEP 302-supplied __file__
+        try:
+            m.__file__ = os.path.abspath(m.__file__)
+        except (AttributeError, OSError):
+            pass
+
+
+def removeduppaths():
+    """ Remove duplicate entries from sys.path along with making them
+    absolute"""
+    # This ensures that the initial path provided by the interpreter contains
+    # only absolute pathnames, even if we're running from the build directory.
+    L = []
+    known_paths = set()
+    for dir in sys.path:
+        # Filter out duplicate paths (on case-insensitive file systems also
+        # if they only differ in case); turn relative paths into absolute
+        # paths.
+        dir, dircase = makepath(dir)
+        if not dircase in known_paths:
+            L.append(dir)
+            known_paths.add(dircase)
+    sys.path[:] = L
+    return known_paths
+
+
+def _init_pathinfo():
+    """Return a set containing all existing directory entries from sys.path"""
+    d = set()
+    for dir in sys.path:
+        try:
+            if os.path.isdir(dir):
+                dir, dircase = makepath(dir)
+                d.add(dircase)
+        except TypeError:
+            continue
+    return d
+
+
+def addpackage(sitedir, name, known_paths):
+    """Process a .pth file within the site-packages directory:
+       For each line in the file, either combine it with sitedir to a path
+       and add that to known_paths, or execute it if it starts with 'import '.
+    """
+    if known_paths is None:
+        _init_pathinfo()
+        reset = 1
+    else:
+        reset = 0
+    fullname = os.path.join(sitedir, name)
+    try:
+        f = open(fullname, "rU")
+    except IOError:
+        return
+    with f:
+        for n, line in enumerate(f):
+            if line.startswith("#"):
+                continue
+            try:
+                if line.startswith(("import ", "import\t")):
+                    exec line
+                    continue
+                line = line.rstrip()
+                dir, dircase = makepath(sitedir, line)
+                if not dircase in known_paths and os.path.exists(dir):
+                    sys.path.append(dir)
+                    known_paths.add(dircase)
+            except Exception as err:
+                print >>sys.stderr, "Error processing line {:d} of {}:\n".format(
+                    n+1, fullname)
+                for record in traceback.format_exception(*sys.exc_info()):
+                    for line in record.splitlines():
+                        print >>sys.stderr, '  '+line
+                print >>sys.stderr, "\nRemainder of file ignored"
+                break
+    if reset:
+        known_paths = None
+    return known_paths
+
+
+def addsitedir(sitedir, known_paths=None):
+    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+    'sitedir'"""
+    if known_paths is None:
+        known_paths = _init_pathinfo()
+        reset = 1
+    else:
+        reset = 0
+    sitedir, sitedircase = makepath(sitedir)
+    if not sitedircase in known_paths:
+        sys.path.append(sitedir)        # Add path component
+    try:
+        names = os.listdir(sitedir)
+    except os.error:
+        return
+    dotpth = os.extsep + "pth"
+    names = [name for name in names if name.endswith(dotpth)]
+    for name in sorted(names):
+        addpackage(sitedir, name, known_paths)
+    if reset:
+        known_paths = None
+    return known_paths
+
+
+def check_enableusersite():
+    """Check if user site directory is safe for inclusion
+
+    The function tests for the command line flag (including environment var),
+    process uid/gid equal to effective uid/gid.
+
+    None: Disabled for security reasons
+    False: Disabled by user (command line option)
+    True: Safe and enabled
+    """
+    if sys.flags.no_user_site:
+        return False
+
+    if hasattr(os, "getuid") and hasattr(os, "geteuid"):
+        # check process uid == effective uid
+        if os.geteuid() != os.getuid():
+            return None
+    if hasattr(os, "getgid") and hasattr(os, "getegid"):
+        # check process gid == effective gid
+        if os.getegid() != os.getgid():
+            return None
+
+    return True
+
+def getuserbase():
+    """Returns the `user base` directory path.
+
+    The `user base` directory can be used to store data. If the global
+    variable ``USER_BASE`` is not initialized yet, this function will also set
+    it.
+    """
+    global USER_BASE
+    if USER_BASE is not None:
+        return USER_BASE
+    from sysconfig import get_config_var
+    USER_BASE = get_config_var('userbase')
+    return USER_BASE
+
+def getusersitepackages():
+    """Returns the user-specific site-packages directory path.
+
+    If the global variable ``USER_SITE`` is not initialized yet, this
+    function will also set it.
+    """
+    global USER_SITE
+    user_base = getuserbase() # this will also set USER_BASE
+
+    if USER_SITE is not None:
+        return USER_SITE
+
+    from sysconfig import get_path
+    import os
+
+    if sys.platform == 'darwin':
+        from sysconfig import get_config_var
+        if get_config_var('PYTHONFRAMEWORK'):
+            USER_SITE = get_path('purelib', 'osx_framework_user')
+            return USER_SITE
+
+    USER_SITE = get_path('purelib', '%s_user' % os.name)
+    return USER_SITE
+
+def addusersitepackages(known_paths):
+    """Add a per user site-package to sys.path
+
+    Each user has its own python directory with site-packages in the
+    home directory.
+    """
+    # get the per user site-package path
+    # this call will also make sure USER_BASE and USER_SITE are set
+    user_site = getusersitepackages()
+
+    if ENABLE_USER_SITE and os.path.isdir(user_site):
+        addsitedir(user_site, known_paths)
+    return known_paths
+
+def getsitepackages():
+    """Returns a list containing all global site-packages directories
+    (and possibly site-python).
+
+    For each directory present in the global ``PREFIXES``, this function
+    will find its `site-packages` subdirectory depending on the system
+    environment, and will return a list of full paths.
+    """
+    sitepackages = []
+    seen = set()
+
+    for prefix in PREFIXES:
+        if not prefix or prefix in seen:
+            continue
+        seen.add(prefix)
+
+        if sys.platform in ('os2emx', 'riscos'):
+            sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
+        elif os.sep == '/':
+            sitepackages.append(os.path.join(prefix, "lib32",
+                                        "python" + sys.version[:3],
+                                        "site-packages"))
+            sitepackages.append(os.path.join(prefix, "lib", "site-python"))
+        else:
+            sitepackages.append(prefix)
+            sitepackages.append(os.path.join(prefix, "lib32", "site-packages"))
+    return sitepackages
+
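+# Illustrative sketch for getsitepackages() (assumes sys.prefix == '/usr';
+# the version suffix is hypothetical):
+#
+#     >>> import site
+#     >>> site.getsitepackages()
+#     ['/usr/lib32/python2.7/site-packages', '/usr/lib/site-python']
+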
+def addsitepackages(known_paths):
+    """Add site-packages (and possibly site-python) to sys.path"""
+    for sitedir in getsitepackages():
+        if os.path.isdir(sitedir):
+            addsitedir(sitedir, known_paths)
+
+    return known_paths
+
+def setBEGINLIBPATH():
+    """The OS/2 EMX port has optional extension modules that do double duty
+    as DLLs (and must use the .DLL file extension) for other extensions.
+    The library search path needs to be amended so these will be found
+    during module import.  Use BEGINLIBPATH so that these are at the start
+    of the library search path.
+
+    """
+    dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+    libpath = os.environ['BEGINLIBPATH'].split(';')
+    if libpath[-1]:
+        libpath.append(dllpath)
+    else:
+        libpath[-1] = dllpath
+    os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+    """Define new builtins 'quit' and 'exit'.
+
+    These are objects which make the interpreter exit when called.
+    The repr of each object contains a hint at how it works.
+
+    """
+    if os.sep == ':':
+        eof = 'Cmd-Q'
+    elif os.sep == '\\':
+        eof = 'Ctrl-Z plus Return'
+    else:
+        eof = 'Ctrl-D (i.e. EOF)'
+
+    class Quitter(object):
+        def __init__(self, name):
+            self.name = name
+        def __repr__(self):
+            return 'Use %s() or %s to exit' % (self.name, eof)
+        def __call__(self, code=None):
+            # Shells like IDLE catch the SystemExit, but listen when their
+            # stdin wrapper is closed.
+            try:
+                sys.stdin.close()
+            except:
+                pass
+            raise SystemExit(code)
+    __builtin__.quit = Quitter('quit')
+    __builtin__.exit = Quitter('exit')
+
+
+class _Printer(object):
+    """interactive prompt objects for printing the license text, a list of
+    contributors and the copyright notice."""
+
+    MAXLINES = 23
+
+    def __init__(self, name, data, files=(), dirs=()):
+        self.__name = name
+        self.__data = data
+        self.__files = files
+        self.__dirs = dirs
+        self.__lines = None
+
+    def __setup(self):
+        if self.__lines:
+            return
+        data = None
+        for dir in self.__dirs:
+            for filename in self.__files:
+                filename = os.path.join(dir, filename)
+                try:
+                    fp = file(filename, "rU")
+                    data = fp.read()
+                    fp.close()
+                    break
+                except IOError:
+                    pass
+            if data:
+                break
+        if not data:
+            data = self.__data
+        self.__lines = data.split('\n')
+        self.__linecnt = len(self.__lines)
+
+    def __repr__(self):
+        self.__setup()
+        if len(self.__lines) <= self.MAXLINES:
+            return "\n".join(self.__lines)
+        else:
+            return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+    def __call__(self):
+        self.__setup()
+        prompt = 'Hit Return for more, or q (and Return) to quit: '
+        lineno = 0
+        while 1:
+            try:
+                for i in range(lineno, lineno + self.MAXLINES):
+                    print self.__lines[i]
+            except IndexError:
+                break
+            else:
+                lineno += self.MAXLINES
+                key = None
+                while key is None:
+                    key = raw_input(prompt)
+                    if key not in ('', 'q'):
+                        key = None
+                if key == 'q':
+                    break
+
+def setcopyright():
+    """Set 'copyright' and 'credits' in __builtin__"""
+    __builtin__.copyright = _Printer("copyright", sys.copyright)
+    if sys.platform[:4] == 'java':
+        __builtin__.credits = _Printer(
+            "credits",
+            "Jython is maintained by the Jython developers (www.jython.org).")
+    else:
+        __builtin__.credits = _Printer("credits", """\
+    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+    for supporting Python development.  See www.python.org for more information.""")
+    here = os.path.dirname(os.__file__)
+    __builtin__.license = _Printer(
+        "license", "See https://www.python.org/psf/license/",
+        ["LICENSE.txt", "LICENSE"],
+        [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+    """Define the builtin 'help'.
+    This is a wrapper around pydoc.help (with a twist).
+
+    """
+
+    def __repr__(self):
+        return "Type help() for interactive help, " \
+               "or help(object) for help about object."
+    def __call__(self, *args, **kwds):
+        import pydoc
+        return pydoc.help(*args, **kwds)
+
+def sethelper():
+    __builtin__.help = _Helper()
+
+def aliasmbcs():
+    """On Windows, some default encodings are not provided by Python,
+    while they are always available as "mbcs" in each locale. Make
+    them usable by aliasing to "mbcs" in such a case."""
+    if sys.platform == 'win32':
+        import locale, codecs
+        enc = locale.getdefaultlocale()[1]
+        if enc.startswith('cp'):            # "cp***" ?
+            try:
+                codecs.lookup(enc)
+            except LookupError:
+                import encodings
+                encodings._cache[enc] = encodings._unknown
+                encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+    """Set the string encoding used by the Unicode implementation.  The
+    default is 'ascii', but if you're willing to experiment, you can
+    change this."""
+    encoding = "ascii" # Default value set by _PyUnicode_Init()
+    if 0:
+        # Enable to support locale aware default string encodings.
+        import locale
+        loc = locale.getdefaultlocale()
+        if loc[1]:
+            encoding = loc[1]
+    if 0:
+        # Enable to switch off string to Unicode coercion and implicit
+        # Unicode to string conversion.
+        encoding = "undefined"
+    if encoding != "ascii":
+        # On Non-Unicode builds this will raise an AttributeError...
+        sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
+def execsitecustomize():
+    """Run custom site specific code, if available."""
+    try:
+        import sitecustomize
+    except ImportError:
+        pass
+    except Exception:
+        if sys.flags.verbose:
+            sys.excepthook(*sys.exc_info())
+        else:
+            print >>sys.stderr, \
+                "'import sitecustomize' failed; use -v for traceback"
+
+
+def execusercustomize():
+    """Run custom user specific code, if available."""
+    try:
+        import usercustomize
+    except ImportError:
+        pass
+    except Exception:
+        if sys.flags.verbose:
+            sys.excepthook(*sys.exc_info())
+        else:
+            print >>sys.stderr, \
+                "'import usercustomize' failed; use -v for traceback"
+
+
+def main():
+    global ENABLE_USER_SITE
+
+    abs__file__()
+    known_paths = removeduppaths()
+    if ENABLE_USER_SITE is None:
+        ENABLE_USER_SITE = check_enableusersite()
+    known_paths = addusersitepackages(known_paths)
+    known_paths = addsitepackages(known_paths)
+    if sys.platform == 'os2emx':
+        setBEGINLIBPATH()
+    setquit()
+    setcopyright()
+    sethelper()
+    aliasmbcs()
+    setencoding()
+    execsitecustomize()
+    if ENABLE_USER_SITE:
+        execusercustomize()
+    # Remove sys.setdefaultencoding() so that users cannot change the
+    # encoding after initialization.  The test for presence is needed when
+    # this module is run as a script, because this code is executed twice.
+    if hasattr(sys, "setdefaultencoding"):
+        del sys.setdefaultencoding
+
+main()
+
+def _script():
+    help = """\
+    %s [--user-base] [--user-site]
+
+    Without arguments print some useful information
+    With arguments print the value of USER_BASE and/or USER_SITE separated
+    by '%s'.
+
+    Exit codes with --user-base or --user-site:
+      0 - user site directory is enabled
+      1 - user site directory is disabled by user
+      2 - user site directory is disabled by super user
+          or for security reasons
+     >2 - unknown error
+    """
+    args = sys.argv[1:]
+    if not args:
+        print "sys.path = ["
+        for dir in sys.path:
+            print "    %r," % (dir,)
+        print "]"
+        print "USER_BASE: %r (%s)" % (USER_BASE,
+            "exists" if os.path.isdir(USER_BASE) else "doesn't exist")
+        print "USER_SITE: %r (%s)" % (USER_SITE,
+            "exists" if os.path.isdir(USER_SITE) else "doesn't exist")
+        print "ENABLE_USER_SITE: %r" %  ENABLE_USER_SITE
+        sys.exit(0)
+
+    buffer = []
+    if '--user-base' in args:
+        buffer.append(USER_BASE)
+    if '--user-site' in args:
+        buffer.append(USER_SITE)
+
+    if buffer:
+        print os.pathsep.join(buffer)
+        if ENABLE_USER_SITE:
+            sys.exit(0)
+        elif ENABLE_USER_SITE is False:
+            sys.exit(1)
+        elif ENABLE_USER_SITE is None:
+            sys.exit(2)
+        else:
+            sys.exit(3)
+    else:
+        import textwrap
+        print textwrap.dedent(help % (sys.argv[0], os.pathsep))
+        sys.exit(10)
+
+if __name__ == '__main__':
+    _script()
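+# Illustrative command-line sketch for the help text above (paths, version
+# and separator are examples for a posix host):
+#
+#     $ python -m site --user-base --user-site
+#     /home/user/.local:/home/user/.local/lib32/python2.7/site-packages
+#     $ echo $?
+#     0     # 1: disabled by the user, 2: disabled for security reasons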
Index: Lib/sysconfig.py
===================================================================
--- Lib/sysconfig.py	(nonexistent)
+++ Lib/sysconfig.py	(revision 5)
@@ -0,0 +1,645 @@
+"""Provide access to Python's configuration information.
+
+"""
+import sys
+import os
+from os.path import pardir, realpath
+
+_INSTALL_SCHEMES = {
+    'posix_prefix': {
+        'stdlib': '{base}/lib32/python{py_version_short}',
+        'platstdlib': '{platbase}/lib32/python{py_version_short}',
+        'purelib': '{base}/lib32/python{py_version_short}/site-packages',
+        'platlib': '{platbase}/lib32/python{py_version_short}/site-packages',
+        'include': '{base}/include/python{py_version_short}',
+        'platinclude': '{platbase}/include/python{py_version_short}',
+        'scripts': '{base}/bin',
+        'data': '{base}',
+        },
+    'posix_home': {
+        'stdlib': '{base}/lib32/python',
+        'platstdlib': '{base}/lib32/python',
+        'purelib': '{base}/lib32/python',
+        'platlib': '{base}/lib32/python',
+        'include': '{base}/include/python',
+        'platinclude': '{base}/include/python',
+        'scripts': '{base}/bin',
+        'data'   : '{base}',
+        },
+    'nt': {
+        'stdlib': '{base}/Lib',
+        'platstdlib': '{base}/Lib',
+        'purelib': '{base}/Lib/site-packages',
+        'platlib': '{base}/Lib/site-packages',
+        'include': '{base}/Include',
+        'platinclude': '{base}/Include',
+        'scripts': '{base}/Scripts',
+        'data'   : '{base}',
+        },
+    'os2': {
+        'stdlib': '{base}/Lib',
+        'platstdlib': '{base}/Lib',
+        'purelib': '{base}/Lib/site-packages',
+        'platlib': '{base}/Lib/site-packages',
+        'include': '{base}/Include',
+        'platinclude': '{base}/Include',
+        'scripts': '{base}/Scripts',
+        'data'   : '{base}',
+        },
+    'os2_home': {
+        'stdlib': '{userbase}/lib/python{py_version_short}',
+        'platstdlib': '{userbase}/lib/python{py_version_short}',
+        'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
+        'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
+        'include': '{userbase}/include/python{py_version_short}',
+        'scripts': '{userbase}/bin',
+        'data'   : '{userbase}',
+        },
+    'nt_user': {
+        'stdlib': '{userbase}/Python{py_version_nodot}',
+        'platstdlib': '{userbase}/Python{py_version_nodot}',
+        'purelib': '{userbase}/Python{py_version_nodot}/site-packages',
+        'platlib': '{userbase}/Python{py_version_nodot}/site-packages',
+        'include': '{userbase}/Python{py_version_nodot}/Include',
+        'scripts': '{userbase}/Scripts',
+        'data'   : '{userbase}',
+        },
+    'posix_user': {
+        'stdlib': '{userbase}/lib32/python{py_version_short}',
+        'platstdlib': '{userbase}/lib32/python{py_version_short}',
+        'purelib': '{userbase}/lib32/python{py_version_short}/site-packages',
+        'platlib': '{userbase}/lib32/python{py_version_short}/site-packages',
+        'include': '{userbase}/include/python{py_version_short}',
+        'scripts': '{userbase}/bin',
+        'data'   : '{userbase}',
+        },
+    'osx_framework_user': {
+        'stdlib': '{userbase}/lib/python',
+        'platstdlib': '{userbase}/lib/python',
+        'purelib': '{userbase}/lib/python/site-packages',
+        'platlib': '{userbase}/lib/python/site-packages',
+        'include': '{userbase}/include',
+        'scripts': '{userbase}/bin',
+        'data'   : '{userbase}',
+        },
+    }
+
+_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
+                'scripts', 'data')
+_PY_VERSION = sys.version.split()[0]
+_PY_VERSION_SHORT = sys.version[:3]
+_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
+_PREFIX = os.path.normpath(sys.prefix)
+_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+_CONFIG_VARS = None
+_USER_BASE = None
+
+def _safe_realpath(path):
+    try:
+        return realpath(path)
+    except OSError:
+        return path
+
+if sys.executable:
+    _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
+else:
+    # sys.executable can be empty if argv[0] has been changed and Python is
+    # unable to retrieve the real program name
+    _PROJECT_BASE = _safe_realpath(os.getcwd())
+
+if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
+    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
+# PC/VS7.1
+if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
+    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
+# PC/VS9.0/amd64
+if (os.name == "nt"
+   and os.path.basename(os.path.dirname(os.path.dirname(_PROJECT_BASE))).lower() == "pc"
+   and os.path.basename(os.path.dirname(_PROJECT_BASE)).lower() == "vs9.0"):
+    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir, pardir))
+# PC/AMD64
+if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
+    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
+
+# set for cross builds
+if "_PYTHON_PROJECT_BASE" in os.environ:
+    # the build directory for posix builds
+    _PROJECT_BASE = os.path.normpath(os.path.abspath("."))
+def is_python_build():
+    for fn in ("Setup.dist", "Setup.local"):
+        if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
+            return True
+    return False
+
+_PYTHON_BUILD = is_python_build()
+
+if _PYTHON_BUILD:
+    for scheme in ('posix_prefix', 'posix_home'):
+        _INSTALL_SCHEMES[scheme]['include'] = '{projectbase}/Include'
+        _INSTALL_SCHEMES[scheme]['platinclude'] = '{srcdir}'
+
+def _subst_vars(s, local_vars):
+    try:
+        return s.format(**local_vars)
+    except KeyError:
+        try:
+            return s.format(**os.environ)
+        except KeyError, var:
+            raise AttributeError('{%s}' % var)
+
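+# Illustrative sketch of the {name} substitution that _subst_vars() applies
+# to the scheme templates above (values are hypothetical):
+#
+#     >>> _subst_vars('{base}/lib32/python{py_version_short}',
+#     ...             {'base': '/usr', 'py_version_short': '2.7'})
+#     '/usr/lib32/python2.7'
+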
+def _extend_dict(target_dict, other_dict):
+    target_keys = target_dict.keys()
+    for key, value in other_dict.items():
+        if key in target_keys:
+            continue
+        target_dict[key] = value
+
+def _expand_vars(scheme, vars):
+    res = {}
+    if vars is None:
+        vars = {}
+    _extend_dict(vars, get_config_vars())
+
+    for key, value in _INSTALL_SCHEMES[scheme].items():
+        if os.name in ('posix', 'nt'):
+            value = os.path.expanduser(value)
+        res[key] = os.path.normpath(_subst_vars(value, vars))
+    return res
+
+def _get_default_scheme():
+    if os.name == 'posix':
+        # the default scheme for posix is posix_prefix
+        return 'posix_prefix'
+    return os.name
+
+def _getuserbase():
+    env_base = os.environ.get("PYTHONUSERBASE", None)
+    def joinuser(*args):
+        return os.path.expanduser(os.path.join(*args))
+
+    # what about 'os2emx', 'riscos' ?
+    if os.name == "nt":
+        base = os.environ.get("APPDATA") or "~"
+        return env_base if env_base else joinuser(base, "Python")
+
+    if sys.platform == "darwin":
+        framework = get_config_var("PYTHONFRAMEWORK")
+        if framework:
+            return env_base if env_base else \
+                               joinuser("~", "Library", framework, "%d.%d"
+                                            % (sys.version_info[:2]))
+
+    return env_base if env_base else joinuser("~", ".local")
+
+
+def _parse_makefile(filename, vars=None):
+    """Parse a Makefile-style file.
+
+    A dictionary containing name/value pairs is returned.  If an
+    optional dictionary is passed in as the second argument, it is
+    used instead of a new dictionary.
+    """
+    import re
+    # Regexes needed for parsing Makefile (and similar syntaxes,
+    # like old-style Setup files).
+    _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
+    _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
+    _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
+
+    if vars is None:
+        vars = {}
+    done = {}
+    notdone = {}
+
+    with open(filename) as f:
+        lines = f.readlines()
+
+    for line in lines:
+        if line.startswith('#') or line.strip() == '':
+            continue
+        m = _variable_rx.match(line)
+        if m:
+            n, v = m.group(1, 2)
+            v = v.strip()
+            # `$$' is a literal `$' in make
+            tmpv = v.replace('$$', '')
+
+            if "$" in tmpv:
+                notdone[n] = v
+            else:
+                try:
+                    v = int(v)
+                except ValueError:
+                    # insert literal `$'
+                    done[n] = v.replace('$$', '$')
+                else:
+                    done[n] = v
+
+    # do variable interpolation here
+    while notdone:
+        for name in notdone.keys():
+            value = notdone[name]
+            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
+            if m:
+                n = m.group(1)
+                found = True
+                if n in done:
+                    item = str(done[n])
+                elif n in notdone:
+                    # get it on a subsequent round
+                    found = False
+                elif n in os.environ:
+                    # do it like make: fall back to environment
+                    item = os.environ[n]
+                else:
+                    done[n] = item = ""
+                if found:
+                    after = value[m.end():]
+                    value = value[:m.start()] + item + after
+                    if "$" in after:
+                        notdone[name] = value
+                    else:
+                        try: value = int(value)
+                        except ValueError:
+                            done[name] = value.strip()
+                        else:
+                            done[name] = value
+                        del notdone[name]
+            else:
+                # bogus variable reference; just drop it since we can't deal
+                del notdone[name]
+    # strip spurious spaces
+    for k, v in done.items():
+        if isinstance(v, str):
+            done[k] = v.strip()
+
+    # save the results in the global dictionary
+    vars.update(done)
+    return vars
+
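+# Illustrative sketch: for a hypothetical makefile containing
+#
+#     prefix=/usr
+#     LIBDIR=$(prefix)/lib32
+#
+# _parse_makefile() returns {'prefix': '/usr', 'LIBDIR': '/usr/lib32'}; the
+# $(prefix) reference is resolved by the interpolation loop above.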
+
+def get_makefile_filename():
+    """Return the path of the Makefile."""
+    if _PYTHON_BUILD:
+        return os.path.join(_PROJECT_BASE, "Makefile")
+    return os.path.join(get_path('platstdlib'), "config", "Makefile")
+
+# Issue #22199: retain undocumented private name for compatibility
+_get_makefile_filename = get_makefile_filename
+
+def _generate_posix_vars():
+    """Generate the Python module containing build-time variables."""
+    import pprint
+    vars = {}
+    # load the installed Makefile:
+    makefile = get_makefile_filename()
+    try:
+        _parse_makefile(makefile, vars)
+    except IOError, e:
+        msg = "invalid Python installation: unable to open %s" % makefile
+        if hasattr(e, "strerror"):
+            msg = msg + " (%s)" % e.strerror
+        raise IOError(msg)
+
+    # load the installed pyconfig.h:
+    config_h = get_config_h_filename()
+    try:
+        with open(config_h) as f:
+            parse_config_h(f, vars)
+    except IOError, e:
+        msg = "invalid Python installation: unable to open %s" % config_h
+        if hasattr(e, "strerror"):
+            msg = msg + " (%s)" % e.strerror
+        raise IOError(msg)
+
+    # On AIX, there are wrong paths to the linker scripts in the Makefile
+    # -- these paths are relative to the Python source, but when installed
+    # the scripts are in another directory.
+    if _PYTHON_BUILD:
+        vars['LDSHARED'] = vars['BLDSHARED']
+
+    # There's a chicken-and-egg situation on OS X with regards to the
+    # _sysconfigdata module after the changes introduced by #15298:
+    # get_config_vars() is called by get_platform() as part of the
+    # `make pybuilddir.txt` target -- which is a precursor to the
+    # _sysconfigdata.py module being constructed.  Unfortunately,
+    # get_config_vars() eventually calls _init_posix(), which attempts
+    # to import _sysconfigdata, which we won't have built yet.  In order
+    # for _init_posix() to work, if we're on Darwin, just mock up the
+    # _sysconfigdata module manually and populate it with the build vars.
+    # This is more than sufficient for ensuring the subsequent call to
+    # get_platform() succeeds.
+    name = '_sysconfigdata'
+    if 'darwin' in sys.platform:
+        import imp
+        module = imp.new_module(name)
+        module.build_time_vars = vars
+        sys.modules[name] = module
+
+    pybuilddir = 'build/lib.%s-%s' % (get_platform(), sys.version[:3])
+    if hasattr(sys, "gettotalrefcount"):
+        pybuilddir += '-pydebug'
+    try:
+        os.makedirs(pybuilddir)
+    except OSError:
+        pass
+    destfile = os.path.join(pybuilddir, name + '.py')
+
+    with open(destfile, 'wb') as f:
+        f.write('# system configuration generated and used by'
+                ' the sysconfig module\n')
+        f.write('build_time_vars = ')
+        pprint.pprint(vars, stream=f)
+
+    # Create file used for sys.path fixup -- see Modules/getpath.c
+    with open('pybuilddir.txt', 'w') as f:
+        f.write(pybuilddir)
+
+def _init_posix(vars):
+    """Initialize the module as appropriate for POSIX systems."""
+    # _sysconfigdata is generated at build time, see _generate_posix_vars()
+    from _sysconfigdata import build_time_vars
+    vars.update(build_time_vars)
+
+def _init_non_posix(vars):
+    """Initialize the module as appropriate for NT"""
+    # set basic install directories
+    vars['LIBDEST'] = get_path('stdlib')
+    vars['BINLIBDEST'] = get_path('platstdlib')
+    vars['INCLUDEPY'] = get_path('include')
+    vars['SO'] = '.pyd'
+    vars['EXE'] = '.exe'
+    vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
+    vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
+
+#
+# public APIs
+#
+
+
+def parse_config_h(fp, vars=None):
+    """Parse a config.h-style file.
+
+    A dictionary containing name/value pairs is returned.  If an
+    optional dictionary is passed in as the second argument, it is
+    used instead of a new dictionary.
+    """
+    import re
+    if vars is None:
+        vars = {}
+    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
+    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
+
+    while True:
+        line = fp.readline()
+        if not line:
+            break
+        m = define_rx.match(line)
+        if m:
+            n, v = m.group(1, 2)
+            try: v = int(v)
+            except ValueError: pass
+            vars[n] = v
+        else:
+            m = undef_rx.match(line)
+            if m:
+                vars[m.group(1)] = 0
+    return vars
+
+def get_config_h_filename():
+    """Returns the path of pyconfig.h."""
+    if _PYTHON_BUILD:
+        if os.name == "nt":
+            inc_dir = os.path.join(_PROJECT_BASE, "PC")
+        else:
+            inc_dir = _PROJECT_BASE
+    else:
+        inc_dir = get_path('platinclude')
+    return os.path.join(inc_dir, 'pyconfig.h')
+
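+# Illustrative sketch for parse_config_h()/get_config_h_filename()
+# (HAVE_UNISTD_H is just an example macro; its value depends on the build):
+#
+#     >>> with open(get_config_h_filename()) as f:
+#     ...     cfg = parse_config_h(f)
+#     >>> cfg.get('HAVE_UNISTD_H')
+#     1
+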
+def get_scheme_names():
+    """Returns a tuple containing the schemes names."""
+    schemes = _INSTALL_SCHEMES.keys()
+    schemes.sort()
+    return tuple(schemes)
+
+def get_path_names():
+    """Returns a tuple containing the paths names."""
+    return _SCHEME_KEYS
+
+def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
+    """Returns a mapping containing an install scheme.
+
+    ``scheme`` is the install scheme name. If not provided, it will
+    return the default scheme for the current platform.
+    """
+    if expand:
+        return _expand_vars(scheme, vars)
+    else:
+        return _INSTALL_SCHEMES[scheme]
+
+def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
+    """Returns a path corresponding to the scheme.
+
+    ``scheme`` is the install scheme name.
+    """
+    return get_paths(scheme, vars, expand)[name]
+
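+# Illustrative sketch for get_paths()/get_path() (expanded values are
+# examples for the lib32-based 'posix_prefix' scheme of this patch):
+#
+#     >>> get_path('purelib')
+#     '/usr/lib32/python2.7/site-packages'
+#     >>> get_path('scripts')
+#     '/usr/bin'
+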
+def get_config_vars(*args):
+    """With no arguments, return a dictionary of all configuration
+    variables relevant for the current platform.
+
+    On Unix, this means every variable defined in Python's installed Makefile;
+    on Windows and Mac OS it's a much smaller set.
+
+    With arguments, return a list of values that result from looking up
+    each argument in the configuration variable dictionary.
+    """
+    import re
+    global _CONFIG_VARS
+    if _CONFIG_VARS is None:
+        _CONFIG_VARS = {}
+        # Normalized versions of prefix and exec_prefix are handy to have;
+        # in fact, these are the standard versions used most places in the
+        # Distutils.
+        _CONFIG_VARS['prefix'] = _PREFIX
+        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
+        _CONFIG_VARS['py_version'] = _PY_VERSION
+        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
+        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
+        _CONFIG_VARS['base'] = _PREFIX
+        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
+        _CONFIG_VARS['projectbase'] = _PROJECT_BASE
+
+        if os.name in ('nt', 'os2'):
+            _init_non_posix(_CONFIG_VARS)
+        if os.name == 'posix':
+            _init_posix(_CONFIG_VARS)
+
+        # Setting 'userbase' is done below the call to the
+        # init function to enable using 'get_config_var' in
+        # the init-function.
+        _CONFIG_VARS['userbase'] = _getuserbase()
+
+        if 'srcdir' not in _CONFIG_VARS:
+            _CONFIG_VARS['srcdir'] = _PROJECT_BASE
+
+        # Convert srcdir into an absolute path if it appears necessary.
+        # Normally it is relative to the build directory.  However, during
+        # testing, for example, we might be running a non-installed python
+        # from a different directory.
+        if _PYTHON_BUILD and os.name == "posix":
+            base = _PROJECT_BASE
+            try:
+                cwd = os.getcwd()
+            except OSError:
+                cwd = None
+            if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
+                base != cwd):
+                # srcdir is relative and we are not in the same directory
+                # as the executable. Assume executable is in the build
+                # directory and make srcdir absolute.
+                srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
+                _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
+
+        # OS X platforms require special customization to handle
+        # multi-architecture, multi-os-version installers
+        if sys.platform == 'darwin':
+            import _osx_support
+            _osx_support.customize_config_vars(_CONFIG_VARS)
+
+    if args:
+        vals = []
+        for name in args:
+            vals.append(_CONFIG_VARS.get(name))
+        return vals
+    else:
+        return _CONFIG_VARS
+
+def get_config_var(name):
+    """Return the value of a single variable using the dictionary returned by
+    'get_config_vars()'.
+
+    Equivalent to get_config_vars().get(name)
+    """
+    return get_config_vars().get(name)
+
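+# Illustrative sketch for get_config_var()/get_config_vars() (returned
+# values are examples):
+#
+#     >>> get_config_var('py_version_short')
+#     '2.7'
+#     >>> get_config_vars('prefix', 'exec_prefix')
+#     ['/usr', '/usr']
+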
+def get_platform():
+    """Return a string that identifies the current platform.
+
+    This is used mainly to distinguish platform-specific build directories and
+    platform-specific built distributions.  Typically includes the OS name
+    and version and the architecture (as supplied by 'os.uname()'),
+    although the exact information included depends on the OS; e.g. for IRIX
+    the architecture isn't particularly important (IRIX only runs on SGI
+    hardware), but for Linux the kernel version isn't particularly
+    important.
+
+    Examples of returned values:
+       linux-i586
+       linux-alpha (?)
+       solaris-2.6-sun4u
+       irix-5.3
+       irix64-6.2
+
+    Windows will return one of:
+       win-amd64 (64bit Windows on AMD64, aka x86_64, Intel64, EM64T, etc.)
+       win-ia64 (64bit Windows on Itanium)
+       win32 (all others - specifically, sys.platform is returned)
+
+    For other non-POSIX platforms, currently just returns 'sys.platform'.
+    """
+    import re
+    if os.name == 'nt':
+        # sniff sys.version for architecture.
+        prefix = " bit ("
+        i = sys.version.find(prefix)
+        if i == -1:
+            return sys.platform
+        j = sys.version.find(")", i)
+        look = sys.version[i+len(prefix):j].lower()
+        if look == 'amd64':
+            return 'win-amd64'
+        if look == 'itanium':
+            return 'win-ia64'
+        return sys.platform
+
+    # Set for cross builds explicitly
+    if "_PYTHON_HOST_PLATFORM" in os.environ:
+        return os.environ["_PYTHON_HOST_PLATFORM"]
+
+    if os.name != "posix" or not hasattr(os, 'uname'):
+        # XXX what about the architecture? NT is Intel or Alpha,
+        # Mac OS is M68k or PPC, etc.
+        return sys.platform
+
+    # Try to distinguish various flavours of Unix
+    osname, host, release, version, machine = os.uname()
+
+    # Convert the OS name to lowercase, remove '/' characters
+    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
+    osname = osname.lower().replace('/', '')
+    machine = machine.replace(' ', '_')
+    machine = machine.replace('/', '-')
+
+    if osname[:5] == "linux":
+        # At least on Linux/Intel, 'machine' is the processor --
+        # i386, etc.
+        # XXX what about Alpha, SPARC, etc?
+        return  "%s-%s" % (osname, machine)
+    elif osname[:5] == "sunos":
+        if release[0] >= "5":           # SunOS 5 == Solaris 2
+            osname = "solaris"
+            release = "%d.%s" % (int(release[0]) - 3, release[2:])
+            # We can't use "platform.architecture()[0]" because of a
+            # bootstrap problem. We use a dict to get an error
+            # if something suspicious happens.
+            bitness = {2147483647:"32bit", 9223372036854775807:"64bit"}
+            machine += ".%s" % bitness[sys.maxint]
+        # fall through to standard osname-release-machine representation
+    elif osname[:4] == "irix":              # could be "irix64"!
+        return "%s-%s" % (osname, release)
+    elif osname[:3] == "aix":
+        return "%s-%s.%s" % (osname, version, release)
+    elif osname[:6] == "cygwin":
+        osname = "cygwin"
+        rel_re = re.compile (r'[\d.]+')
+        m = rel_re.match(release)
+        if m:
+            release = m.group()
+    elif osname[:6] == "darwin":
+        import _osx_support
+        osname, release, machine = _osx_support.get_platform_osx(
+                                            get_config_vars(),
+                                            osname, release, machine)
+
+    return "%s-%s-%s" % (osname, release, machine)
+
+
+def get_python_version():
+    return _PY_VERSION_SHORT
+
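+# Illustrative sketch for get_platform()/get_python_version() (outputs depend
+# on the host; these are examples for a 64-bit Linux build):
+#
+#     >>> get_platform()
+#     'linux-x86_64'
+#     >>> get_python_version()
+#     '2.7'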
+
+def _print_dict(title, data):
+    for index, (key, value) in enumerate(sorted(data.items())):
+        if index == 0:
+            print '%s: ' % (title)
+        print '\t%s = "%s"' % (key, value)
+
+
+def _main():
+    """Display all information sysconfig detains."""
+    if '--generate-posix-vars' in sys.argv:
+        _generate_posix_vars()
+        return
+    print 'Platform: "%s"' % get_platform()
+    print 'Python version: "%s"' % get_python_version()
+    print 'Current installation scheme: "%s"' % _get_default_scheme()
+    print
+    _print_dict('Paths', get_paths())
+    print
+    _print_dict('Variables', get_config_vars())
+
+
+if __name__ == '__main__':
+    _main()
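+
+# Illustrative command-line sketch (output abridged; values are examples):
+#
+#     $ python -m sysconfig
+#     Platform: "linux-x86_64"
+#     Python version: "2.7"
+#     Current installation scheme: "posix_prefix"
+#     ...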
Index: Lib
===================================================================
--- Lib	(nonexistent)
+++ Lib	(revision 5)

Property changes on: Lib
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,73 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~
Index: Makefile.pre.in
===================================================================
--- Makefile.pre.in	(nonexistent)
+++ Makefile.pre.in	(revision 5)
@@ -0,0 +1,1498 @@
+# Top-level Makefile for Python
+#
+# As distributed, this file is called Makefile.pre.in; it is processed
+# into the real Makefile by running the script ./configure, which
+# replaces things like @spam@ with values appropriate for your system.
+# This means that if you edit Makefile, your changes get lost the next
+# time you run the configure script.  Ideally, you can do:
+#
+#	./configure
+#	make
+#	make test
+#	make install
+#
+# If you have a previous version of Python installed that you don't
+# want to overwrite, you can use "make altinstall" instead of "make
+# install".  Refer to the "Installing" section in the README file for
+# additional details.
+#
+# See also the section "Build instructions" in the README file.
+
+# === Variables set by makesetup ===
+
+MODOBJS=        _MODOBJS_
+MODLIBS=        _MODLIBS_
+
+# === Variables set by configure
+VERSION=	@VERSION@
+srcdir=		@srcdir@
+VPATH=		@srcdir@
+abs_srcdir=	@abs_srcdir@
+abs_builddir=	@abs_builddir@
+build=		@build@
+host=		@host@
+
+CC=		@CC@
+CXX=		@CXX@
+MAINCC=		@MAINCC@
+LINKCC=		@LINKCC@
+AR=		@AR@
+RANLIB=		@RANLIB@
+GITVERSION=	@GITVERSION@
+GITTAG=		@GITTAG@
+GITBRANCH=	@GITBRANCH@
+PGO_PROF_GEN_FLAG=@PGO_PROF_GEN_FLAG@
+PGO_PROF_USE_FLAG=@PGO_PROF_USE_FLAG@
+LLVM_PROF_MERGER=@LLVM_PROF_MERGER@
+LLVM_PROF_FILE=@LLVM_PROF_FILE@
+LLVM_PROF_ERR=@LLVM_PROF_ERR@
+
+GNULD=          @GNULD@
+
+# Shell used by make (some versions default to the login shell, which is bad)
+SHELL=		/bin/sh
+
+# Use this to make a link between python$(VERSION) and python in $(BINDIR)
+LN=		@LN@
+
+# Portable install script (configure doesn't always guess right)
+INSTALL=	@INSTALL@
+INSTALL_PROGRAM=@INSTALL_PROGRAM@
+INSTALL_SCRIPT= @INSTALL_SCRIPT@
+INSTALL_DATA=	@INSTALL_DATA@
+# Shared libraries must be installed with executable mode on some systems;
+# rather than figuring out exactly which, we always give them executable mode.
+# Also, making them read-only seems to be a good idea...
+INSTALL_SHARED= ${INSTALL} -m 555
+
+MKDIR_P=	@MKDIR_P@
+
+MAKESETUP=      $(srcdir)/Modules/makesetup
+
+# Compiler options
+OPT=		@OPT@
+BASECFLAGS=	@BASECFLAGS@
+CFLAGS=		$(BASECFLAGS) @CFLAGS@ $(OPT) $(EXTRA_CFLAGS)
+# Both CPPFLAGS and LDFLAGS need to contain the shell's value for setup.py to
+# be able to build extension modules using the directories specified in the
+# environment variables
+CPPFLAGS=	-I. -IInclude -I$(srcdir)/Include @CPPFLAGS@
+LDFLAGS=	@LDFLAGS@
+LDLAST=		@LDLAST@
+SGI_ABI=	@SGI_ABI@
+CCSHARED=	@CCSHARED@
+LINKFORSHARED=	@LINKFORSHARED@
+ARFLAGS=	@ARFLAGS@
+# Extra C flags added for building the interpreter object files.
+CFLAGSFORSHARED=@CFLAGSFORSHARED@
+# C flags used for building the interpreter object files
+PY_CFLAGS=	$(CFLAGS) $(CPPFLAGS) $(CFLAGSFORSHARED) -DPy_BUILD_CORE
+
+
+# Machine-dependent subdirectories
+MACHDEP=	@MACHDEP@
+
+# Multiarch directory (may be empty)
+MULTIARCH=	@MULTIARCH@
+
+# Install prefix for architecture-independent files
+prefix=		@prefix@
+
+# Install prefix for architecture-dependent files
+exec_prefix=	@exec_prefix@
+
+# Install prefix for data files
+datarootdir=    @datarootdir@
+
+# Expanded directories
+BINDIR=		@bindir@
+LIBDIR=		@libdir@
+MANDIR=		@mandir@
+INCLUDEDIR=	@includedir@
+CONFINCLUDEDIR=	$(exec_prefix)/include
+SCRIPTDIR=	$(prefix)/lib32
+
+# Detailed destination directories
+BINLIBDEST=	$(LIBDIR)/python$(VERSION)
+LIBDEST=	$(SCRIPTDIR)/python$(VERSION)
+INCLUDEPY=	$(INCLUDEDIR)/python$(VERSION)
+CONFINCLUDEPY=	$(CONFINCLUDEDIR)/python$(VERSION)
+LIBP=		$(LIBDIR)/python$(VERSION)
+
+# Symbols used for using shared libraries
+SO=		@SO@
+LDSHARED=	@LDSHARED@ $(LDFLAGS)
+BLDSHARED=	@BLDSHARED@ $(LDFLAGS)
+LDCXXSHARED=	@LDCXXSHARED@
+DESTSHARED=	$(BINLIBDEST)/lib-dynload
+
+# Executable suffix (.exe on Windows and Mac OS X)
+EXE=		@EXEEXT@
+BUILDEXE=	@BUILDEXEEXT@
+
+# Short name and location for Mac OS X Python framework
+UNIVERSALSDK=@UNIVERSALSDK@
+PYTHONFRAMEWORK=	@PYTHONFRAMEWORK@
+PYTHONFRAMEWORKDIR=	@PYTHONFRAMEWORKDIR@
+PYTHONFRAMEWORKPREFIX=	@PYTHONFRAMEWORKPREFIX@
+PYTHONFRAMEWORKINSTALLDIR= @PYTHONFRAMEWORKINSTALLDIR@
+# Deployment target selected during configure, to be checked
+# by distutils. The export statement is needed to ensure that the
+# deployment target is active during build.
+MACOSX_DEPLOYMENT_TARGET=@CONFIGURE_MACOSX_DEPLOYMENT_TARGET@
+@EXPORT_MACOSX_DEPLOYMENT_TARGET@export MACOSX_DEPLOYMENT_TARGET
+
+# Options to enable prebinding (for fast startup prior to Mac OS X 10.3)
+OTHER_LIBTOOL_OPT=@OTHER_LIBTOOL_OPT@
+
+# Environment to run shared python without installed libraries
+RUNSHARED=       @RUNSHARED@
+
+# ensurepip options
+ENSUREPIP=      @ENSUREPIP@
+
+# Modes for directories, executables and data files created by the
+# install process.  Default to user-only-writable for all file types.
+DIRMODE=	755
+EXEMODE=	755
+FILEMODE=	644
+
+# configure script arguments
+CONFIG_ARGS=	@CONFIG_ARGS@
+
+
+# Subdirectories with code
+SRCDIRS= 	@SRCDIRS@
+
+# Other subdirectories
+SUBDIRSTOO=	Include Lib Misc Demo
+
+# Files and directories to be distributed
+CONFIGFILES=	configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in
+DISTFILES=	README ChangeLog $(CONFIGFILES)
+DISTDIRS=	$(SUBDIRS) $(SUBDIRSTOO) Ext-dummy
+DIST=		$(DISTFILES) $(DISTDIRS)
+
+
+LIBRARY=	@LIBRARY@
+LDLIBRARY=      @LDLIBRARY@
+BLDLIBRARY=     @BLDLIBRARY@
+DLLLIBRARY=	@DLLLIBRARY@
+LDLIBRARYDIR=   @LDLIBRARYDIR@
+INSTSONAME=	@INSTSONAME@
+
+
+LIBS=		@LIBS@
+LIBM=		@LIBM@
+LIBC=		@LIBC@
+SYSLIBS=	$(LIBM) $(LIBC)
+SHLIBS=		@SHLIBS@
+
+THREADOBJ=	@THREADOBJ@
+DLINCLDIR=	@DLINCLDIR@
+DYNLOADFILE=	@DYNLOADFILE@
+MACHDEP_OBJS=	@MACHDEP_OBJS@
+LIBOBJDIR=	Python/
+LIBOBJS=	@LIBOBJS@
+UNICODE_OBJS=   @UNICODE_OBJS@
+
+PYTHON=		python$(EXE)
+BUILDPYTHON=	python$(BUILDEXE)
+
+PYTHON_FOR_REGEN=@PYTHON_FOR_REGEN@
+PYTHON_FOR_BUILD=@PYTHON_FOR_BUILD@
+_PYTHON_HOST_PLATFORM=@_PYTHON_HOST_PLATFORM@
+HOST_GNU_TYPE=  @host@
+
+# Tcl and Tk config info from --with-tcltk-includes and -libs options
+TCLTK_INCLUDES=	@TCLTK_INCLUDES@
+TCLTK_LIBS=	@TCLTK_LIBS@
+
+# The task to run while instrumented when building the profile-opt target.
+# We exclude unittests with -x that take a ridiculous amount of time to
+# run in the instrumented training build or do not provide much value.
+PROFILE_TASK=-m test.regrtest --pgo -x test_asyncore test_gdb test_multiprocessing test_subprocess
+
+# report files for gcov / lcov coverage report
+COVERAGE_INFO=	$(abs_builddir)/coverage.info
+COVERAGE_REPORT=$(abs_builddir)/lcov-report
+COVERAGE_REPORT_OPTIONS=--no-branch-coverage --title "CPython lcov report"
+
+# === Definitions added by makesetup ===
+
+
+##########################################################################
+# Modules
+MODULE_OBJS=	\
+		Modules/config.o \
+		Modules/getpath.o \
+		Modules/main.o \
+		Modules/gcmodule.o
+
+# Used if signalmodule.o is not available
+SIGNAL_OBJS=	@SIGNAL_OBJS@
+
+
+##########################################################################
+
+LIBFFI_INCLUDEDIR=	@LIBFFI_INCLUDEDIR@
+
+##########################################################################
+# Parser
+PGEN=		Parser/pgen$(EXE)
+
+PSRCS=		\
+		Parser/acceler.c \
+		Parser/grammar1.c \
+		Parser/listnode.c \
+		Parser/node.c \
+		Parser/parser.c \
+		Parser/parsetok.c \
+		Parser/bitset.c \
+		Parser/metagrammar.c \
+		Parser/firstsets.c \
+		Parser/grammar.c \
+		Parser/pgen.c
+
+POBJS=		\
+		Parser/acceler.o \
+		Parser/grammar1.o \
+		Parser/listnode.o \
+		Parser/node.o \
+		Parser/parser.o \
+		Parser/parsetok.o \
+		Parser/bitset.o \
+		Parser/metagrammar.o \
+		Parser/firstsets.o \
+		Parser/grammar.o \
+		Parser/pgen.o
+
+PARSER_OBJS=	$(POBJS) Parser/myreadline.o Parser/tokenizer.o
+
+PGSRCS=		\
+		Objects/obmalloc.c \
+		Python/mysnprintf.c \
+		Python/pyctype.c \
+		Parser/tokenizer_pgen.c \
+		Parser/printgrammar.c \
+		Parser/pgenmain.c
+
+PGOBJS=		\
+		Objects/obmalloc.o \
+		Python/mysnprintf.o \
+		Python/pyctype.o \
+		Parser/tokenizer_pgen.o \
+		Parser/printgrammar.o \
+		Parser/pgenmain.o
+
+PARSER_HEADERS= \
+		Parser/parser.h \
+		Parser/tokenizer.h
+
+PGENSRCS=	$(PSRCS) $(PGSRCS)
+PGENOBJS=	$(POBJS) $(PGOBJS)
+
+##########################################################################
+PYTHON_OBJS=	\
+		Python/_warnings.o \
+		Python/Python-ast.o \
+		Python/asdl.o \
+		Python/ast.o \
+		Python/bltinmodule.o \
+		Python/ceval.o \
+		Python/compile.o \
+		Python/codecs.o \
+		Python/errors.o \
+		Python/frozen.o \
+		Python/frozenmain.o \
+		Python/future.o \
+		Python/getargs.o \
+		Python/getcompiler.o \
+		Python/getcopyright.o \
+		Python/getplatform.o \
+		Python/getversion.o \
+		Python/graminit.o \
+		Python/import.o \
+		Python/importdl.o \
+		Python/marshal.o \
+		Python/modsupport.o \
+		Python/mystrtoul.o \
+		Python/mysnprintf.o \
+		Python/peephole.o \
+		Python/pyarena.o \
+		Python/pyctype.o \
+		Python/pyfpe.o \
+		Python/pymath.o \
+		Python/pystate.o \
+		Python/pythonrun.o \
+                Python/random.o \
+		Python/structmember.o \
+		Python/symtable.o \
+		Python/sysmodule.o \
+		Python/traceback.o \
+		Python/getopt.o \
+		Python/pystrcmp.o \
+		Python/pystrtod.o \
+		Python/dtoa.o \
+		Python/formatter_unicode.o \
+		Python/formatter_string.o \
+		Python/$(DYNLOADFILE) \
+		$(LIBOBJS) \
+		$(MACHDEP_OBJS) \
+		$(THREADOBJ)
+
+
+##########################################################################
+# Objects
+OBJECT_OBJS=	\
+		Objects/abstract.o \
+		Objects/boolobject.o \
+		Objects/bufferobject.o \
+		Objects/bytes_methods.o \
+		Objects/bytearrayobject.o \
+		Objects/capsule.o \
+		Objects/cellobject.o \
+		Objects/classobject.o \
+		Objects/cobject.o \
+		Objects/codeobject.o \
+		Objects/complexobject.o \
+		Objects/descrobject.o \
+		Objects/enumobject.o \
+		Objects/exceptions.o \
+		Objects/genobject.o \
+		Objects/fileobject.o \
+		Objects/floatobject.o \
+		Objects/frameobject.o \
+		Objects/funcobject.o \
+		Objects/intobject.o \
+		Objects/iterobject.o \
+		Objects/listobject.o \
+		Objects/longobject.o \
+		Objects/dictobject.o \
+		Objects/memoryobject.o \
+		Objects/methodobject.o \
+		Objects/moduleobject.o \
+		Objects/object.o \
+		Objects/obmalloc.o \
+		Objects/rangeobject.o \
+		Objects/setobject.o \
+		Objects/sliceobject.o \
+		Objects/stringobject.o \
+		Objects/structseq.o \
+		Objects/tupleobject.o \
+		Objects/typeobject.o \
+		Objects/weakrefobject.o \
+		$(UNICODE_OBJS)
+
+
+##########################################################################
+# objects that get linked into the Python library
+LIBRARY_OBJS=	\
+		Modules/getbuildinfo.o \
+		$(PARSER_OBJS) \
+		$(OBJECT_OBJS) \
+		$(PYTHON_OBJS) \
+		$(MODULE_OBJS) \
+		$(SIGNAL_OBJS) \
+		$(MODOBJS)
+
+#########################################################################
+# Rules
+
+# Default target
+all:		@DEF_MAKE_ALL_RULE@
+build_all:	check-clean-src $(BUILDPYTHON) oldsharedmods sharedmods gdbhooks
+
+# Check that the source is clean when building out of source.
+check-clean-src:
+	@if test -n "$(VPATH)" -a -f "$(srcdir)/Modules/python.o"; then \
+		echo "Error: The source directory ($(srcdir)) is not clean" ; \
+		echo "Building Python out of the source tree (in $(abs_builddir)) requires a clean source tree ($(abs_srcdir))" ; \
+		echo "Try to run: make -C \"$(srcdir)\" clean" ; \
+		exit 1; \
+	fi
+
+# Compile a binary with profile guided optimization.
+profile-opt:
+	@if [ $(LLVM_PROF_ERR) = yes ]; then \
+		echo "Error: Cannot perform PGO build because llvm-profdata was not found in PATH" ;\
+		echo "Please add it to PATH and run ./configure again" ;\
+		exit 1;\
+	fi
+	@echo "Building with support for profile generation:"
+	$(MAKE) clean
+	$(MAKE) profile-removal
+	$(MAKE) build_all_generate_profile
+	$(MAKE) profile-removal
+	@echo "Running code to generate profile data (this can take a while):"
+	$(MAKE) run_profile_task
+	$(MAKE) build_all_merge_profile
+	@echo "Rebuilding with profile guided optimizations:"
+	$(MAKE) clean
+	$(MAKE) build_all_use_profile
+	$(MAKE) profile-removal
+
+build_all_generate_profile:
+	$(MAKE) @DEF_MAKE_RULE@ CFLAGS="$(CFLAGS) $(PGO_PROF_GEN_FLAG) @LTOFLAGS@" LDFLAGS="$(LDFLAGS) $(PGO_PROF_GEN_FLAG) @LTOFLAGS@" LIBS="$(LIBS)"
+
+run_profile_task:
+	: # FIXME: can't run for a cross build
+	$(LLVM_PROF_FILE) $(RUNSHARED) ./$(BUILDPYTHON) $(PROFILE_TASK) || true
+
+build_all_merge_profile:
+	$(LLVM_PROF_MERGER)
+
+build_all_use_profile:
+	$(MAKE) @DEF_MAKE_RULE@ CFLAGS="$(CFLAGS) $(PGO_PROF_USE_FLAG) @LTOFLAGS@" LDFLAGS="$(LDFLAGS) @LTOFLAGS@"
+
+# Compile and run with gcov
+.PHONY=coverage coverage-lcov coverage-report
+coverage:
+	@echo "Building with support for coverage checking:"
+	$(MAKE) clean profile-removal
+	$(MAKE) @DEF_MAKE_RULE@ CFLAGS="$(CFLAGS) -O0 -pg -fprofile-arcs -ftest-coverage" LIBS="$(LIBS) -lgcov"
+
+coverage-lcov:
+	@echo "Creating Coverage HTML report with LCOV:"
+	@rm -f $(COVERAGE_INFO)
+	@rm -rf $(COVERAGE_REPORT)
+	@lcov --capture --directory $(abs_builddir) \
+	    --base-directory $(realpath $(abs_builddir)) \
+	    --path $(realpath $(abs_srcdir)) \
+	    --output-file $(COVERAGE_INFO)
+	: # remove 3rd party modules and system headers
+	@lcov --remove $(COVERAGE_INFO) \
+	    '*/Modules/_ctypes/libffi*/*' \
+	    '*/Modules/expat/*' \
+	    '*/Modules/zlib/*' \
+	    '*/Include/*' \
+	    '/usr/include/*' \
+	    '/usr/local/include/*' \
+	    --output-file $(COVERAGE_INFO)
+	@genhtml $(COVERAGE_INFO) --output-directory $(COVERAGE_REPORT) \
+	    $(COVERAGE_REPORT_OPTIONS)
+	@echo
+	@echo "lcov report at $(COVERAGE_REPORT)/index.html"
+	@echo
+
+# Force regeneration of parser
+coverage-report: regen-grammar
+	: # build with coverage info
+	$(MAKE) coverage
+	: # run tests, ignore failures
+	$(TESTPYTHON) $(TESTPROG) $(TESTOPTS) || true
+	: # build lcov report
+	$(MAKE) coverage-lcov
+
+
+# Build the interpreter
+$(BUILDPYTHON):	Modules/python.o $(LIBRARY) $(LDLIBRARY)
+		$(LINKCC) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
+			Modules/python.o \
+			$(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
+
+platform: $(BUILDPYTHON) pybuilddir.txt
+	$(RUNSHARED) $(PYTHON_FOR_BUILD) -c 'import sys ; from sysconfig import get_platform ; print get_platform()+"-"+sys.version[0:3]' >platform
+
+# Create build directory and generate the sysconfig build-time data there.
+# pybuilddir.txt contains the name of the build dir and is used for
+# sys.path fixup -- see Modules/getpath.c.
+# Since this step runs before shared modules are built, try to avoid bootstrap
+# problems by creating a dummy pybuilddir.txt just to allow interpreter
+# initialization to succeed.  It will be overwritten by generate-posix-vars
+# or removed in case of failure.
+pybuilddir.txt: $(BUILDPYTHON)
+	@echo "none" > ./pybuilddir.txt
+	$(RUNSHARED) $(PYTHON_FOR_BUILD) -S -m sysconfig --generate-posix-vars ;\
+	if test $$? -ne 0 ; then \
+		echo "generate-posix-vars failed" ; \
+		rm -f ./pybuilddir.txt ; \
+		exit 1 ; \
+	fi
+
+# This is shared by the math and cmath modules
+Modules/_math.o: Modules/_math.c Modules/_math.h
+	$(CC) -c $(CCSHARED) $(PY_CFLAGS) -o $@ $<
+
+# Build the shared modules
+# Under GNU make, MAKEFLAGS are sorted and normalized; the 's' for
+# -s, --silent or --quiet is always the first char.
+# Under BSD make, MAKEFLAGS might be " -s -v x=y".
+sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
+	@case "$$MAKEFLAGS" in \
+	    *\ -s*|s*) quiet="-q";; \
+	    *) quiet="";; \
+	esac; \
+	$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \
+		_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \
+		$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
+
+# Build static library
+# avoid long command lines, same as LIBRARY_OBJS
+$(LIBRARY): $(LIBRARY_OBJS)
+	-rm -f $@
+	$(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o
+	$(AR) $(ARFLAGS) $@ $(PARSER_OBJS)
+	$(AR) $(ARFLAGS) $@ $(OBJECT_OBJS)
+	$(AR) $(ARFLAGS) $@ $(PYTHON_OBJS)
+	$(AR) $(ARFLAGS) $@ $(MODULE_OBJS) $(SIGNAL_OBJS)
+	$(AR) $(ARFLAGS) $@ $(MODOBJS)
+	$(RANLIB) $@
+
+libpython$(VERSION).so: $(LIBRARY_OBJS)
+	if test $(INSTSONAME) != $(LDLIBRARY); then \
+		$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
+		$(LN) -f $(INSTSONAME) $@; \
+	else \
+		$(BLDSHARED) -o $@ $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
+	fi
+
+libpython$(VERSION).dylib: $(LIBRARY_OBJS)
+	 $(CC) -dynamiclib -Wl,-single_module $(LDFLAGS) -undefined dynamic_lookup -Wl,-install_name,$(prefix)/lib/libpython$(VERSION).dylib -Wl,-compatibility_version,$(VERSION) -Wl,-current_version,$(VERSION) -o $@ $(LIBRARY_OBJS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
+
+
+libpython$(VERSION).sl: $(LIBRARY_OBJS)
+	$(LDSHARED) -o $@ $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST)
+
+# Copy up the gdb python hooks into a position where they can be automatically
+# loaded by gdb during Lib/test/test_gdb.py
+#
+# Distributors are likely to want to install this somewhere else e.g. relative
+# to the stripped DWARF data for the shared library.
+gdbhooks: $(BUILDPYTHON)-gdb.py
+
+SRC_GDB_HOOKS=$(srcdir)/Tools/gdb/libpython.py
+$(BUILDPYTHON)-gdb.py: $(SRC_GDB_HOOKS)
+	$(INSTALL_DATA) $(SRC_GDB_HOOKS) $(BUILDPYTHON)-gdb.py
+
+# This rule is here for OPENSTEP/Rhapsody/MacOSX. It builds a temporary
+# minimal framework (not including the Lib directory and such) in the current
+# directory.
+RESSRCDIR=Mac/Resources/framework
+$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK): \
+		$(LIBRARY) \
+		$(RESSRCDIR)/Info.plist
+	$(INSTALL) -d -m $(DIRMODE) $(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)
+	$(CC) -o $(LDLIBRARY) $(LDFLAGS)  -dynamiclib \
+		-all_load $(LIBRARY) -Wl,-single_module \
+		-install_name $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK) \
+		-compatibility_version $(VERSION) \
+		-current_version $(VERSION);
+	$(INSTALL) -d -m $(DIRMODE)  \
+		$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/Resources/English.lproj
+	$(INSTALL_DATA) $(RESSRCDIR)/Info.plist \
+		$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/Resources/Info.plist
+	$(LN) -fsn $(VERSION) $(PYTHONFRAMEWORKDIR)/Versions/Current
+	$(LN) -fsn Versions/Current/$(PYTHONFRAMEWORK) $(PYTHONFRAMEWORKDIR)/$(PYTHONFRAMEWORK)
+	$(LN) -fsn Versions/Current/Headers $(PYTHONFRAMEWORKDIR)/Headers
+	$(LN) -fsn Versions/Current/Resources $(PYTHONFRAMEWORKDIR)/Resources
+
+# This rule builds the Cygwin Python DLL and import library if configured
+# for a shared core library; otherwise, this rule is a noop.
+$(DLLLIBRARY) libpython$(VERSION).dll.a: $(LIBRARY_OBJS)
+	if test -n "$(DLLLIBRARY)"; then \
+		$(LDSHARED) -Wl,--out-implib=$@ -o $(DLLLIBRARY) $^ \
+			$(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST); \
+	else true; \
+	fi
+
+
+oldsharedmods: $(SHAREDMODS)
+
+
+Makefile Modules/config.c: Makefile.pre \
+				$(srcdir)/Modules/config.c.in \
+				$(MAKESETUP) \
+				Modules/Setup.config \
+				Modules/Setup \
+				Modules/Setup.local
+	$(SHELL) $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \
+				-s Modules \
+				Modules/Setup.config \
+				Modules/Setup.local \
+				Modules/Setup
+	@mv config.c Modules
+	@echo "The Makefile was updated, you may need to re-run make."
+
+
+Modules/Setup: $(srcdir)/Modules/Setup.dist
+	@if test -f Modules/Setup; then \
+		echo "-----------------------------------------------"; \
+		echo "Modules/Setup.dist is newer than Modules/Setup;"; \
+		echo "check to make sure you have all the updates you"; \
+		echo "need in your Modules/Setup file."; \
+		echo "Usually, copying Modules/Setup.dist to Modules/Setup will work."; \
+		echo "-----------------------------------------------"; \
+	fi
+
+
+############################################################################
+# Regenerate all generated files
+
+regen-all: regen-opcode-targets regen-grammar regen-ast
+
+############################################################################
+# Special rules for object files
+
+Modules/getbuildinfo.o: $(PARSER_OBJS) \
+		$(OBJECT_OBJS) \
+		$(PYTHON_OBJS) \
+		$(MODULE_OBJS) \
+		$(SIGNAL_OBJS) \
+		$(MODOBJS) \
+		$(srcdir)/Modules/getbuildinfo.c
+	$(CC) -c $(PY_CFLAGS) \
+	      -DGITVERSION="\"`LC_ALL=C $(GITVERSION)`\"" \
+	      -DGITTAG="\"`LC_ALL=C $(GITTAG)`\"" \
+	      -DGITBRANCH="\"`LC_ALL=C $(GITBRANCH)`\"" \
+	      -o $@ $(srcdir)/Modules/getbuildinfo.c
+
+Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile
+	$(CC) -c $(PY_CFLAGS) -DPYTHONPATH='"$(PYTHONPATH)"' \
+		-DPREFIX='"$(prefix)"' \
+		-DEXEC_PREFIX='"$(exec_prefix)"' \
+		-DVERSION='"$(VERSION)"' \
+		-DVPATH='"$(VPATH)"' \
+		-o $@ $(srcdir)/Modules/getpath.c
+
+Modules/python.o: $(srcdir)/Modules/python.c
+	$(MAINCC) -c $(PY_CFLAGS) -o $@ $(srcdir)/Modules/python.c
+
+Modules/posixmodule.o: $(srcdir)/Modules/posixmodule.c $(srcdir)/Modules/posixmodule.h
+
+Modules/grpmodule.o: $(srcdir)/Modules/grpmodule.c $(srcdir)/Modules/posixmodule.h
+
+Modules/pwdmodule.o: $(srcdir)/Modules/pwdmodule.c $(srcdir)/Modules/posixmodule.h
+
+$(PGEN):	$(PGENOBJS)
+		$(CC) $(OPT) $(LDFLAGS) $(PGENOBJS) $(LIBS) -o $(PGEN)
+
+.PHONY: regen-grammar
+regen-grammar: $(PGEN)
+	# Regenerate Include/graminit.h and Python/graminit.c
+	# from Grammar/Grammar using pgen
+	@$(MKDIR_P) Include
+	$(PGEN) $(srcdir)/Grammar/Grammar \
+		$(srcdir)/Include/graminit.h \
+		$(srcdir)/Python/graminit.c
+
+Parser/grammar.o:	$(srcdir)/Parser/grammar.c \
+				$(srcdir)/Include/token.h \
+				$(srcdir)/Include/grammar.h
+Parser/metagrammar.o:	$(srcdir)/Parser/metagrammar.c
+
+Parser/tokenizer_pgen.o:	$(srcdir)/Parser/tokenizer.c
+
+Parser/pgenmain.o:	$(srcdir)/Include/parsetok.h
+
+.PHONY: regen-ast
+regen-ast:
+	# Regenerate Include/Python-ast.h using Parser/asdl_c.py -h
+	$(MKDIR_P) $(srcdir)/Include
+	$(PYTHON_FOR_REGEN) $(srcdir)/Parser/asdl_c.py \
+		-h $(srcdir)/Include \
+		$(srcdir)/Parser/Python.asdl
+	# Regenerate Python/Python-ast.c using Parser/asdl_c.py -c
+	$(MKDIR_P) $(srcdir)/Python
+	$(PYTHON_FOR_REGEN) $(srcdir)/Parser/asdl_c.py \
+		-c $(srcdir)/Python \
+		$(srcdir)/Parser/Python.asdl
+
+Python/compile.o Python/symtable.o Python/ast.o: $(srcdir)/Include/graminit.h $(srcdir)/Include/Python-ast.h
+
+Python/getplatform.o: $(srcdir)/Python/getplatform.c
+		$(CC) -c $(PY_CFLAGS) -DPLATFORM='"$(MACHDEP)"' -o $@ $(srcdir)/Python/getplatform.c
+
+Python/importdl.o: $(srcdir)/Python/importdl.c
+		$(CC) -c $(PY_CFLAGS) -I$(DLINCLDIR) -o $@ $(srcdir)/Python/importdl.c
+
+Objects/unicodectype.o:	$(srcdir)/Objects/unicodectype.c \
+				$(srcdir)/Objects/unicodetype_db.h
+
+STRINGLIB_HEADERS= \
+		$(srcdir)/Include/bytes_methods.h \
+		$(srcdir)/Objects/stringlib/count.h \
+		$(srcdir)/Objects/stringlib/ctype.h \
+		$(srcdir)/Objects/stringlib/fastsearch.h \
+		$(srcdir)/Objects/stringlib/find.h \
+		$(srcdir)/Objects/stringlib/formatter.h \
+		$(srcdir)/Objects/stringlib/partition.h \
+		$(srcdir)/Objects/stringlib/split.h \
+		$(srcdir)/Objects/stringlib/stringdefs.h \
+		$(srcdir)/Objects/stringlib/string_format.h \
+		$(srcdir)/Objects/stringlib/transmogrify.h \
+		$(srcdir)/Objects/stringlib/unicodedefs.h \
+		$(srcdir)/Objects/stringlib/localeutil.h
+
+Objects/unicodeobject.o: $(srcdir)/Objects/unicodeobject.c \
+				$(STRINGLIB_HEADERS)
+
+Objects/bytearrayobject.o: $(srcdir)/Objects/bytearrayobject.c \
+				$(STRINGLIB_HEADERS)
+
+Objects/stringobject.o: $(srcdir)/Objects/stringobject.c \
+				$(STRINGLIB_HEADERS)
+
+.PHONY: regen-opcode-targets
+regen-opcode-targets:
+	# Regenerate Python/opcode_targets.h from Lib/opcode.py
+	# using Python/makeopcodetargets.py
+	$(PYTHON_FOR_REGEN) $(srcdir)/Python/makeopcodetargets.py \
+		$(srcdir)/Python/opcode_targets.h
+
+Python/ceval.o: $(srcdir)/Python/opcode_targets.h
+
+Python/formatter_unicode.o: $(srcdir)/Python/formatter_unicode.c \
+				$(STRINGLIB_HEADERS)
+
+Python/formatter_string.o: $(srcdir)/Python/formatter_string.c \
+				$(STRINGLIB_HEADERS)
+
+############################################################################
+# Header files
+
+PYTHON_HEADERS= \
+		Include/Python-ast.h \
+		Include/Python.h \
+		Include/abstract.h \
+		Include/asdl.h \
+		Include/ast.h \
+		Include/bitset.h \
+		Include/boolobject.h \
+		Include/bytearrayobject.h \
+		Include/bytes_methods.h \
+		Include/bytesobject.h \
+		Include/bufferobject.h \
+		Include/cellobject.h \
+		Include/ceval.h \
+		Include/classobject.h \
+		Include/cobject.h \
+		Include/code.h \
+		Include/codecs.h \
+		Include/compile.h \
+		Include/complexobject.h \
+		Include/descrobject.h \
+		Include/dictobject.h \
+		Include/dtoa.h \
+		Include/enumobject.h \
+		Include/errcode.h \
+		Include/eval.h \
+		Include/fileobject.h \
+		Include/floatobject.h \
+		Include/frameobject.h \
+		Include/funcobject.h \
+		Include/genobject.h \
+		Include/import.h \
+		Include/intobject.h \
+		Include/intrcheck.h \
+		Include/iterobject.h \
+		Include/listobject.h \
+		Include/longintrepr.h \
+		Include/longobject.h \
+		Include/marshal.h \
+		Include/memoryobject.h \
+		Include/metagrammar.h \
+		Include/methodobject.h \
+		Include/modsupport.h \
+		Include/moduleobject.h \
+		Include/node.h \
+		Include/object.h \
+		Include/objimpl.h \
+		Include/opcode.h \
+		Include/osdefs.h \
+		Include/parsetok.h \
+		Include/patchlevel.h \
+		Include/pgen.h \
+		Include/pgenheaders.h \
+		Include/pyarena.h \
+		Include/pycapsule.h \
+		Include/pyctype.h \
+		Include/pydebug.h \
+		Include/pyerrors.h \
+		Include/pyfpe.h \
+		Include/pymath.h \
+		Include/pygetopt.h \
+		Include/pymem.h \
+		Include/pyport.h \
+		Include/pystate.h \
+		Include/pystrcmp.h \
+		Include/pystrtod.h \
+		Include/pythonrun.h \
+		Include/pythread.h \
+		Include/rangeobject.h \
+		Include/setobject.h \
+		Include/sliceobject.h \
+		Include/stringobject.h \
+		Include/structmember.h \
+		Include/structseq.h \
+		Include/symtable.h \
+		Include/sysmodule.h \
+		Include/traceback.h \
+		Include/tupleobject.h \
+		Include/ucnhash.h \
+		Include/unicodeobject.h \
+		Include/warnings.h \
+		Include/weakrefobject.h \
+		pyconfig.h \
+		$(PARSER_HEADERS) \
+		$(srcdir)/Include/Python-ast.h
+
+$(LIBRARY_OBJS) $(MODOBJS) Modules/python.o: $(PYTHON_HEADERS)
+
+
+######################################################################
+
+# Test the interpreter (twice, once without .pyc files, once with)
+# In the past, we've had problems where bugs in the marshalling or
+# elsewhere caused bytecode read from .pyc files to behave differently
+# than bytecode generated directly from a .py source file.  Sometimes
+# the bytecode read from a .pyc file had the bug, sometimes the directly
+# generated bytecode.  This is sometimes a very shy bug needing a lot of
+# sample data.
+
+.PHONY: test testall testuniversal buildbottest pythoninfo
+
+TESTOPTS=	-l $(EXTRATESTOPTS)
+TESTPROG=	$(srcdir)/Lib/test/regrtest.py
+TESTPYTHON=	$(RUNSHARED) ./$(BUILDPYTHON) -Wd -3 -E -tt $(TESTPYTHONOPTS)
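+# (For reference: -Wd enables the "default" warning action, -3 warns about
+#  Python 3 incompatibilities, -E ignores PYTHON* environment variables and
+#  -tt turns inconsistent tab/space indentation into an error.)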
+
+# Remove "test_python_*" directories of previous failed test jobs.
+# Pass TESTOPTS options because it can contain --tempdir option.
+cleantest: build_all
+	$(TESTPYTHON) $(TESTPROG) $(TESTOPTS) --cleanup
+
+test:		@DEF_MAKE_RULE@ platform
+		-find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f
+		-$(TESTPYTHON) $(TESTPROG) $(TESTOPTS)
+		$(TESTPYTHON) $(TESTPROG) $(TESTOPTS)
+
+testall:	@DEF_MAKE_RULE@ platform
+		-find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f
+		$(TESTPYTHON) $(srcdir)/Lib/compileall.py
+		-find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f
+		-$(TESTPYTHON) $(TESTPROG) -uall $(TESTOPTS)
+		$(TESTPYTHON) $(TESTPROG) -uall $(TESTOPTS)
+
+#  Run the unit tests for both architectures in a Universal build on OSX
+#  Must be run on an Intel box.
+testuniversal:	@DEF_MAKE_RULE@ platform
+		if [ `arch` != 'i386' ];then \
+			echo "This can only be used on OSX/i386" ;\
+			exit 1 ;\
+		fi
+		-find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f
+		-$(TESTPYTHON) $(TESTPROG) -uall $(TESTOPTS)
+		$(TESTPYTHON) $(TESTPROG) -uall $(TESTOPTS)
+		$(RUNSHARED) /usr/libexec/oah/translate ./$(BUILDPYTHON) -E -tt $(TESTPROG) -uall $(TESTOPTS)
+
+
+# Like testall, but with a single pass only.
+# Also runs an optional script to report some information about the build environment.
+buildbottest:	build_all platform
+		-@if which pybuildbot.identify >/dev/null 2>&1; then \
+			pybuildbot.identify "CC='$(CC)'" "CXX='$(CXX)'"; \
+		fi
+		$(TESTPYTHON) -R $(TESTPROG) -uall --slowest -rwW $(TESTOPTS)
+
+pythoninfo: build_all
+		$(RUNSHARED) ./$(BUILDPYTHON) -m test.pythoninfo
+
+QUICKTESTOPTS=	$(TESTOPTS) -x test_subprocess test_io test_lib2to3 \
+		test_multibytecodec test_urllib2_localnet test_itertools \
+		test_multiprocessing test_mailbox test_socket test_poll \
+		test_select test_zipfile
+quicktest:	@DEF_MAKE_RULE@ platform
+		-find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f
+		-$(TESTPYTHON) $(TESTPROG) $(QUICKTESTOPTS)
+		$(TESTPYTHON) $(TESTPROG) $(QUICKTESTOPTS)
+
+MEMTESTOPTS=    $(QUICKTESTOPTS) -x test_dl test___all__ test_fork1 \
+		test_longexp
+memtest:	@DEF_MAKE_RULE@ platform
+		-rm -f $(srcdir)/Lib/test/*.py[co]
+		-$(TESTPYTHON) $(TESTPROG) $(MEMTESTOPTS)
+		$(TESTPYTHON) $(TESTPROG) $(MEMTESTOPTS)
+
+# SSL tests
+.PHONY: multisslcompile multissltest
+multisslcompile: build_all
+	$(RUNSHARED) ./$(BUILDPYTHON) Tools/ssl/multissltests.py --compile-only
+
+multissltest: build_all
+	$(RUNSHARED) ./$(BUILDPYTHON) Tools/ssl/multissltests.py
+
+# Install everything
+install:	@FRAMEWORKINSTALLFIRST@ commoninstall bininstall maninstall @FRAMEWORKINSTALLLAST@
+	if test "x$(ENSUREPIP)" != "xno"  ; then \
+		case $(ENSUREPIP) in \
+			upgrade) ensurepip="--upgrade" ;; \
+			install|*) ensurepip="" ;; \
+		esac; \
+		$(RUNSHARED) $(PYTHON_FOR_BUILD) -m ensurepip \
+			$$ensurepip --root=$(DESTDIR)/ ; \
+	fi
+
+# Install almost everything without disturbing previous versions
+altinstall:	commoninstall
+	if test "x$(ENSUREPIP)" != "xno"  ; then \
+		case $(ENSUREPIP) in \
+			upgrade) ensurepip="--altinstall --upgrade --no-default-pip" ;; \
+			install|*) ensurepip="--altinstall --no-default-pip" ;; \
+		esac; \
+		$(RUNSHARED) $(PYTHON_FOR_BUILD) -m ensurepip \
+			$$ensurepip --root=$(DESTDIR)/ ; \
+	fi
+
+commoninstall:	check-clean-src @FRAMEWORKALTINSTALLFIRST@ \
+		altbininstall libinstall inclinstall libainstall \
+		sharedinstall oldsharedinstall altmaninstall \
+		@FRAMEWORKALTINSTALLLAST@
+
+# Install shared libraries enabled by Setup
+DESTDIRS=	$(exec_prefix) $(LIBDIR) $(BINLIBDEST) $(DESTSHARED)
+
+oldsharedinstall: $(DESTSHARED) $(SHAREDMODS)
+		@for i in X $(SHAREDMODS); do \
+		  if test $$i != X; then \
+		    echo $(INSTALL_SHARED) $$i $(DESTSHARED)/`basename $$i`; \
+		    $(INSTALL_SHARED) $$i $(DESTDIR)$(DESTSHARED)/`basename $$i`; \
+		  fi; \
+		done
+
+$(DESTSHARED):
+		@for i in $(DESTDIRS); \
+		do \
+			if test ! -d $(DESTDIR)$$i; then \
+				echo "Creating directory $$i"; \
+				$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+			else    true; \
+			fi; \
+		done
+
+
+# Install the interpreter by creating a symlink chain:
+#  $(PYTHON) -> python2 -> python$(VERSION)
+# Also create equivalent chains for the other installed files; see the
+# summary after this rule.
+bininstall:	altbininstall
+	if test ! -d $(DESTDIR)$(LIBPC); then \
+		echo "Creating directory $(LIBPC)"; \
+		$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(LIBPC); \
+	fi
+	-if test -f $(DESTDIR)$(BINDIR)/$(PYTHON) -o -h $(DESTDIR)$(BINDIR)/$(PYTHON); \
+	then rm -f $(DESTDIR)$(BINDIR)/$(PYTHON); \
+	else true; \
+	fi
+	(cd $(DESTDIR)$(BINDIR); $(LN) -s python2$(EXE) $(PYTHON))
+	-rm -f $(DESTDIR)$(BINDIR)/python2$(EXE)
+	(cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)$(EXE) python2$(EXE))
+	-rm -f $(DESTDIR)$(BINDIR)/python2-config
+	(cd $(DESTDIR)$(BINDIR); $(LN) -s python$(VERSION)-config python2-config)
+	-rm -f $(DESTDIR)$(BINDIR)/python-config
+	(cd $(DESTDIR)$(BINDIR); $(LN) -s python2-config python-config)
+	-test -d $(DESTDIR)$(LIBPC) || $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$(LIBPC)
+	-rm -f $(DESTDIR)$(LIBPC)/python2.pc
+	(cd $(DESTDIR)$(LIBPC); $(LN) -s python-$(VERSION).pc python2.pc)
+	-rm -f $(DESTDIR)$(LIBPC)/python.pc
+	(cd $(DESTDIR)$(LIBPC); $(LN) -s python2.pc python.pc)
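+# Illustratively, with VERSION=2.7 (and an empty EXE suffix) the chains
+# created above are:
+#   $(BINDIR)/python         -> python2         -> python2.7
+#   $(BINDIR)/python-config  -> python2-config  -> python2.7-config
+#   $(LIBPC)/python.pc       -> python2.pc      -> python-2.7.pc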
+
+# Install the interpreter with $(VERSION) affixed
+# This goes into $(exec_prefix)
+altbininstall:	$(BUILDPYTHON)
+	@for i in $(BINDIR) $(LIBDIR); \
+	do \
+		if test ! -d $(DESTDIR)$$i; then \
+			echo "Creating directory $$i"; \
+			$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+		else	true; \
+		fi; \
+	done
+	$(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE)
+	if test -f $(LDLIBRARY); then \
+		if test -n "$(DLLLIBRARY)" ; then \
+			$(INSTALL_SHARED) $(DLLLIBRARY) $(DESTDIR)$(BINDIR); \
+		else \
+			$(INSTALL_SHARED) $(LDLIBRARY) $(DESTDIR)$(LIBDIR)/$(INSTSONAME); \
+			if test $(LDLIBRARY) != $(INSTSONAME); then \
+				(cd $(DESTDIR)$(LIBDIR); $(LN) -sf $(INSTSONAME) $(LDLIBRARY)) \
+			fi \
+		fi; \
+	else	true; \
+	fi
+
+# Install the versioned manual page
+altmaninstall:
+	@for i in $(MANDIR) $(MANDIR)/man1; \
+	do \
+		if test ! -d $(DESTDIR)$$i; then \
+			echo "Creating directory $$i"; \
+			$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+		else	true; \
+		fi; \
+	done
+	$(INSTALL_DATA) $(srcdir)/Misc/python.man \
+		$(DESTDIR)$(MANDIR)/man1/python$(VERSION).1
+
+# Install the unversioned manual pages
+maninstall:	altmaninstall
+	-rm -f $(DESTDIR)$(MANDIR)/man1/python2.1
+	(cd $(DESTDIR)$(MANDIR)/man1; $(LN) -s python$(VERSION).1 python2.1)
+	-rm -f $(DESTDIR)$(MANDIR)/man1/python.1
+	(cd $(DESTDIR)$(MANDIR)/man1; $(LN) -s python2.1 python.1)
+
+# Install the library
+PLATDIR=	@PLATDIR@
+EXTRAPLATDIR= @EXTRAPLATDIR@
+EXTRAMACHDEPPATH=@EXTRAMACHDEPPATH@
+MACHDEPS=	$(PLATDIR) $(EXTRAPLATDIR)
+XMLLIBSUBDIRS=  xml xml/dom xml/etree xml/parsers xml/sax
+PLATMACDIRS= plat-mac plat-mac/Carbon plat-mac/lib-scriptpackages \
+	plat-mac/lib-scriptpackages/_builtinSuites \
+	plat-mac/lib-scriptpackages/CodeWarrior \
+	plat-mac/lib-scriptpackages/Explorer \
+	plat-mac/lib-scriptpackages/Finder \
+	plat-mac/lib-scriptpackages/Netscape \
+	plat-mac/lib-scriptpackages/StdSuites \
+	plat-mac/lib-scriptpackages/SystemEvents \
+	plat-mac/lib-scriptpackages/Terminal
+PLATMACPATH=:plat-mac:plat-mac/lib-scriptpackages
+LIBSUBDIRS=	lib-tk lib-tk/test lib-tk/test/test_tkinter \
+		lib-tk/test/test_ttk site-packages test test/audiodata test/capath \
+		test/data test/cjkencodings test/decimaltestdata test/xmltestdata \
+		test/imghdrdata \
+		test/subprocessdata \
+		test/support \
+		test/tracedmodules \
+		encodings compiler hotshot \
+		email email/mime email/test email/test/data \
+		ensurepip ensurepip/_bundled \
+		json json/tests \
+		sqlite3 sqlite3/test \
+		logging bsddb bsddb/test csv importlib wsgiref \
+		lib2to3 lib2to3/fixes lib2to3/pgen2 lib2to3/tests \
+		lib2to3/tests/data lib2to3/tests/data/fixers lib2to3/tests/data/fixers/myfixes \
+		ctypes ctypes/test ctypes/macholib \
+		idlelib idlelib/Icons idlelib/idle_test \
+		distutils distutils/command distutils/tests $(XMLLIBSUBDIRS) \
+		multiprocessing multiprocessing/dummy \
+		unittest unittest/test \
+		lib-old \
+		curses pydoc_data $(MACHDEPS)
+libinstall:	build_all $(srcdir)/Lib/$(PLATDIR) $(srcdir)/Modules/xxmodule.c
+	@for i in $(SCRIPTDIR) $(LIBDEST); \
+	do \
+		if test ! -d $(DESTDIR)$$i; then \
+			echo "Creating directory $$i"; \
+			$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+		else	true; \
+		fi; \
+	done
+	@for d in $(LIBSUBDIRS); \
+	do \
+		a=$(srcdir)/Lib/$$d; \
+		if test ! -d $$a; then continue; else true; fi; \
+		b=$(LIBDEST)/$$d; \
+		if test ! -d $(DESTDIR)$$b; then \
+			echo "Creating directory $$b"; \
+			$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$b; \
+		else	true; \
+		fi; \
+	done
+	@for i in $(srcdir)/Lib/*.py `cat pybuilddir.txt`/_sysconfigdata.py $(srcdir)/Lib/*.doc $(srcdir)/Lib/*.egg-info ; \
+	do \
+		if test -x $$i; then \
+			$(INSTALL_SCRIPT) $$i $(DESTDIR)$(LIBDEST); \
+			echo $(INSTALL_SCRIPT) $$i $(LIBDEST); \
+		else \
+			$(INSTALL_DATA) $$i $(DESTDIR)$(LIBDEST); \
+			echo $(INSTALL_DATA) $$i $(LIBDEST); \
+		fi; \
+	done
+	@for d in $(LIBSUBDIRS); \
+	do \
+		a=$(srcdir)/Lib/$$d; \
+		if test ! -d $$a; then continue; else true; fi; \
+		if test `ls $$a | wc -l` -lt 1; then continue; fi; \
+		b=$(LIBDEST)/$$d; \
+		for i in $$a/*; \
+		do \
+			case $$i in \
+			*CVS) ;; \
+			*.py[co]) ;; \
+			*.orig) ;; \
+			*~) ;; \
+			*) \
+				if test -d $$i; then continue; fi; \
+				if test -x $$i; then \
+				    echo $(INSTALL_SCRIPT) $$i $$b; \
+				    $(INSTALL_SCRIPT) $$i $(DESTDIR)$$b; \
+				else \
+				    echo $(INSTALL_DATA) $$i $$b; \
+				    $(INSTALL_DATA) $$i $(DESTDIR)$$b; \
+				fi;; \
+			esac; \
+		done; \
+	done
+	$(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt
+	if test -d $(DESTDIR)$(LIBDEST)/distutils/tests; then \
+		$(INSTALL_DATA) $(srcdir)/Modules/xxmodule.c \
+			$(DESTDIR)$(LIBDEST)/distutils/tests ; \
+	fi
+	PYTHONPATH=$(DESTDIR)$(LIBDEST)  $(RUNSHARED) \
+		$(PYTHON_FOR_BUILD) -Wi -tt $(DESTDIR)$(LIBDEST)/compileall.py \
+		-d $(LIBDEST) -f \
+		-x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \
+		$(DESTDIR)$(LIBDEST)
+	PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
+		$(PYTHON_FOR_BUILD) -Wi -tt -O $(DESTDIR)$(LIBDEST)/compileall.py \
+		-d $(LIBDEST) -f \
+		-x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \
+		$(DESTDIR)$(LIBDEST)
+	-PYTHONPATH=$(DESTDIR)$(LIBDEST)  $(RUNSHARED) \
+		$(PYTHON_FOR_BUILD) -Wi -t $(DESTDIR)$(LIBDEST)/compileall.py \
+		-d $(LIBDEST)/site-packages -f \
+		-x badsyntax $(DESTDIR)$(LIBDEST)/site-packages
+	-PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
+		$(PYTHON_FOR_BUILD) -Wi -t -O $(DESTDIR)$(LIBDEST)/compileall.py \
+		-d $(LIBDEST)/site-packages -f \
+		-x badsyntax $(DESTDIR)$(LIBDEST)/site-packages
+	-PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
+		$(PYTHON_FOR_BUILD) -m lib2to3.pgen2.driver $(DESTDIR)$(LIBDEST)/lib2to3/Grammar.txt
+	-PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
+		$(PYTHON_FOR_BUILD) -m lib2to3.pgen2.driver $(DESTDIR)$(LIBDEST)/lib2to3/PatternGrammar.txt
+
+# Create the PLATDIR source directory, if one wasn't distributed.
+$(srcdir)/Lib/$(PLATDIR):
+	mkdir $(srcdir)/Lib/$(PLATDIR)
+	cp $(srcdir)/Lib/plat-generic/regen $(srcdir)/Lib/$(PLATDIR)/regen
+	export PATH; PATH="`pwd`:$$PATH"; \
+	export PYTHONPATH; PYTHONPATH="$(srcdir)/Lib:$(abs_builddir)/`cat pybuilddir.txt`"; \
+	export DYLD_FRAMEWORK_PATH; DYLD_FRAMEWORK_PATH="`pwd`"; \
+	export EXE; EXE="$(BUILDEXE)"; \
+	if [ -n "$(MULTIARCH)" ]; then export MULTIARCH; MULTIARCH=$(MULTIARCH); fi; \
+	export PYTHON_FOR_BUILD; \
+	if [ "$(build)" = "$(host)" ]; then \
+	  PYTHON_FOR_BUILD="$(BUILDPYTHON)"; \
+	else \
+	  PYTHON_FOR_BUILD="$(PYTHON_FOR_BUILD)"; \
+	fi; \
+	cd $(srcdir)/Lib/$(PLATDIR); $(RUNSHARED) ./regen
+
+python-config: $(srcdir)/Misc/python-config.in
+	# Substitution happens here, as the completely-expanded BINDIR
+	# is not available in configure
+	sed -e "s,@EXENAME@,$(BINDIR)/python$(VERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config
+
+# Install the include files
+INCLDIRSTOMAKE=$(INCLUDEDIR) $(CONFINCLUDEDIR) $(INCLUDEPY) $(CONFINCLUDEPY)
+inclinstall:
+	@for i in $(INCLDIRSTOMAKE); \
+	do \
+		if test ! -d $(DESTDIR)$$i; then \
+			echo "Creating directory $$i"; \
+			$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+		else	true; \
+		fi; \
+	done
+	@for i in $(srcdir)/Include/*.h; \
+	do \
+		echo $(INSTALL_DATA) $$i $(INCLUDEPY); \
+		$(INSTALL_DATA) $$i $(DESTDIR)$(INCLUDEPY); \
+	done
+	$(INSTALL_DATA) pyconfig.h $(DESTDIR)$(CONFINCLUDEPY)/pyconfig.h
+
+# Install the library and miscellaneous stuff needed for extending/embedding
+# This goes into $(exec_prefix)
+LIBPL=		$(LIBP)/config
+
+# pkgconfig directory
+LIBPC=		$(LIBDIR)/pkgconfig
+
+libainstall:	@DEF_MAKE_RULE@ python-config
+	@for i in $(LIBDIR) $(LIBP) $(LIBPL) $(LIBPC); \
+	do \
+		if test ! -d $(DESTDIR)$$i; then \
+			echo "Creating directory $$i"; \
+			$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+		else	true; \
+		fi; \
+	done
+	@if test -d $(LIBRARY); then :; else \
+		if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
+			if test "$(SO)" = .dll; then \
+				$(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
+			else \
+				$(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
+				$(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
+			fi; \
+		else \
+			echo Skip install of $(LIBRARY) - use make frameworkinstall; \
+		fi; \
+	fi
+	$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
+	$(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o
+	$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
+	$(INSTALL_DATA) Makefile $(DESTDIR)$(LIBPL)/Makefile
+	$(INSTALL_DATA) Modules/Setup $(DESTDIR)$(LIBPL)/Setup
+	$(INSTALL_DATA) Modules/Setup.local $(DESTDIR)$(LIBPL)/Setup.local
+	$(INSTALL_DATA) Modules/Setup.config $(DESTDIR)$(LIBPL)/Setup.config
+	$(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION).pc
+	$(INSTALL_SCRIPT) $(srcdir)/Modules/makesetup $(DESTDIR)$(LIBPL)/makesetup
+	$(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh
+	$(INSTALL_SCRIPT) python-config $(DESTDIR)$(BINDIR)/python$(VERSION)-config
+	rm python-config
+	@if [ -s Modules/python.exp -a \
+		"`echo $(MACHDEP) | sed 's/^\(...\).*/\1/'`" = "aix" ]; then \
+		echo; echo "Installing support files for building shared extension modules on AIX:"; \
+		$(INSTALL_DATA) Modules/python.exp		\
+				$(DESTDIR)$(LIBPL)/python.exp;		\
+		echo; echo "$(LIBPL)/python.exp";		\
+		$(INSTALL_SCRIPT) $(srcdir)/Modules/makexp_aix	\
+				$(DESTDIR)$(LIBPL)/makexp_aix;		\
+		echo "$(LIBPL)/makexp_aix";			\
+		$(INSTALL_SCRIPT) Modules/ld_so_aix	\
+				$(DESTDIR)$(LIBPL)/ld_so_aix;		\
+		echo "$(LIBPL)/ld_so_aix";			\
+		echo; echo "See Misc/AIX-NOTES for details.";	\
+	else true; \
+	fi
+	@case "$(MACHDEP)" in beos*) \
+		echo; echo "Installing support files for building shared extension modules on BeOS:"; \
+		$(INSTALL_DATA) Misc/BeOS-NOTES $(DESTDIR)$(LIBPL)/README;	\
+		echo; echo "$(LIBPL)/README";			\
+		$(INSTALL_SCRIPT) Modules/ar_beos $(DESTDIR)$(LIBPL)/ar_beos; \
+		echo "$(LIBPL)/ar_beos";			\
+		$(INSTALL_SCRIPT) Modules/ld_so_beos $(DESTDIR)$(LIBPL)/ld_so_beos; \
+		echo "$(LIBPL)/ld_so_beos";			\
+		echo; echo "See Misc/BeOS-NOTES for details.";	\
+		;; \
+	esac
+
+# Install the dynamically loadable modules
+# This goes into $(exec_prefix)
+sharedinstall: sharedmods
+	$(RUNSHARED) $(PYTHON_FOR_BUILD) $(srcdir)/setup.py install \
+	   	--prefix=$(prefix) \
+		--install-scripts=$(BINDIR) \
+		--install-platlib=$(DESTSHARED) \
+		--root=$(DESTDIR)/
+	-rm $(DESTDIR)$(DESTSHARED)/_sysconfigdata.py*
+
+# Here are a couple of targets for MacOSX again, to install a full
+# framework-based Python. frameworkinstall installs everything, the
+# subtargets install specific parts. Much of the actual work is offloaded to
+# the Makefile in Mac
+#
+#
+# This target is here for backward compatibility: previous versions of Python
+# had not integrated framework installation into the normal install process.
+frameworkinstall: install
+
+# On install, we re-make the framework
+# structure in the install location, /Library/Frameworks/ or the argument to
+# --enable-framework. If --enable-framework has been specified then we have
+# automatically set prefix to the location deep down in the framework, so we
+# only have to cater for the structural bits of the framework.
+
+frameworkinstallframework: frameworkinstallstructure install frameworkinstallmaclib
+
+frameworkinstallstructure:	$(LDLIBRARY)
+	@if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
+		echo Not configured with --enable-framework; \
+		exit 1; \
+	else true; \
+	fi
+	@for i in $(prefix)/Resources/English.lproj $(prefix)/lib; do\
+		if test ! -d $(DESTDIR)$$i; then \
+			echo "Creating directory $(DESTDIR)$$i"; \
+			$(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+		else	true; \
+		fi; \
+	done
+	$(LN) -fsn include/python$(VERSION) $(DESTDIR)$(prefix)/Headers
+	sed 's/%VERSION%/'"`$(RUNSHARED) ./$(BUILDPYTHON) -c 'import platform; print platform.python_version()'`"'/g' < $(RESSRCDIR)/Info.plist > $(DESTDIR)$(prefix)/Resources/Info.plist
+	$(LN) -fsn $(VERSION) $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Versions/Current
+	$(LN) -fsn Versions/Current/$(PYTHONFRAMEWORK) $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/$(PYTHONFRAMEWORK)
+	$(LN) -fsn Versions/Current/Headers $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers
+	$(LN) -fsn Versions/Current/Resources $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Resources
+	$(INSTALL_SHARED) $(LDLIBRARY) $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/$(LDLIBRARY)
+
+# This installs Mac/Lib into the framework
+# Install a number of symlinks to keep software that expects a normal unix
+# install (which includes python-config) happy.
+frameworkinstallmaclib:
+	ln -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/python$(VERSION)/config/libpython$(VERSION).a"
+	ln -fs "../../../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/python$(VERSION)/config/libpython$(VERSION).dylib"
+	ln -fs "../$(PYTHONFRAMEWORK)" "$(DESTDIR)$(prefix)/lib/libpython$(VERSION).dylib"
+	cd Mac && $(MAKE) installmacsubtree DESTDIR="$(DESTDIR)"
+
+# This installs the IDE, the Launcher and other apps into /Applications
+frameworkinstallapps:
+	cd Mac && $(MAKE) installapps DESTDIR="$(DESTDIR)"
+
+# This installs the unix python and pythonw tools in /usr/local/bin
+frameworkinstallunixtools:
+	cd Mac && $(MAKE) installunixtools DESTDIR="$(DESTDIR)"
+
+frameworkaltinstallunixtools:
+	cd Mac && $(MAKE) altinstallunixtools DESTDIR="$(DESTDIR)"
+
+# This installs the Demos and Tools into the applications directory.
+# It is not part of a normal frameworkinstall
+frameworkinstallextras:
+	cd Mac && $(MAKE) installextras DESTDIR="$(DESTDIR)"
+
+# This installs a few of the useful scripts in Tools/scripts
+scriptsinstall:
+	SRCDIR=$(srcdir) $(RUNSHARED) \
+	$(PYTHON_FOR_BUILD) $(srcdir)/Tools/scripts/setup.py install \
+	--prefix=$(prefix) \
+	--install-scripts=$(BINDIR) \
+	--root=$(DESTDIR)/
+
+# Build the toplevel Makefile
+Makefile.pre: Makefile.pre.in config.status
+	CONFIG_FILES=Makefile.pre CONFIG_HEADERS= $(SHELL) config.status
+	$(MAKE) -f Makefile.pre Makefile
+
+# Run the configure script.
+config.status:	$(srcdir)/configure
+	$(SHELL) $(srcdir)/configure $(CONFIG_ARGS)
+
+.PRECIOUS: config.status $(BUILDPYTHON) Makefile Makefile.pre
+
+# Some versions of make put the object file in the current directory
+.c.o:
+	$(CC) -c $(PY_CFLAGS) -o $@ $<
+
+# Run reindent on the library
+reindent:
+	./$(BUILDPYTHON) $(srcdir)/Tools/scripts/reindent.py -r $(srcdir)/Lib
+
+# Rerun configure with the same options as it was run last time,
+# provided the config.status script exists
+recheck:
+	$(SHELL) config.status --recheck
+	$(SHELL) config.status
+
+# Regenerate configure and pyconfig.h.in
+.PHONY: autoconf
+autoconf:
+	# Regenerate the configure script from configure.ac using autoconf
+	(cd $(srcdir); autoconf)
+	# Regenerate pyconfig.h.in from configure.ac using autoheader
+	(cd $(srcdir); autoheader)
+
+# Create a tags file for vi
+tags::
+	ctags -w $(srcdir)/Include/*.h
+	for i in $(SRCDIRS); do ctags -f tags -w -a $(srcdir)/$$i/*.[ch]; done
+	ctags -f tags -w -a $(srcdir)/Modules/_ctypes/*.[ch]
+	LC_ALL=C sort -o tags tags
+
+# Create a tags file for GNU Emacs
+TAGS::
+	cd $(srcdir); \
+	etags Include/*.h; \
+	for i in $(SRCDIRS); do etags -a $$i/*.[ch]; done
+
+# Sanitation targets -- clean leaves libraries, executables and tags
+# files, which clobber removes as well
+pycremoval:
+	find $(srcdir) -name '*.py[co]' -exec rm -f {} ';'
+
+clean: pycremoval
+	find . -name '*.[oa]' -exec rm -f {} ';'
+	find . -name '*.s[ol]' -exec rm -f {} ';'
+	find . -name '*.so.[0-9]*.[0-9]*' -exec rm -f {} ';'
+	find build -name 'fficonfig.h' -exec rm -f {} ';' || true
+	find build -name 'fficonfig.py' -exec rm -f {} ';' || true
+	-rm -f Lib/lib2to3/*Grammar*.pickle
+	-find build -type f -a ! -name '*.gc??' -exec rm -f {} ';'
+
+profile-removal:
+	find . -name '*.gc??' -exec rm -f {} ';'
+	find . -name '*.profclang?' -exec rm -f {} ';'
+	find . -name '*.dyn' -exec rm -f {} ';'
+
+clobber: clean profile-removal
+	-rm -f $(BUILDPYTHON) $(PGEN) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \
+		tags TAGS \
+		config.cache config.log pyconfig.h Modules/config.c
+	-rm -rf build platform
+	-rm -rf $(PYTHONFRAMEWORKDIR)
+
+# Make things extra clean, before making a distribution:
+# remove all generated files, even Makefile[.pre]
+# Keep configure and Python-ast.[ch]; it's possible they can't be regenerated
+distclean: clobber
+	for file in Lib/test/data/* ; do \
+	    if test "$$file" != "Lib/test/data/README"; then rm "$$file"; fi; \
+	done
+	-rm -f core Makefile Makefile.pre config.status \
+		Modules/Setup Modules/Setup.local Modules/Setup.config \
+		Modules/ld_so_aix Modules/python.exp Misc/python.pc
+	-rm -f python*-gdb.py
+	-rm -f pybuilddir.txt
+	# Issue #28258: set LC_ALL to avoid issues with Estonian locale.
+	# Expansion is performed here by shell (spawned by make) itself before
+	# arguments are passed to find. So LC_ALL=C must be set as a separate
+	# command.
+	LC_ALL=C; find $(srcdir)/[a-zA-Z]* '(' -name '*.fdc' -o -name '*~' \
+				     -o -name '[@,#]*' -o -name '*.old' \
+				     -o -name '*.orig' -o -name '*.rej' \
+				     -o -name '*.bak' ')' \
+				     -exec rm -f {} ';'
+
+# Check for smelly exported symbols (not starting with Py/_Py)
+smelly: @DEF_MAKE_RULE@
+	nm -p $(LIBRARY) | \
+		sed -n "/ [TDB] /s/.* //p" | grep -v "^_*Py" | sort -u; \
+
+# Find files with funny names
+funny:
+	find $(SUBDIRS) $(SUBDIRSTOO) -type d \
+		-o -name '*.[chs]' \
+		-o -name '*.py' \
+		-o -name '*.doc' \
+		-o -name '*.sty' \
+		-o -name '*.bib' \
+		-o -name '*.dat' \
+		-o -name '*.el' \
+		-o -name '*.fd' \
+		-o -name '*.in' \
+		-o -name '*.tex' \
+		-o -name '*,[vpt]' \
+		-o -name 'Setup' \
+		-o -name 'Setup.*' \
+		-o -name README \
+		-o -name Makefile \
+		-o -name ChangeLog \
+		-o -name Repository \
+		-o -name Root \
+		-o -name Entries \
+		-o -name Tag \
+		-o -name tags \
+		-o -name TAGS \
+		-o -name .cvsignore \
+		-o -name MANIFEST \
+		-o -print
+
+# Perform some verification checks on any modified files.
+patchcheck:
+	$(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/scripts/patchcheck.py
+
+# Dependencies
+
+Python/thread.o: @THREADHEADERS@
+
+# Declare targets that aren't real files
+.PHONY: all build_all sharedmods check-clean-src oldsharedmods test quicktest memtest
+.PHONY: install altinstall oldsharedinstall bininstall altbininstall
+.PHONY: maninstall libinstall inclinstall libainstall sharedinstall
+.PHONY: frameworkinstall frameworkinstallframework frameworkinstallstructure
+.PHONY: frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools
+.PHONY: frameworkaltinstallunixtools recheck clean clobber distclean
+.PHONY: smelly funny patchcheck altmaninstall commoninstall
+.PHONY: gdbhooks
+
+# IF YOU PUT ANYTHING HERE IT WILL GO AWAY
Index: Modules/getpath.c
===================================================================
--- Modules/getpath.c	(nonexistent)
+++ Modules/getpath.c	(revision 5)
@@ -0,0 +1,693 @@
+/* Return the initial module search path. */
+
+#include "Python.h"
+#include "osdefs.h"
+
+#include <sys/types.h>
+#include <string.h>
+
+#ifdef __APPLE__
+#include <mach-o/dyld.h>
+#endif
+
+/* Search in some common locations for the associated Python libraries.
+ *
+ * Two directories must be found, the platform independent directory
+ * (prefix), containing the common .py and .pyc files, and the platform
+ * dependent directory (exec_prefix), containing the shared library
+ * modules.  Note that prefix and exec_prefix can be the same directory,
+ * but for some installations, they are different.
+ *
+ * Py_GetPath() carries out separate searches for prefix and exec_prefix.
+ * Each search tries a number of different locations until a ``landmark''
+ * file or directory is found.  If no prefix or exec_prefix is found, a
+ * warning message is issued and the preprocessor defined PREFIX and
+ * EXEC_PREFIX are used (even though they will not work); python carries on
+ * as best as is possible, but most imports will fail.
+ *
+ * Before any searches are done, the location of the executable is
+ * determined.  If argv[0] has one or more slashes in it, it is used
+ * unchanged.  Otherwise, it must have been invoked from the shell's path,
+ * so we search $PATH for the named executable and use that.  If the
+ * executable was not found on $PATH (or there was no $PATH environment
+ * variable), the original argv[0] string is used.
+ *
+ * Next, the executable location is examined to see if it is a symbolic
+ * link.  If so, the link is chased (correctly interpreting a relative
+ * pathname if one is found) and the directory of the link target is used.
+ *
+ * Finally, argv0_path is set to the directory containing the executable
+ * (i.e. the last component is stripped).
+ *
+ * With argv0_path in hand, we perform a number of steps.  The same steps
+ * are performed for prefix and for exec_prefix, but with a different
+ * landmark.
+ *
+ * Step 1. Are we running python out of the build directory?  This is
+ * checked by looking for a different kind of landmark relative to
+ * argv0_path.  For prefix, the landmark's path is derived from the VPATH
+ * preprocessor variable (taking into account that its value is almost, but
+ * not quite, what we need).  For exec_prefix, the landmark is
+ * Modules/Setup.  If the landmark is found, we're done.
+ *
+ * For the remaining steps, the prefix landmark will always be
+ * lib32/python$VERSION/os.py and the exec_prefix will always be
+ * lib32/python$VERSION/lib-dynload, where $VERSION is Python's version
+ * number as supplied by the Makefile.  Note that this means that no more
+ * build directory checking is performed; if the first step did not find
+ * the landmarks, the assumption is that python is running from an
+ * installed setup.
+ *
+ * Step 2. See if the $PYTHONHOME environment variable points to the
+ * installed location of the Python libraries.  If $PYTHONHOME is set, then
+ * it points to prefix and exec_prefix.  $PYTHONHOME can be a single
+ * directory, which is used for both, or the prefix and exec_prefix
+ * directories separated by a colon.
+ *
+ * Step 3. Try to find prefix and exec_prefix relative to argv0_path,
+ * backtracking up the path until it is exhausted.  This is the most common
+ * step to succeed.  Note that if prefix and exec_prefix are different,
+ * exec_prefix is more likely to be found; however if exec_prefix is a
+ * subdirectory of prefix, both will be found.
+ *
+ * Step 4. Search the directories pointed to by the preprocessor variables
+ * PREFIX and EXEC_PREFIX.  These are supplied by the Makefile but can be
+ * passed in as options to the configure script.
+ *
+ * That's it!
+ *
+ * Well, almost.  Once we have determined prefix and exec_prefix, the
+ * preprocessor variable PYTHONPATH is used to construct a path.  Each
+ * relative path on PYTHONPATH is prefixed with prefix.  Then the directory
+ * containing the shared library modules is appended.  The environment
+ * variable $PYTHONPATH is inserted in front of it all.  Finally, the
+ * prefix and exec_prefix globals are tweaked so they reflect the values
+ * expected by other code, by stripping the "lib/python$VERSION/..." stuff
+ * off.  If either points to the build directory, the globals are reset to
+ * the corresponding preprocessor variables (so sys.prefix will reflect the
+ * installation location, even though sys.path points into the build
+ * directory).  This seems to make more sense given that currently the only
+ * known use of sys.prefix and sys.exec_prefix is for the ILU installation
+ * process to find the installed Python tree.
+ */
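+/* Illustrative walk-through (not part of the original comment above): with
+   VERSION "2.7", $PYTHONHOME unset and the interpreter resolved to
+   /usr/local/bin/python2.7, step 3 probes, in order,
+
+       /usr/local/bin/lib32/python2.7/os.py
+       /usr/local/lib32/python2.7/os.py
+       /usr/lib32/python2.7/os.py
+
+   stopping at the first landmark found; if the second probe succeeds, the
+   prefix resolves to /usr/local once the trailing components are stripped
+   again.  If none succeeds, step 4 falls back to the compiled-in PREFIX.
+*/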
+
+#ifdef __cplusplus
+ extern "C" {
+#endif
+
+
+#if !defined(PREFIX) || !defined(EXEC_PREFIX) || !defined(VERSION) || !defined(VPATH)
+#error "PREFIX, EXEC_PREFIX, VERSION, and VPATH must be constant defined"
+#endif
+
+#ifndef LANDMARK
+#define LANDMARK "os.py"
+#endif
+
+static char prefix[MAXPATHLEN+1];
+static char exec_prefix[MAXPATHLEN+1];
+static char progpath[MAXPATHLEN+1];
+static char *module_search_path = NULL;
+static char lib_python[] = "lib32/python" VERSION;
+
+static void
+reduce(char *dir)
+{
+    size_t i = strlen(dir);
+    while (i > 0 && dir[i] != SEP)
+        --i;
+    dir[i] = '\0';
+}
+
+
+static int
+isfile(char *filename)          /* Is file, not directory */
+{
+    struct stat buf;
+    if (stat(filename, &buf) != 0)
+        return 0;
+    if (!S_ISREG(buf.st_mode))
+        return 0;
+    return 1;
+}
+
+
+static int
+ismodule(char *filename)        /* Is module -- check for .pyc/.pyo too */
+{
+    if (isfile(filename))
+        return 1;
+
+    /* Check for the compiled version of prefix. */
+    if (strlen(filename) < MAXPATHLEN) {
+        strcat(filename, Py_OptimizeFlag ? "o" : "c");
+        if (isfile(filename))
+            return 1;
+    }
+    return 0;
+}
+
+
+static int
+isxfile(char *filename)         /* Is executable file */
+{
+    struct stat buf;
+    if (stat(filename, &buf) != 0)
+        return 0;
+    if (!S_ISREG(buf.st_mode))
+        return 0;
+    if ((buf.st_mode & 0111) == 0)
+        return 0;
+    return 1;
+}
+
+
+static int
+isdir(char *filename)                   /* Is directory */
+{
+    struct stat buf;
+    if (stat(filename, &buf) != 0)
+        return 0;
+    if (!S_ISDIR(buf.st_mode))
+        return 0;
+    return 1;
+}
+
+
+/* Add a path component, by appending stuff to buffer.
+   buffer must have at least MAXPATHLEN + 1 bytes allocated, and contain a
+   NUL-terminated string with no more than MAXPATHLEN characters (not counting
+   the trailing NUL).  It's a fatal error if it contains a string longer than
+   that (callers must be careful!).  If these requirements are met, it's
+   guaranteed that buffer will still be a NUL-terminated string with no more
+   than MAXPATHLEN characters at exit.  If stuff is too long, only as much of
+   stuff as fits will be appended.
+*/
+static void
+joinpath(char *buffer, char *stuff)
+{
+    size_t n, k;
+    if (stuff[0] == SEP)
+        n = 0;
+    else {
+        n = strlen(buffer);
+        if (n > 0 && buffer[n-1] != SEP && n < MAXPATHLEN)
+            buffer[n++] = SEP;
+    }
+    if (n > MAXPATHLEN)
+        Py_FatalError("buffer overflow in getpath.c's joinpath()");
+    k = strlen(stuff);
+    if (n + k > MAXPATHLEN)
+        k = MAXPATHLEN - n;
+    strncpy(buffer+n, stuff, k);
+    buffer[n+k] = '\0';
+}
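+
+/* For example (illustrative values): with buffer == "/usr/local",
+   joinpath(buffer, "lib32/python2.7") leaves "/usr/local/lib32/python2.7";
+   an absolute second argument such as "/opt/python" simply replaces the
+   buffer contents. */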
+
+/* copy_absolute requires that path be allocated at least
+   MAXPATHLEN + 1 bytes and that p be no more than MAXPATHLEN bytes. */
+static void
+copy_absolute(char *path, char *p)
+{
+    if (p[0] == SEP)
+        strcpy(path, p);
+    else {
+        if (!getcwd(path, MAXPATHLEN)) {
+            /* unable to get the current directory */
+            strcpy(path, p);
+            return;
+        }
+        if (p[0] == '.' && p[1] == SEP)
+            p += 2;
+        joinpath(path, p);
+    }
+}
+
+/* absolutize() requires that path be allocated at least MAXPATHLEN+1 bytes. */
+static void
+absolutize(char *path)
+{
+    char buffer[MAXPATHLEN + 1];
+
+    if (path[0] == SEP)
+        return;
+    copy_absolute(buffer, path);
+    strcpy(path, buffer);
+}
+
+/* search_for_prefix requires that argv0_path be no more than MAXPATHLEN
+   bytes long.
+*/
+static int
+search_for_prefix(char *argv0_path, char *home)
+{
+    size_t n;
+    char *vpath;
+
+    /* If PYTHONHOME is set, we believe it unconditionally */
+    if (home) {
+        char *delim;
+        strncpy(prefix, home, MAXPATHLEN);
+        delim = strchr(prefix, DELIM);
+        if (delim)
+            *delim = '\0';
+        joinpath(prefix, lib_python);
+        joinpath(prefix, LANDMARK);
+        return 1;
+    }
+
+    /* Check to see if argv[0] is in the build directory */
+    strcpy(prefix, argv0_path);
+    joinpath(prefix, "Modules/Setup");
+    if (isfile(prefix)) {
+        /* Check VPATH to see if argv0_path is in the build directory. */
+        vpath = VPATH;
+        strcpy(prefix, argv0_path);
+        joinpath(prefix, vpath);
+        joinpath(prefix, "Lib");
+        joinpath(prefix, LANDMARK);
+        if (ismodule(prefix))
+            return -1;
+    }
+
+    /* Search from argv0_path, until root is found */
+    copy_absolute(prefix, argv0_path);
+    do {
+        n = strlen(prefix);
+        joinpath(prefix, lib_python);
+        joinpath(prefix, LANDMARK);
+        if (ismodule(prefix))
+            return 1;
+        prefix[n] = '\0';
+        reduce(prefix);
+    } while (prefix[0]);
+
+    /* Look at configure's PREFIX */
+    strncpy(prefix, PREFIX, MAXPATHLEN);
+    joinpath(prefix, lib_python);
+    joinpath(prefix, LANDMARK);
+    if (ismodule(prefix))
+        return 1;
+
+    /* Fail */
+    return 0;
+}
+
+
+/* search_for_exec_prefix requires that argv0_path be no more than
+   MAXPATHLEN bytes long.
+*/
+static int
+search_for_exec_prefix(char *argv0_path, char *home)
+{
+    size_t n;
+
+    /* If PYTHONHOME is set, we believe it unconditionally */
+    if (home) {
+        char *delim;
+        delim = strchr(home, DELIM);
+        if (delim)
+            strncpy(exec_prefix, delim+1, MAXPATHLEN);
+        else
+            strncpy(exec_prefix, home, MAXPATHLEN);
+        joinpath(exec_prefix, lib_python);
+        joinpath(exec_prefix, "lib-dynload");
+        return 1;
+    }
+
+    /* Check to see if argv[0] is in the build directory. "pybuilddir.txt"
+       is written by setup.py and contains the relative path to the location
+       of shared library modules. */
+    strcpy(exec_prefix, argv0_path);
+    joinpath(exec_prefix, "pybuilddir.txt");
+    if (isfile(exec_prefix)) {
+        FILE *f = fopen(exec_prefix, "r");
+        if (f == NULL)
+            errno = 0;
+        else {
+            char rel_builddir_path[MAXPATHLEN+1];
+            size_t n;
+            n = fread(rel_builddir_path, 1, MAXPATHLEN, f);
+            rel_builddir_path[n] = '\0';
+            fclose(f);
+            strcpy(exec_prefix, argv0_path);
+            joinpath(exec_prefix, rel_builddir_path);
+            return -1;
+        }
+    }
+
+    /* Search from argv0_path, until root is found */
+    copy_absolute(exec_prefix, argv0_path);
+    do {
+        n = strlen(exec_prefix);
+        joinpath(exec_prefix, lib_python);
+        joinpath(exec_prefix, "lib-dynload");
+        if (isdir(exec_prefix))
+            return 1;
+        exec_prefix[n] = '\0';
+        reduce(exec_prefix);
+    } while (exec_prefix[0]);
+
+    /* Look at configure's EXEC_PREFIX */
+    strncpy(exec_prefix, EXEC_PREFIX, MAXPATHLEN);
+    joinpath(exec_prefix, lib_python);
+    joinpath(exec_prefix, "lib-dynload");
+    if (isdir(exec_prefix))
+        return 1;
+
+    /* Fail */
+    return 0;
+}
+
+
+static void
+calculate_path(void)
+{
+    extern char *Py_GetProgramName(void);
+
+    static char delimiter[2] = {DELIM, '\0'};
+    static char separator[2] = {SEP, '\0'};
+    char *pythonpath = PYTHONPATH;
+    char *rtpypath = Py_GETENV("PYTHONPATH");
+    char *home = Py_GetPythonHome();
+    char *path = getenv("PATH");
+    char *prog = Py_GetProgramName();
+    char argv0_path[MAXPATHLEN+1];
+    char zip_path[MAXPATHLEN+1];
+    int pfound, efound; /* 1 if found; -1 if found build directory */
+    char *buf;
+    size_t bufsz;
+    size_t prefixsz;
+    char *defpath = pythonpath;
+#ifdef WITH_NEXT_FRAMEWORK
+    NSModule pythonModule;
+#endif
+#ifdef __APPLE__
+#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_4
+    uint32_t nsexeclength = MAXPATHLEN;
+#else
+    unsigned long nsexeclength = MAXPATHLEN;
+#endif
+#endif
+
+        /* If there is no slash in the argv0 path, then we have to
+         * assume python is on the user's $PATH, since there's no
+         * other way to find a directory to start the search from.  If
+         * $PATH isn't exported, you lose.
+         */
+        if (strchr(prog, SEP))
+                strncpy(progpath, prog, MAXPATHLEN);
+#ifdef __APPLE__
+     /* On Mac OS X, if a script uses an interpreter of the form
+      * "#!/opt/python2.3/bin/python", the kernel only passes "python"
+      * as argv[0], which falls through to the $PATH search below.
+      * If /opt/python2.3/bin isn't in your path, or is near the end,
+      * this algorithm may incorrectly find /usr/bin/python. To work
+      * around this, we can use _NSGetExecutablePath to get a better
+      * hint of what the intended interpreter was, although this
+      * will fail if a relative path was used; but in that case,
+      * absolutize() should help us out below.
+      */
+     else if(0 == _NSGetExecutablePath(progpath, &nsexeclength) && progpath[0] == SEP)
+       ;
+#endif /* __APPLE__ */
+        else if (path) {
+                while (1) {
+                        char *delim = strchr(path, DELIM);
+
+                        if (delim) {
+                                size_t len = delim - path;
+                                if (len > MAXPATHLEN)
+                                        len = MAXPATHLEN;
+                                strncpy(progpath, path, len);
+                                *(progpath + len) = '\0';
+                        }
+                        else
+                                strncpy(progpath, path, MAXPATHLEN);
+
+                        joinpath(progpath, prog);
+                        if (isxfile(progpath))
+                                break;
+
+                        if (!delim) {
+                                progpath[0] = '\0';
+                                break;
+                        }
+                        path = delim + 1;
+                }
+        }
+        else
+                progpath[0] = '\0';
+        if (progpath[0] != SEP && progpath[0] != '\0')
+                absolutize(progpath);
+        strncpy(argv0_path, progpath, MAXPATHLEN);
+        argv0_path[MAXPATHLEN] = '\0';
+
+#ifdef WITH_NEXT_FRAMEWORK
+        /* On Mac OS X we have a special case if we're running from a framework.
+        ** This is because the python home should be set relative to the library,
+        ** which is in the framework, not relative to the executable, which may
+        ** be outside of the framework. Except when we're in the build directory...
+        */
+    pythonModule = NSModuleForSymbol(NSLookupAndBindSymbol("_Py_Initialize"));
+    /* Use dylib functions to find out where the framework was loaded from */
+    buf = (char *)NSLibraryNameForModule(pythonModule);
+    if (buf != NULL) {
+        /* We're in a framework. */
+        /* See if we might be in the build directory. The framework in the
+        ** build directory is incomplete, it only has the .dylib and a few
+        ** needed symlinks, it doesn't have the Lib directories and such.
+        ** If we're running with the framework from the build directory we must
+        ** be running the interpreter in the build directory, so we use the
+        ** build-directory-specific logic to find Lib and such.
+        */
+        strncpy(argv0_path, buf, MAXPATHLEN);
+        reduce(argv0_path);
+        joinpath(argv0_path, lib_python);
+        joinpath(argv0_path, LANDMARK);
+        if (!ismodule(argv0_path)) {
+                /* We are in the build directory so use the name of the
+                   executable - we know that the absolute path is passed */
+                strncpy(argv0_path, progpath, MAXPATHLEN);
+        }
+        else {
+                /* Use the location of the library as the progpath */
+                strncpy(argv0_path, buf, MAXPATHLEN);
+        }
+    }
+#endif
+
+#if HAVE_READLINK
+    {
+        char tmpbuffer[MAXPATHLEN+1];
+        int linklen = readlink(progpath, tmpbuffer, MAXPATHLEN);
+        while (linklen != -1) {
+            /* It's not null terminated! */
+            tmpbuffer[linklen] = '\0';
+            if (tmpbuffer[0] == SEP)
+                /* tmpbuffer should never be longer than MAXPATHLEN,
+                   but extra check does not hurt */
+                strncpy(argv0_path, tmpbuffer, MAXPATHLEN + 1);
+            else {
+                /* Interpret relative to progpath */
+                reduce(argv0_path);
+                joinpath(argv0_path, tmpbuffer);
+            }
+            linklen = readlink(argv0_path, tmpbuffer, MAXPATHLEN);
+        }
+    }
+#endif /* HAVE_READLINK */
+
+    reduce(argv0_path);
+    /* At this point, argv0_path is guaranteed to be less than
+       MAXPATHLEN bytes long.
+    */
+
+    if (!(pfound = search_for_prefix(argv0_path, home))) {
+        if (!Py_FrozenFlag)
+            fprintf(stderr,
+                "Could not find platform independent libraries <prefix>\n");
+        strncpy(prefix, PREFIX, MAXPATHLEN);
+        joinpath(prefix, lib_python);
+    }
+    else
+        reduce(prefix);
+
+    strncpy(zip_path, prefix, MAXPATHLEN);
+    zip_path[MAXPATHLEN] = '\0';
+    if (pfound > 0) { /* Use the reduced prefix returned by Py_GetPrefix() */
+        reduce(zip_path);
+        reduce(zip_path);
+    }
+    else
+        strncpy(zip_path, PREFIX, MAXPATHLEN);
+    joinpath(zip_path, "lib32/python00.zip");
+    bufsz = strlen(zip_path);   /* Replace "00" with version */
+    zip_path[bufsz - 6] = VERSION[0];
+    zip_path[bufsz - 5] = VERSION[2];
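+    /* e.g. the trailing "python00.zip" becomes "python27.zip" when VERSION
+       is "2.7" */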
+
+    if (!(efound = search_for_exec_prefix(argv0_path, home))) {
+        if (!Py_FrozenFlag)
+            fprintf(stderr,
+                "Could not find platform dependent libraries <exec_prefix>\n");
+        strncpy(exec_prefix, EXEC_PREFIX, MAXPATHLEN);
+        joinpath(exec_prefix, "lib32/lib-dynload");
+    }
+    /* If we found EXEC_PREFIX do *not* reduce it!  (Yet.) */
+
+    if ((!pfound || !efound) && !Py_FrozenFlag)
+        fprintf(stderr,
+                "Consider setting $PYTHONHOME to <prefix>[:<exec_prefix>]\n");
+
+    /* Calculate size of return buffer.
+     */
+    bufsz = 0;
+
+    if (rtpypath)
+        bufsz += strlen(rtpypath) + 1;
+
+    prefixsz = strlen(prefix) + 1;
+
+    while (1) {
+        char *delim = strchr(defpath, DELIM);
+
+        if (defpath[0] != SEP)
+            /* Paths are relative to prefix */
+            bufsz += prefixsz;
+
+        if (delim)
+            bufsz += delim - defpath + 1;
+        else {
+            bufsz += strlen(defpath) + 1;
+            break;
+        }
+        defpath = delim + 1;
+    }
+
+    bufsz += strlen(zip_path) + 1;
+    bufsz += strlen(exec_prefix) + 1;
+
+    /* This is the only malloc call in this file */
+    buf = (char *)PyMem_Malloc(bufsz);
+
+    if (buf == NULL) {
+        /* We can't exit, so print a warning and limp along */
+        fprintf(stderr, "Not enough memory for dynamic PYTHONPATH.\n");
+        fprintf(stderr, "Using default static PYTHONPATH.\n");
+        module_search_path = PYTHONPATH;
+    }
+    else {
+        /* Run-time value of $PYTHONPATH goes first */
+        if (rtpypath) {
+            strcpy(buf, rtpypath);
+            strcat(buf, delimiter);
+        }
+        else
+            buf[0] = '\0';
+
+        /* Next is the default zip path */
+        strcat(buf, zip_path);
+        strcat(buf, delimiter);
+
+        /* Next goes merge of compile-time $PYTHONPATH with
+         * dynamically located prefix.
+         */
+        defpath = pythonpath;
+        while (1) {
+            char *delim = strchr(defpath, DELIM);
+
+            if (defpath[0] != SEP) {
+                strcat(buf, prefix);
+                if (prefixsz >= 2 && prefix[prefixsz - 2] != SEP &&
+                    defpath[0] != (delim ? DELIM : '\0')) {  /* not empty */
+                    strcat(buf, separator);
+                }
+            }
+
+            if (delim) {
+                size_t len = delim - defpath + 1;
+                size_t end = strlen(buf) + len;
+                strncat(buf, defpath, len);
+                *(buf + end) = '\0';
+            }
+            else {
+                strcat(buf, defpath);
+                break;
+            }
+            defpath = delim + 1;
+        }
+        strcat(buf, delimiter);
+
+        /* Finally, on goes the directory for dynamic-load modules */
+        strcat(buf, exec_prefix);
+
+        /* And publish the results */
+        module_search_path = buf;
+    }
+
+    /* Reduce prefix and exec_prefix to their essence,
+     * e.g. /usr/local/lib/python1.5 is reduced to /usr/local.
+     * If we're loading relative to the build directory,
+     * return the compiled-in defaults instead.
+     */
+    if (pfound > 0) {
+        reduce(prefix);
+        reduce(prefix);
+        /* The prefix is the root directory, but reduce() chopped
+         * off the "/". */
+        if (!prefix[0])
+                strcpy(prefix, separator);
+    }
+    else
+        strncpy(prefix, PREFIX, MAXPATHLEN);
+
+    if (efound > 0) {
+        reduce(exec_prefix);
+        reduce(exec_prefix);
+        reduce(exec_prefix);
+        if (!exec_prefix[0])
+                strcpy(exec_prefix, separator);
+    }
+    else
+        strncpy(exec_prefix, EXEC_PREFIX, MAXPATHLEN);
+}
+
+
+/* External interface */
+
+char *
+Py_GetPath(void)
+{
+    if (!module_search_path)
+        calculate_path();
+    return module_search_path;
+}
+
+char *
+Py_GetPrefix(void)
+{
+    if (!module_search_path)
+        calculate_path();
+    return prefix;
+}
+
+char *
+Py_GetExecPrefix(void)
+{
+    if (!module_search_path)
+        calculate_path();
+    return exec_prefix;
+}
+
+char *
+Py_GetProgramFullPath(void)
+{
+    if (!module_search_path)
+        calculate_path();
+    return progpath;
+}
+
+
+#ifdef __cplusplus
+}
+#endif
+
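The external interface above (Py_GetPath, Py_GetPrefix, Py_GetExecPrefix,
Py_GetProgramFullPath) is what an embedding application sees once
calculate_path() has run. As a minimal sketch only -- not part of this patch,
and assuming a Python 2.7 development environment so that
`python2.7-config --cflags --ldflags` supplies suitable compiler flags --
the computed values can be inspected like this:

    #include <Python.h>
    #include <stdio.h>

    int main(int argc, char *argv[])
    {
        Py_SetProgramName(argv[0]);   /* feeds the argv[0]-based search    */
        Py_Initialize();              /* triggers calculate_path() lazily  */
        printf("prefix:      %s\n", Py_GetPrefix());
        printf("exec_prefix: %s\n", Py_GetExecPrefix());
        printf("module path: %s\n", Py_GetPath());
        Py_Finalize();
        return 0;
    }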
Index: Modules
===================================================================
--- Modules	(nonexistent)
+++ Modules	(revision 5)

Property changes on: Modules
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,73 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~
Index: setup.py
===================================================================
--- setup.py	(nonexistent)
+++ setup.py	(revision 5)
@@ -0,0 +1,2352 @@
+# Autodetecting setup.py script for building the Python extensions
+#
+
+__version__ = "$Revision$"
+
+import sys, os, imp, re, optparse
+from glob import glob
+from platform import machine as platform_machine
+import sysconfig
+
+from distutils import log
+from distutils import text_file
+from distutils.errors import *
+from distutils.core import Extension, setup
+from distutils.command.build_ext import build_ext
+from distutils.command.install import install
+from distutils.command.install_lib import install_lib
+from distutils.spawn import find_executable
+
+cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ
+
+def get_platform():
+    # cross build
+    if "_PYTHON_HOST_PLATFORM" in os.environ:
+        return os.environ["_PYTHON_HOST_PLATFORM"]
+    # Get value of sys.platform
+    if sys.platform.startswith('osf1'):
+        return 'osf1'
+    return sys.platform
+host_platform = get_platform()
+
+# Were we compiled --with-pydebug or with #define Py_DEBUG?
+COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS"))
+
+# This global variable is used to hold the list of modules to be disabled.
+disabled_module_list = []
+
+def add_dir_to_list(dirlist, dir):
+    """Add the directory 'dir' to the list 'dirlist' (at the front) if
+    1) 'dir' is not already in 'dirlist'
+    2) 'dir' actually exists, and is a directory."""
+    if dir is not None and dir not in dirlist:
+        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
+            # If in a macOS SDK path, check relative to the SDK root
+            dir_exists = os.path.isdir(
+                os.path.join(macosx_sdk_root(), dir[1:]))
+        else:
+            dir_exists = os.path.isdir(dir)
+        if dir_exists:
+            dirlist.insert(0, dir)
+
+MACOS_SDK_ROOT = None
+
+def macosx_sdk_root():
+    """Return the directory of the current macOS SDK.
+
+    If no SDK was explicitly configured, call the compiler to find which
+    include file paths are being searched by default.  Use '/' if the
+    compiler is searching /usr/include (meaning system header files are
+    installed) or use the root of an SDK if that is being searched.
+    (The SDK may be supplied via Xcode or via the Command Line Tools).
+    The SDK paths used by Apple-supplied tool chains depend on the
+    setting of various variables; see the xcrun man page for more info.
+    """
+    global MACOS_SDK_ROOT
+
+    # If already called, return cached result.
+    if MACOS_SDK_ROOT:
+        return MACOS_SDK_ROOT
+
+    cflags = sysconfig.get_config_var('CFLAGS')
+    m = re.search(r'-isysroot\s+(\S+)', cflags)
+    if m is not None:
+        MACOS_SDK_ROOT = m.group(1)
+    else:
+        MACOS_SDK_ROOT = '/'
+        cc = sysconfig.get_config_var('CC')
+        tmpfile = '/tmp/setup_sdk_root.%d' % os.getpid()
+        try:
+            os.unlink(tmpfile)
+        except:
+            pass
+        ret = os.system('%s -E -v - </dev/null 2>%s 1>/dev/null' % (cc, tmpfile))
+        in_incdirs = False
+        try:
+            if ret >> 8 == 0:
+                with open(tmpfile) as fp:
+                    for line in fp.readlines():
+                        if line.startswith("#include <...>"):
+                            in_incdirs = True
+                        elif line.startswith("End of search list"):
+                            in_incdirs = False
+                        elif in_incdirs:
+                            line = line.strip()
+                            if line == '/usr/include':
+                                MACOS_SDK_ROOT = '/'
+                            elif line.endswith(".sdk/usr/include"):
+                                MACOS_SDK_ROOT = line[:-12]
+        finally:
+            os.unlink(tmpfile)
+
+    return MACOS_SDK_ROOT
+
+def is_macosx_sdk_path(path):
+    """
+    Returns True if 'path' can be located in an OSX SDK
+    """
+    return ( (path.startswith('/usr/') and not path.startswith('/usr/local'))
+                or path.startswith('/System/')
+                or path.startswith('/Library/') )
+
+def find_file(filename, std_dirs, paths):
+    """Searches for the directory where a given file is located,
+    and returns a possibly-empty list of additional directories, or None
+    if the file couldn't be found at all.
+
+    'filename' is the name of a file, such as readline.h or libcrypto.a.
+    'std_dirs' is the list of standard system directories; if the
+        file is found in one of them, no additional directives are needed.
+    'paths' is a list of additional locations to check; if the file is
+        found in one of them, the resulting list will contain the directory.
+    """
+    if host_platform == 'darwin':
+        # Honor the MacOSX SDK setting when one was specified.
+        # An SDK is a directory with the same structure as a real
+        # system, but with only header files and libraries.
+        sysroot = macosx_sdk_root()
+
+    # Check the standard locations
+    for dir in std_dirs:
+        f = os.path.join(dir, filename)
+
+        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
+            f = os.path.join(sysroot, dir[1:], filename)
+
+        if os.path.exists(f): return []
+
+    # Check the additional directories
+    for dir in paths:
+        f = os.path.join(dir, filename)
+
+        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
+            f = os.path.join(sysroot, dir[1:], filename)
+
+        if os.path.exists(f):
+            return [dir]
+
+    # Not found anywhere
+    return None
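+# Hedged usage sketch (directory names are examples only):
+#
+#   find_file('readline/readline.h', ['/usr/include'], ['/opt/readline/include'])
+#
+# returns [] when the header sits in /usr/include, ['/opt/readline/include']
+# when it is only found there, and None when it is found in neither list.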
+
+def find_library_file(compiler, libname, std_dirs, paths):
+    result = compiler.find_library_file(std_dirs + paths, libname)
+    if result is None:
+        return None
+
+    if host_platform == 'darwin':
+        sysroot = macosx_sdk_root()
+
+    # Check whether the found file is in one of the standard directories
+    dirname = os.path.dirname(result)
+    for p in std_dirs:
+        # Ensure path doesn't end with path separator
+        p = p.rstrip(os.sep)
+
+        if host_platform == 'darwin' and is_macosx_sdk_path(p):
+            # Note that, as of Xcode 7, Apple SDKs may contain textual stub
+            # libraries with .tbd extensions rather than the normal .dylib
+            # shared libraries installed in /.  The Apple compiler tool
+            # chain handles this transparently but it can cause problems
+            # for programs that are being built with an SDK and searching
+            # for specific libraries.  Distutils find_library_file() now
+            # knows to also search for and return .tbd files.  But callers
+            # of find_library_file need to keep in mind that the base filename
+            # of the returned SDK library file might have a different extension
+            # from that of the library file installed on the running system,
+            # for example:
+            #   /Applications/Xcode.app/Contents/Developer/Platforms/
+            #       MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/
+            #       usr/lib/libedit.tbd
+            # vs
+            #   /usr/lib/libedit.dylib
+            if os.path.join(sysroot, p[1:]) == dirname:
+                return [ ]
+
+        if p == dirname:
+            return [ ]
+
+    # Otherwise, it must have been in one of the additional directories,
+    # so we have to figure out which one.
+    for p in paths:
+        # Ensure path doesn't end with path separator
+        p = p.rstrip(os.sep)
+
+        if host_platform == 'darwin' and is_macosx_sdk_path(p):
+            if os.path.join(sysroot, p[1:]) == dirname:
+                return [ p ]
+
+        if p == dirname:
+            return [p]
+    else:
+        assert False, "Internal error: Path not found in std_dirs or paths"
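+# Hedged sketch of the return convention (paths are examples only): a library
+# found in one of 'std_dirs' yields [], i.e. no extra -L flag is needed; one
+# found only under e.g. /usr/local/ssl/lib32 (a 'paths' entry used later for
+# _ssl) yields ['/usr/local/ssl/lib32']; None means it was not found at all.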
+
+def module_enabled(extlist, modname):
+    """Returns whether the module 'modname' is present in the list
+    of extensions 'extlist'."""
+    extlist = [ext for ext in extlist if ext.name == modname]
+    return len(extlist)
+
+def find_module_file(module, dirlist):
+    """Find a module in a set of possible folders. If it is not found
+    return the unadorned filename"""
+    list = find_file(module, [], dirlist)
+    if not list:
+        return module
+    if len(list) > 1:
+        log.info("WARNING: multiple copies of %s found"%module)
+    return os.path.join(list[0], module)
+
+class PyBuildExt(build_ext):
+
+    def __init__(self, dist):
+        build_ext.__init__(self, dist)
+        self.failed = []
+
+    def build_extensions(self):
+
+        # Detect which modules should be compiled
+        missing = self.detect_modules()
+
+        # Remove modules that are present on the disabled list
+        extensions = [ext for ext in self.extensions
+                      if ext.name not in disabled_module_list]
+        # move ctypes to the end, it depends on other modules
+        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
+        if "_ctypes" in ext_map:
+            ctypes = extensions.pop(ext_map["_ctypes"])
+            extensions.append(ctypes)
+        self.extensions = extensions
+
+        # Fix up the autodetected modules, prefixing all the source files
+        # with Modules/ and adding Python's include directory to the path.
+        (srcdir,) = sysconfig.get_config_vars('srcdir')
+        if not srcdir:
+            # Maybe running on Windows but not using CYGWIN?
+            raise ValueError("No source directory; cannot proceed.")
+        srcdir = os.path.abspath(srcdir)
+        moddirlist = [os.path.join(srcdir, 'Modules')]
+
+        # Platform-dependent module source and include directories
+        incdirlist = []
+
+        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
+            sysconfig.get_config_var("CONFIG_ARGS")):
+            # Mac OS X also includes some mac-specific modules
+            macmoddir = os.path.join(srcdir, 'Mac/Modules')
+            moddirlist.append(macmoddir)
+            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
+
+        # Fix up the paths for scripts, too
+        self.distribution.scripts = [os.path.join(srcdir, filename)
+                                     for filename in self.distribution.scripts]
+
+        # Python header files
+        headers = [sysconfig.get_config_h_filename()]
+        headers += glob(os.path.join(sysconfig.get_path('include'), "*.h"))
+        for ext in self.extensions[:]:
+            ext.sources = [ find_module_file(filename, moddirlist)
+                            for filename in ext.sources ]
+            if ext.depends is not None:
+                ext.depends = [find_module_file(filename, moddirlist)
+                               for filename in ext.depends]
+            else:
+                ext.depends = []
+            # re-compile extensions if a header file has been changed
+            ext.depends.extend(headers)
+
+            # platform specific include directories
+            ext.include_dirs.extend(incdirlist)
+
+            # If a module has already been built statically,
+            # don't build it here
+            if ext.name in sys.builtin_module_names:
+                self.extensions.remove(ext)
+
+        # Parse Modules/Setup and Modules/Setup.local to figure out which
+        # modules are turned on in the file.
+        remove_modules = []
+        for filename in ('Modules/Setup', 'Modules/Setup.local'):
+            input = text_file.TextFile(filename, join_lines=1)
+            while 1:
+                line = input.readline()
+                if not line: break
+                line = line.split()
+                remove_modules.append(line[0])
+            input.close()
+
+        for ext in self.extensions[:]:
+            if ext.name in remove_modules:
+                self.extensions.remove(ext)
+
+        # When you run "make CC=altcc" or something similar, you really want
+        # those environment variables passed into the setup.py phase.  Here's
+        # a small set of useful ones.
+        compiler = os.environ.get('CC')
+        args = {}
+        # unfortunately, distutils doesn't let us provide separate C and C++
+        # compilers
+        if compiler is not None:
+            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
+            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
+        self.compiler.set_executables(**args)
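+        # Illustrative effect of the override above (compiler name is a
+        # placeholder): running "make CC=mips-linux-gnu-gcc" would set
+        # compiler_so to roughly "mips-linux-gnu-gcc $(CCSHARED) $(CFLAGS)",
+        # so extensions are built with the same compiler as the interpreter.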
+
+        build_ext.build_extensions(self)
+
+        longest = 0
+        if self.extensions:
+            longest = max([len(e.name) for e in self.extensions])
+        if self.failed:
+            longest = max(longest, max([len(name) for name in self.failed]))
+
+        def print_three_column(lst):
+            lst.sort(key=str.lower)
+            # guarantee zip() doesn't drop anything
+            while len(lst) % 3:
+                lst.append("")
+            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
+                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
+                                              longest, g)
+
+        if missing:
+            print
+            print ("Python build finished, but the necessary bits to build "
+                   "these modules were not found:")
+            print_three_column(missing)
+            print ("To find the necessary bits, look in setup.py in"
+                   " detect_modules() for the module's name.")
+            print
+
+        if self.failed:
+            failed = self.failed[:]
+            print
+            print "Failed to build these modules:"
+            print_three_column(failed)
+            print
+
+    def build_extension(self, ext):
+
+        if ext.name == '_ctypes':
+            if not self.configure_ctypes(ext):
+                return
+
+        try:
+            build_ext.build_extension(self, ext)
+        except (CCompilerError, DistutilsError), why:
+            self.announce('WARNING: building of extension "%s" failed: %s' %
+                          (ext.name, sys.exc_info()[1]))
+            self.failed.append(ext.name)
+            return
+        # Workaround for Mac OS X: The Carbon-based modules cannot be
+        # reliably imported into a command-line Python
+        if 'Carbon' in ext.extra_link_args:
+            self.announce(
+                'WARNING: skipping import check for Carbon-based "%s"' %
+                ext.name)
+            return
+
+        if host_platform == 'darwin' and (
+                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
+            # Don't bother doing an import check when an extension was
+            # built with an explicit '-arch' flag on OSX. That's currently
+            # only used to build 32-bit only extensions in a 4-way
+            # universal build and loading 32-bit code into a 64-bit
+            # process will fail.
+            self.announce(
+                'WARNING: skipping import check for "%s"' %
+                ext.name)
+            return
+
+        # Workaround for Cygwin: Cygwin currently has fork issues when many
+        # modules have been imported
+        if host_platform == 'cygwin':
+            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
+                % ext.name)
+            return
+        ext_filename = os.path.join(
+            self.build_lib,
+            self.get_ext_filename(self.get_ext_fullname(ext.name)))
+
+        # Don't try to load extensions for cross builds
+        if cross_compiling:
+            return
+
+        try:
+            imp.load_dynamic(ext.name, ext_filename)
+        except ImportError, why:
+            self.failed.append(ext.name)
+            self.announce('*** WARNING: renaming "%s" since importing it'
+                          ' failed: %s' % (ext.name, why), level=3)
+            assert not self.inplace
+            basename, tail = os.path.splitext(ext_filename)
+            newname = basename + "_failed" + tail
+            if os.path.exists(newname):
+                os.remove(newname)
+            os.rename(ext_filename, newname)
+
+            # XXX -- This relies on a Vile HACK in
+            # distutils.command.build_ext.build_extension().  The
+            # _built_objects attribute is stored there strictly for
+            # use here.
+            # If there is a failure, _built_objects may not be there,
+            # so catch the AttributeError and move on.
+            try:
+                for filename in self._built_objects:
+                    os.remove(filename)
+            except AttributeError:
+                self.announce('unable to remove files (ignored)')
+        except:
+            exc_type, why, tb = sys.exc_info()
+            self.announce('*** WARNING: importing extension "%s" '
+                          'failed with %s: %s' % (ext.name, exc_type, why),
+                          level=3)
+            self.failed.append(ext.name)
+
+    def add_multiarch_paths(self):
+        # Debian/Ubuntu multiarch support.
+        # https://wiki.ubuntu.com/MultiarchSpec
+        cc = sysconfig.get_config_var('CC')
+        tmpfile = os.path.join(self.build_temp, 'multiarch')
+        if not os.path.exists(self.build_temp):
+            os.makedirs(self.build_temp)
+        ret = os.system(
+            '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile))
+        multiarch_path_component = ''
+        try:
+            if ret >> 8 == 0:
+                with open(tmpfile) as fp:
+                    multiarch_path_component = fp.readline().strip()
+        finally:
+            os.unlink(tmpfile)
+
+        if multiarch_path_component != '':
+            add_dir_to_list(self.compiler.library_dirs,
+                            '/usr/lib/' + multiarch_path_component)
+            add_dir_to_list(self.compiler.include_dirs,
+                            '/usr/include/' + multiarch_path_component)
+            return
+
+        if not find_executable('dpkg-architecture'):
+            return
+        opt = ''
+        if cross_compiling:
+            opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE')
+        tmpfile = os.path.join(self.build_temp, 'multiarch')
+        if not os.path.exists(self.build_temp):
+            os.makedirs(self.build_temp)
+        ret = os.system(
+            'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
+            (opt, tmpfile))
+        try:
+            if ret >> 8 == 0:
+                with open(tmpfile) as fp:
+                    multiarch_path_component = fp.readline().strip()
+                add_dir_to_list(self.compiler.library_dirs,
+                                '/usr/lib/' + multiarch_path_component)
+                add_dir_to_list(self.compiler.include_dirs,
+                                '/usr/include/' + multiarch_path_component)
+        finally:
+            os.unlink(tmpfile)
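+        # For illustration only (the triple depends on the actual host): on a
+        # Debian-style x86_64 system 'gcc -print-multiarch' or
+        # dpkg-architecture typically reports 'x86_64-linux-gnu', so
+        # /usr/lib/x86_64-linux-gnu and /usr/include/x86_64-linux-gnu are
+        # prepended to the library and include search paths.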
+
+    def add_gcc_paths(self):
+        gcc = sysconfig.get_config_var('CC')
+        tmpfile = os.path.join(self.build_temp, 'gccpaths')
+        if not os.path.exists(self.build_temp):
+            os.makedirs(self.build_temp)
+        ret = os.system('%s -E -v - </dev/null 2>%s 1>/dev/null' % (gcc, tmpfile))
+        is_gcc = False
+        in_incdirs = False
+        inc_dirs = []
+        lib_dirs = []
+        try:
+            if ret >> 8 == 0:
+                with open(tmpfile) as fp:
+                    for line in fp.readlines():
+                        if line.startswith("gcc version"):
+                            is_gcc = True
+                        elif line.startswith("#include <...>"):
+                            in_incdirs = True
+                        elif line.startswith("End of search list"):
+                            in_incdirs = False
+                        elif is_gcc and line.startswith("LIBRARY_PATH"):
+                            for d in line.strip().split("=")[1].split(":"):
+                                d = os.path.normpath(d)
+                                if '/gcc/' not in d:
+                                    add_dir_to_list(self.compiler.library_dirs,
+                                                    d)
+                        elif is_gcc and in_incdirs and '/gcc/' not in line:
+                            add_dir_to_list(self.compiler.include_dirs,
+                                            line.strip())
+        finally:
+            os.unlink(tmpfile)
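+        # Hedged example of the output parsed above (paths are illustrative):
+        # 'gcc -E -v -' prints an "#include <...> search starts here:" block
+        # listing e.g. <sysroot>/usr/include, plus a LIBRARY_PATH= line such
+        # as <sysroot>/lib:<sysroot>/usr/lib; entries containing '/gcc/' are
+        # skipped, the rest are added to the include and library search dirs.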
+
+    def detect_modules(self):
+        # Ensure that /usr/local is always used
+        if not cross_compiling:
+            add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib32')
+            add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
+        if cross_compiling:
+            self.add_gcc_paths()
+        self.add_multiarch_paths()
+
+        # Add paths specified in the environment variables LDFLAGS and
+        # CPPFLAGS for header and library files.
+        # We must get the values from the Makefile and not the environment
+        # directly since an inconsistently reproducible issue comes up where
+        # the environment variable is not set even though the value was passed
+        # into configure and stored in the Makefile (issue found on OS X 10.3).
+        for env_var, arg_name, dir_list in (
+                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
+                ('LDFLAGS', '-L', self.compiler.library_dirs),
+                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
+            env_val = sysconfig.get_config_var(env_var)
+            if env_val:
+                # To prevent optparse from raising an exception about any
+                # options in env_val that it doesn't know about we strip out
+                # all double dashes and any dashes followed by a character
+                # that is not for the option we are dealing with.
+                #
+                # Please note that the order of the regex is important!  We
+                # must strip out double-dashes first so that we don't end up
+                # substituting "--Long" with "-Long" and thus leading to "ong"
+                # being used for a library directory.
+                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
+                                 ' ', env_val)
+                parser = optparse.OptionParser()
+                # Make sure that allowing args interspersed with options is
+                # allowed
+                parser.allow_interspersed_args = True
+                parser.error = lambda msg: None
+                parser.add_option(arg_name, dest="dirs", action="append")
+                options = parser.parse_args(env_val.split())[0]
+                if options.dirs:
+                    for directory in reversed(options.dirs):
+                        add_dir_to_list(dir_list, directory)
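+        # Worked example of the parsing above (flag values invented for
+        # illustration): with LDFLAGS="-L/usr/local/lib32 --sysroot=/tmp/rootfs"
+        # the regex strips the dashes from the unrelated "--sysroot=..." token
+        # so optparse ignores it, the -L option then collects
+        # ['/usr/local/lib32'], and that directory is pushed to the front of
+        # self.compiler.library_dirs.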
+
+        if os.path.normpath(sys.prefix) != '/usr' \
+                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
+            # OSX note: Don't add LIBDIR and INCLUDEDIR when building a framework
+            # (PYTHONFRAMEWORK is set), to avoid linking problems when
+            # building a framework with different architectures than
+            # the one that is currently installed (issue #7473)
+            add_dir_to_list(self.compiler.library_dirs,
+                            sysconfig.get_config_var("LIBDIR"))
+            add_dir_to_list(self.compiler.include_dirs,
+                            sysconfig.get_config_var("INCLUDEDIR"))
+
+        try:
+            have_unicode = unicode
+        except NameError:
+            have_unicode = 0
+
+        # lib_dirs and inc_dirs are used to search for files;
+        # if a file is found in one of those directories, it can
+        # be assumed that no additional -I,-L directives are needed.
+        inc_dirs = self.compiler.include_dirs[:]
+        lib_dirs = self.compiler.library_dirs[:]
+        if not cross_compiling:
+            for d in (
+                '/usr/include',
+                ):
+                add_dir_to_list(inc_dirs, d)
+            for d in (
+                '/lib64', '/usr/lib64',
+                '/lib', '/usr/lib',
+                ):
+                add_dir_to_list(lib_dirs, d)
+        exts = []
+        missing = []
+
+        config_h = sysconfig.get_config_h_filename()
+        config_h_vars = sysconfig.parse_config_h(open(config_h))
+
+        srcdir = sysconfig.get_config_var('srcdir')
+
+        # Check for AtheOS which has libraries in non-standard locations
+        if host_platform == 'atheos':
+            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
+            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
+            inc_dirs += ['/system/include', '/atheos/autolnk/include']
+            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
+
+        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
+        if host_platform in ['osf1', 'unixware7', 'openunix8']:
+            lib_dirs += ['/usr/ccs/lib']
+
+        # HP-UX11iv3 keeps files in lib/hpux folders.
+        if host_platform == 'hp-ux11':
+            lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']
+
+        if host_platform == 'darwin':
+            # This should work on any unixy platform ;-)
+            # If the user has bothered specifying additional -I and -L flags
+            # in OPT and LDFLAGS we might as well use them here.
+            #   NOTE: using shlex.split would technically be more correct, but
+            # also gives a bootstrap problem. Let's hope nobody uses directories
+            # with whitespace in the name to store libraries.
+            cflags, ldflags = sysconfig.get_config_vars(
+                    'CFLAGS', 'LDFLAGS')
+            for item in cflags.split():
+                if item.startswith('-I'):
+                    inc_dirs.append(item[2:])
+
+            for item in ldflags.split():
+                if item.startswith('-L'):
+                    lib_dirs.append(item[2:])
+
+        # Check for MacOS X, which doesn't need libm.a at all
+        math_libs = ['m']
+        if host_platform in ['darwin', 'beos']:
+            math_libs = []
+
+        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
+
+        #
+        # The following modules are all pretty straightforward, and compile
+        # on pretty much any POSIXish platform.
+        #
+
+        # Some modules that are normally always on:
+        #exts.append( Extension('_weakref', ['_weakref.c']) )
+
+        # array objects
+        exts.append( Extension('array', ['arraymodule.c']) )
+
+        shared_math = 'Modules/_math.o'
+        # complex math library functions
+        exts.append( Extension('cmath', ['cmathmodule.c'],
+                               extra_objects=[shared_math],
+                               depends=['_math.h', shared_math],
+                               libraries=math_libs) )
+        # math library functions, e.g. sin()
+        exts.append( Extension('math',  ['mathmodule.c'],
+                               extra_objects=[shared_math],
+                               depends=['_math.h', shared_math],
+                               libraries=math_libs) )
+        # fast string operations implemented in C
+        exts.append( Extension('strop', ['stropmodule.c']) )
+        # time operations and variables
+        exts.append( Extension('time', ['timemodule.c'],
+                               libraries=math_libs) )
+        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
+                               libraries=math_libs) )
+        # fast iterator tools implemented in C
+        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
+        # code that will be builtins in the future, but conflict with the
+        #  current builtins
+        exts.append( Extension('future_builtins', ['future_builtins.c']) )
+        # random number generator implemented in C
+        exts.append( Extension("_random", ["_randommodule.c"]) )
+        # high-performance collections
+        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
+        # bisect
+        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
+        # heapq
+        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
+        # operator.add() and similar goodies
+        exts.append( Extension('operator', ['operator.c']) )
+        # Python 3.1 _io library
+        exts.append( Extension("_io",
+            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
+             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
+             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
+        # _functools
+        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
+        # _json speedups
+        exts.append( Extension("_json", ["_json.c"]) )
+        # Python C API test module
+        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
+                               depends=['testcapi_long.h']) )
+        # profilers (_lsprof is for cProfile.py)
+        exts.append( Extension('_hotshot', ['_hotshot.c']) )
+        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
+        # static Unicode character database
+        if have_unicode:
+            exts.append( Extension('unicodedata', ['unicodedata.c']) )
+        else:
+            missing.append('unicodedata')
+        # access to ISO C locale support
+        data = open('pyconfig.h').read()
+        m = re.search(r"#\s*define\s+WITH_LIBINTL\s+1\s*", data)
+        if m is not None:
+            locale_libs = ['intl']
+        else:
+            locale_libs = []
+        if host_platform == 'darwin':
+            locale_extra_link_args = ['-framework', 'CoreFoundation']
+        else:
+            locale_extra_link_args = []
+
+
+        exts.append( Extension('_locale', ['_localemodule.c'],
+                               libraries=locale_libs,
+                               extra_link_args=locale_extra_link_args) )
+
+        # Modules with some UNIX dependencies -- on by default:
+        # (If you have a really backward UNIX, select and socket may not be
+        # supported...)
+
+        # fcntl(2) and ioctl(2)
+        libs = []
+        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
+            # May be necessary on AIX for flock function
+            libs = ['bsd']
+        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
+        # pwd(3)
+        exts.append( Extension('pwd', ['pwdmodule.c']) )
+        # grp(3)
+        exts.append( Extension('grp', ['grpmodule.c']) )
+        # spwd, shadow passwords
+        if (config_h_vars.get('HAVE_GETSPNAM', False) or
+                config_h_vars.get('HAVE_GETSPENT', False)):
+            exts.append( Extension('spwd', ['spwdmodule.c']) )
+        else:
+            missing.append('spwd')
+
+        # select(2); not on ancient System V
+        exts.append( Extension('select', ['selectmodule.c']) )
+
+        # Fred Drake's interface to the Python parser
+        exts.append( Extension('parser', ['parsermodule.c']) )
+
+        # cStringIO and cPickle
+        exts.append( Extension('cStringIO', ['cStringIO.c']) )
+        exts.append( Extension('cPickle', ['cPickle.c']) )
+
+        # Memory-mapped files (also works on Win32).
+        if host_platform not in ['atheos']:
+            exts.append( Extension('mmap', ['mmapmodule.c']) )
+        else:
+            missing.append('mmap')
+
+        # Lance Ellinghaus's syslog module
+        # syslog daemon interface
+        exts.append( Extension('syslog', ['syslogmodule.c']) )
+
+        # George Neville-Neil's timing module:
+        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
+        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
+        #exts.append( Extension('timing', ['timingmodule.c']) )
+
+        #
+        # Here ends the simple stuff.  From here on, modules need certain
+        # libraries, are platform-specific, or present other surprises.
+        #
+
+        # Multimedia modules
+        # These don't work for 64-bit platforms!!!
+        # These represent audio samples or images as strings:
+
+        # Operations on audio samples
+        # According to #993173, this one should actually work fine on
+        # 64-bit platforms.
+        exts.append( Extension('audioop', ['audioop.c']) )
+
+        # Disabled on 64-bit platforms
+        if sys.maxsize != 9223372036854775807L:
+            # Operations on images
+            exts.append( Extension('imageop', ['imageop.c']) )
+        else:
+            missing.extend(['imageop'])
+
+        # readline
+        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
+        readline_termcap_library = ""
+        curses_library = ""
+        # Determine if readline is already linked against curses or tinfo.
+        if do_readline and find_executable('ldd'):
+            fp = os.popen("ldd %s" % do_readline)
+            ldd_output = fp.readlines()
+            ret = fp.close()
+            if ret is None or ret >> 8 == 0:
+                for ln in ldd_output:
+                    if 'curses' in ln:
+                        readline_termcap_library = re.sub(
+                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
+                        ).rstrip()
+                        break
+                    if 'tinfo' in ln: # termcap interface split out from ncurses
+                        readline_termcap_library = 'tinfo'
+                        break
+        # Issue 7384: If readline is already linked against curses,
+        # use the same library for the readline and curses modules.
+        if 'curses' in readline_termcap_library:
+            curses_library = readline_termcap_library
+        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
+            curses_library = 'ncursesw'
+        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
+            curses_library = 'ncurses'
+        elif self.compiler.find_library_file(lib_dirs, 'curses'):
+            curses_library = 'curses'
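+        # Illustrative ldd line (soname/version invented): an entry such as
+        # "libncursesw.so.6 => /lib/libncursesw.so.6" sets
+        # readline_termcap_library to 'ncursesw', which is then reused as
+        # curses_library so _curses links against the same curses flavour as
+        # readline (see Issue 7384 above).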
+
+        if host_platform == 'darwin':
+            os_release = int(os.uname()[2].split('.')[0])
+            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+            if (dep_target and
+                    (tuple(int(n) for n in dep_target.split('.')[0:2])
+                        < (10, 5) ) ):
+                os_release = 8
+            if os_release < 9:
+                # MacOSX 10.4 has a broken readline. Don't try to build
+                # the readline module unless the user has installed a fixed
+                # readline package
+                if find_file('readline/rlconf.h', inc_dirs, []) is None:
+                    do_readline = False
+        if do_readline:
+            if host_platform == 'darwin' and os_release < 9:
+                # In every directory on the search path search for a dynamic
+                # library and then a static library, instead of first looking
+                # for dynamic libraries on the entire path.
+                # This way a statically linked custom readline gets picked up
+                # before the (possibly broken) dynamic library in /usr/lib.
+                readline_extra_link_args = ('-Wl,-search_paths_first',)
+            else:
+                readline_extra_link_args = ()
+
+            readline_libs = ['readline']
+            if readline_termcap_library:
+                pass # Issue 7384: Already linked against curses or tinfo.
+            elif curses_library:
+                readline_libs.append(curses_library)
+            elif self.compiler.find_library_file(lib_dirs +
+                                                     ['/usr/lib32/termcap'],
+                                                     'termcap'):
+                readline_libs.append('termcap')
+            exts.append( Extension('readline', ['readline.c'],
+                                   library_dirs=['/usr/lib32/termcap'],
+                                   extra_link_args=readline_extra_link_args,
+                                   libraries=readline_libs) )
+        else:
+            missing.append('readline')
+
+        # crypt module.
+
+        if self.compiler.find_library_file(lib_dirs, 'crypt'):
+            libs = ['crypt']
+        else:
+            libs = []
+        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
+
+        # CSV files
+        exts.append( Extension('_csv', ['_csv.c']) )
+
+        # socket(2)
+        exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'],
+                               depends=['socketmodule.h'],
+                               libraries=math_libs) )
+        # Detect SSL support for the socket module (via _ssl)
+        search_for_ssl_incs_in = [
+                              '/usr/local/ssl/include',
+                              '/usr/contrib/ssl/include/'
+                             ]
+        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
+                             search_for_ssl_incs_in
+                             )
+        if ssl_incs is not None:
+            krb5_h = find_file('krb5.h', inc_dirs,
+                               ['/usr/kerberos/include'])
+            if krb5_h:
+                ssl_incs += krb5_h
+        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
+                                     ['/usr/local/ssl/lib32',
+                                      '/usr/contrib/ssl/lib32/'
+                                     ] )
+
+        if (ssl_incs is not None and
+            ssl_libs is not None):
+            exts.append( Extension('_ssl', ['_ssl.c'],
+                                   include_dirs = ssl_incs,
+                                   library_dirs = ssl_libs,
+                                   libraries = ['ssl', 'crypto'],
+                                   depends = ['socketmodule.h']), )
+        else:
+            missing.append('_ssl')
+
+        # find out which version of OpenSSL we have
+        openssl_ver = 0
+        openssl_ver_re = re.compile(
+            r'^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
+
+        # look for the openssl version header on the compiler search path.
+        opensslv_h = find_file('openssl/opensslv.h', [],
+                inc_dirs + search_for_ssl_incs_in)
+        if opensslv_h:
+            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
+            if host_platform == 'darwin' and is_macosx_sdk_path(name):
+                name = os.path.join(macosx_sdk_root(), name[1:])
+            try:
+                incfile = open(name, 'r')
+                for line in incfile:
+                    m = openssl_ver_re.match(line)
+                    if m:
+                        openssl_ver = eval(m.group(1))
+            except IOError, msg:
+                print "IOError while reading opensslv.h:", msg
+                pass
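+        # Example of the line matched above (version value is illustrative):
+        #   #define OPENSSL_VERSION_NUMBER  0x0090802fL
+        # yields openssl_ver == 0x0090802f, which passes the 0x00907000
+        # minimum checked just below.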
+
+        min_openssl_ver = 0x00907000
+        have_any_openssl = ssl_incs is not None and ssl_libs is not None
+        have_usable_openssl = (have_any_openssl and
+                               openssl_ver >= min_openssl_ver)
+
+        if have_any_openssl:
+            if have_usable_openssl:
+                # The _hashlib module wraps optimized implementations
+                # of hash functions from the OpenSSL library.
+                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
+                                       include_dirs = ssl_incs,
+                                       library_dirs = ssl_libs,
+                                       libraries = ['ssl', 'crypto']) )
+            else:
+                print ("warning: openssl 0x%08x is too old for _hashlib" %
+                       openssl_ver)
+                missing.append('_hashlib')
+        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
+            # The _sha module implements the SHA1 hash algorithm.
+            exts.append( Extension('_sha', ['shamodule.c']) )
+            # The _md5 module implements the RSA Data Security, Inc. MD5
+            # Message-Digest Algorithm, described in RFC 1321.  The
+            # necessary files md5.c and md5.h are included here.
+            exts.append( Extension('_md5',
+                            sources = ['md5module.c', 'md5.c'],
+                            depends = ['md5.h']) )
+
+        min_sha2_openssl_ver = 0x00908000
+        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
+            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
+            exts.append( Extension('_sha256', ['sha256module.c']) )
+            exts.append( Extension('_sha512', ['sha512module.c']) )
+
+        # Modules that provide persistent dictionary-like semantics.  You will
+        # probably want to arrange for at least one of them to be available on
+        # your machine, though none are defined by default because of library
+        # dependencies.  The Python module anydbm.py provides an
+        # implementation independent wrapper for these; dumbdbm.py provides
+        # similar functionality (but slower of course) implemented in Python.
+
+        # Sleepycat^WOracle Berkeley DB interface.
+        #  http://www.oracle.com/database/berkeley-db/db/index.html
+        #
+        # This requires the Sleepycat^WOracle DB code. The supported versions
+        # are set below.  Visit the URL above to download
+        # a release.  Most open source OSes come with one or more
+        # versions of BerkeleyDB already installed.
+
+        max_db_ver = (5, 3)
+        min_db_ver = (4, 3)
+        db_setup_debug = False   # verbose debug prints from this script?
+
+        def allow_db_ver(db_ver):
+            """Returns whether the given BerkeleyDB version is acceptable.
+
+            Args:
+              db_ver: A tuple of the version to verify.
+            """
+            if not (min_db_ver <= db_ver <= max_db_ver):
+                return False
+            # Use this function to filter out known bad configurations.
+            if (4, 6) == db_ver[:2]:
+                # BerkeleyDB 4.6.x is not stable on many architectures.
+                arch = platform_machine()
+                if arch not in ('i386', 'i486', 'i586', 'i686',
+                                'x86_64', 'ia64'):
+                    return False
+            return True
+
+        def gen_db_minor_ver_nums(major):
+            if major == 5:
+                for x in range(max_db_ver[1]+1):
+                    if allow_db_ver((5, x)):
+                        yield x
+            elif major == 4:
+                for x in range(9):
+                    if allow_db_ver((4, x)):
+                        yield x
+            elif major == 3:
+                for x in (3,):
+                    if allow_db_ver((3, x)):
+                        yield x
+            else:
+                raise ValueError("unknown major BerkeleyDB version", major)
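+        # For example, with the (4, 3)..(5, 3) bounds above,
+        # gen_db_minor_ver_nums(4) yields 3 through 8 (6 is skipped on
+        # architectures where BerkeleyDB 4.6.x is considered unstable) and
+        # gen_db_minor_ver_nums(5) yields 0 through 3.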
+
+        # construct a list of paths to look for the header file in on
+        # top of the normal inc_dirs.
+        db_inc_paths = [
+            '/usr/include/db4',
+            '/usr/local/include/db4',
+            '/opt/sfw/include/db4',
+            '/usr/include/db3',
+            '/usr/local/include/db3',
+            '/opt/sfw/include/db3',
+            # Fink defaults (http://fink.sourceforge.net/)
+            '/sw/include/db4',
+            '/sw/include/db3',
+        ]
+        # 4.x minor number specific paths
+        for x in gen_db_minor_ver_nums(4):
+            db_inc_paths.append('/usr/include/db4%d' % x)
+            db_inc_paths.append('/usr/include/db4.%d' % x)
+            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
+            db_inc_paths.append('/usr/local/include/db4%d' % x)
+            db_inc_paths.append('/pkg/db-4.%d/include' % x)
+            db_inc_paths.append('/opt/db-4.%d/include' % x)
+            # MacPorts default (http://www.macports.org/)
+            db_inc_paths.append('/opt/local/include/db4%d' % x)
+        # 3.x minor number specific paths
+        for x in gen_db_minor_ver_nums(3):
+            db_inc_paths.append('/usr/include/db3%d' % x)
+            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
+            db_inc_paths.append('/usr/local/include/db3%d' % x)
+            db_inc_paths.append('/pkg/db-3.%d/include' % x)
+            db_inc_paths.append('/opt/db-3.%d/include' % x)
+
+        if cross_compiling:
+            db_inc_paths = []
+
+        # Add some common subdirectories for Sleepycat DB to the list,
+        # based on the standard include directories. This way DB3/4 gets
+        # picked up when it is installed in a non-standard prefix and
+        # the user has added that prefix into inc_dirs.
+        std_variants = []
+        for dn in inc_dirs:
+            std_variants.append(os.path.join(dn, 'db3'))
+            std_variants.append(os.path.join(dn, 'db4'))
+            for x in gen_db_minor_ver_nums(4):
+                std_variants.append(os.path.join(dn, "db4%d"%x))
+                std_variants.append(os.path.join(dn, "db4.%d"%x))
+            for x in gen_db_minor_ver_nums(3):
+                std_variants.append(os.path.join(dn, "db3%d"%x))
+                std_variants.append(os.path.join(dn, "db3.%d"%x))
+
+        db_inc_paths = std_variants + db_inc_paths
+        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
+
+        db_ver_inc_map = {}
+
+        if host_platform == 'darwin':
+            sysroot = macosx_sdk_root()
+
+        class db_found(Exception): pass
+        try:
+            # See whether there is a Sleepycat header in the standard
+            # search path.
+            for d in inc_dirs + db_inc_paths:
+                f = os.path.join(d, "db.h")
+
+                if host_platform == 'darwin' and is_macosx_sdk_path(d):
+                    f = os.path.join(sysroot, d[1:], "db.h")
+
+                if db_setup_debug: print "db: looking for db.h in", f
+                if os.path.exists(f):
+                    f = open(f).read()
+                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
+                    if m:
+                        db_major = int(m.group(1))
+                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
+                        db_minor = int(m.group(1))
+                        db_ver = (db_major, db_minor)
+
+                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
+                        if db_ver == (4, 6):
+                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
+                            db_patch = int(m.group(1))
+                            if db_patch < 21:
+                                print "db.h:", db_ver, "patch", db_patch,
+                                print "being ignored (4.6.x must be >= 4.6.21)"
+                                continue
+
+                        if ( (db_ver not in db_ver_inc_map) and
+                            allow_db_ver(db_ver) ):
+                            # save the include directory with the db.h version
+                            # (first occurrence only)
+                            db_ver_inc_map[db_ver] = d
+                            if db_setup_debug:
+                                print "db.h: found", db_ver, "in", d
+                        else:
+                            # we already found a header for this library version
+                            if db_setup_debug: print "db.h: ignoring", d
+                    else:
+                        # ignore this header, it didn't contain a version number
+                        if db_setup_debug:
+                            print "db.h: no version number found in", d
+
+            db_found_vers = db_ver_inc_map.keys()
+            db_found_vers.sort()
+
+            while db_found_vers:
+                db_ver = db_found_vers.pop()
+                db_incdir = db_ver_inc_map[db_ver]
+
+                # check lib directories parallel to the location of the header
+                db_dirs_to_check = [
+                    db_incdir.replace("include", 'lib64'),
+                    db_incdir.replace("include", 'lib'),
+                ]
+
+                if host_platform != 'darwin':
+                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
+
+                else:
+                    # Same as other branch, but takes OSX SDK into account
+                    tmp = []
+                    for dn in db_dirs_to_check:
+                        if is_macosx_sdk_path(dn):
+                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
+                                tmp.append(dn)
+                        else:
+                            if os.path.isdir(dn):
+                                tmp.append(dn)
+                    db_dirs_to_check = tmp
+
+                # Look for a version specific db-X.Y before an ambiguous dbX
+                # XXX should we -ever- look for a dbX name?  Do any
+                # systems really not name their library by version and
+                # symlink to more general names?
+                for dblib in (('db-%d.%d' % db_ver),
+                              ('db%d%d' % db_ver),
+                              ('db%d' % db_ver[0])):
+                    dblib_file = self.compiler.find_library_file(
+                                    db_dirs_to_check + lib_dirs, dblib )
+                    if dblib_file:
+                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
+                        raise db_found
+                    else:
+                        if db_setup_debug: print "db lib: ", dblib, "not found"
+
+        except db_found:
+            if db_setup_debug:
+                print "bsddb using BerkeleyDB lib:", db_ver, dblib
+                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
+            db_incs = [db_incdir]
+            dblibs = [dblib]
+            # We add the runtime_library_dirs argument because the
+            # BerkeleyDB lib we're linking against often isn't in the
+            # system dynamic library search path.  This is usually
+            # correct and most trouble free, but may cause problems in
+            # some unusual system configurations (e.g. the directory
+            # is on an NFS server that goes away).
+            exts.append(Extension('_bsddb', ['_bsddb.c'],
+                                  depends = ['bsddb.h'],
+                                  library_dirs=dblib_dir,
+                                  runtime_library_dirs=dblib_dir,
+                                  include_dirs=db_incs,
+                                  libraries=dblibs))
+        else:
+            if db_setup_debug: print "db: no appropriate library found"
+            db_incs = None
+            dblibs = []
+            dblib_dir = None
+            missing.append('_bsddb')
+
+        # The sqlite interface
+        sqlite_setup_debug = False   # verbose debug prints from this script?
+
+        # We hunt for #define SQLITE_VERSION "n.n.n"
+        # We need to find >= sqlite version 3.0.8
+        sqlite_incdir = sqlite_libdir = None
+        sqlite_inc_paths = [ '/usr/include',
+                             '/usr/include/sqlite',
+                             '/usr/include/sqlite3',
+                             '/usr/local/include',
+                             '/usr/local/include/sqlite',
+                             '/usr/local/include/sqlite3',
+                           ]
+        if cross_compiling:
+            sqlite_inc_paths = []
+        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
+        MIN_SQLITE_VERSION = ".".join([str(x)
+                                    for x in MIN_SQLITE_VERSION_NUMBER])
+
+        # Scan the default include directories before the SQLite specific
+        # ones. This allows one to override the copy of sqlite on OSX,
+        # where /usr/include contains an old version of sqlite.
+        if host_platform == 'darwin':
+            sysroot = macosx_sdk_root()
+
+        for d_ in inc_dirs + sqlite_inc_paths:
+            d = d_
+            if host_platform == 'darwin' and is_macosx_sdk_path(d):
+                d = os.path.join(sysroot, d[1:])
+
+            f = os.path.join(d, "sqlite3.h")
+            if os.path.exists(f):
+                if sqlite_setup_debug: print "sqlite: found %s"%f
+                incf = open(f).read()
+                m = re.search(
+                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf)
+                if m:
+                    sqlite_version = m.group(1)
+                    sqlite_version_tuple = tuple([int(x)
+                                        for x in sqlite_version.split(".")])
+                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
+                        # we win!
+                        if sqlite_setup_debug:
+                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
+                        sqlite_incdir = d
+                        break
+                    else:
+                        if sqlite_setup_debug:
+                            print "%s: version %s is too old, need >= %s"%(d,
+                                        sqlite_version, MIN_SQLITE_VERSION)
+                elif sqlite_setup_debug:
+                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
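+        # Illustrative match (version string invented): a header containing
+        #   #define SQLITE_VERSION "3.8.2"
+        # parses to (3, 8, 2), which satisfies the (3, 0, 8) minimum, so that
+        # directory becomes sqlite_incdir and the scan stops.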
+
+        if sqlite_incdir:
+            sqlite_dirs_to_check = [
+                os.path.join(sqlite_incdir, '..', 'lib64'),
+                os.path.join(sqlite_incdir, '..', 'lib'),
+                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
+                os.path.join(sqlite_incdir, '..', '..', 'lib'),
+            ]
+            sqlite_libfile = self.compiler.find_library_file(
+                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
+            if sqlite_libfile:
+                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
+
+        if sqlite_incdir and sqlite_libdir:
+            sqlite_srcs = ['_sqlite/cache.c',
+                '_sqlite/connection.c',
+                '_sqlite/cursor.c',
+                '_sqlite/microprotocols.c',
+                '_sqlite/module.c',
+                '_sqlite/prepare_protocol.c',
+                '_sqlite/row.c',
+                '_sqlite/statement.c',
+                '_sqlite/util.c', ]
+
+            sqlite_defines = []
+            if host_platform != "win32":
+                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
+            else:
+                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
+
+            # Comment this out if you want the sqlite3 module to be able to load extensions.
+            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
+
+            if host_platform == 'darwin':
+                # In every directory on the search path search for a dynamic
+                # library and then a static library, instead of first looking
+                # for dynamic libraries on the entire path.
+                # This way a statically linked custom sqlite gets picked up
+                # before the dynamic library in /usr/lib.
+                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
+            else:
+                sqlite_extra_link_args = ()
+
+            exts.append(Extension('_sqlite3', sqlite_srcs,
+                                  define_macros=sqlite_defines,
+                                  include_dirs=["Modules/_sqlite",
+                                                sqlite_incdir],
+                                  library_dirs=sqlite_libdir,
+                                  extra_link_args=sqlite_extra_link_args,
+                                  libraries=["sqlite3",]))
+        else:
+            missing.append('_sqlite3')
+
+        # Look for Berkeley db 1.85.   Note that it is built as a different
+        # module name so it can be included even when later versions are
+        # available.  A very restrictive search is performed to avoid
+        # accidentally building this module with a later version of the
+        # underlying db library.  Many BSD-ish Unixes incorporate db 1.85
+        # symbols into libc and place the include file in /usr/include.
+        #
+        # If the better bsddb library can be built (db_incs is defined)
+        # we do not build this one.  Otherwise this build will pick up
+        # the more recent berkeleydb's db.h file first in the include path
+        # when attempting to compile and it will fail.
+        f = "/usr/include/db.h"
+
+        if host_platform == 'darwin':
+            if is_macosx_sdk_path(f):
+                sysroot = macosx_sdk_root()
+                f = os.path.join(sysroot, f[1:])
+
+        if os.path.exists(f) and not db_incs:
+            data = open(f).read()
+            m = re.search(r"#\s*define\s+HASHVERSION\s+2\s*", data)
+            if m is not None:
+                # bingo - old version used hash file format version 2
+                ### XXX this should be fixed to not be platform-dependent
+                ### but I don't have direct access to an osf1 platform and
+                ### seemed to be muffing the search somehow
+                libraries = host_platform == "osf1" and ['db'] or None
+                if libraries is not None:
+                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
+                                          libraries=libraries))
+                else:
+                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
+            else:
+                missing.append('bsddb185')
+        else:
+            missing.append('bsddb185')
+
+        dbm_order = ['gdbm']
+        # The standard Unix dbm module:
+        if host_platform not in ['cygwin']:
+            config_args = [arg.strip("'")
+                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
+            dbm_args = [arg for arg in config_args
+                        if arg.startswith('--with-dbmliborder=')]
+            if dbm_args:
+                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
+            else:
+                dbm_order = "ndbm:gdbm:bdb".split(":")
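+            # Worked example (configure option shown for illustration only):
+            # configuring with --with-dbmliborder=gdbm:ndbm makes dbm_order
+            # ['gdbm', 'ndbm'], so the loop below tries a gdbm-backed dbm
+            # module first; without the option the default order is
+            # ndbm, then gdbm, then bdb.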
+            dbmext = None
+            for cand in dbm_order:
+                if cand == "ndbm":
+                    if find_file("ndbm.h", inc_dirs, []) is not None:
+                        # Some systems have -lndbm, others have -lgdbm_compat,
+                        # others don't have either
+                        if self.compiler.find_library_file(lib_dirs,
+                                                               'ndbm'):
+                            ndbm_libs = ['ndbm']
+                        elif self.compiler.find_library_file(lib_dirs,
+                                                             'gdbm_compat'):
+                            ndbm_libs = ['gdbm_compat']
+                        else:
+                            ndbm_libs = []
+                        print "building dbm using ndbm"
+                        dbmext = Extension('dbm', ['dbmmodule.c'],
+                                           define_macros=[
+                                               ('HAVE_NDBM_H',None),
+                                               ],
+                                           libraries=ndbm_libs)
+                        break
+
+                elif cand == "gdbm":
+                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
+                        gdbm_libs = ['gdbm']
+                        if self.compiler.find_library_file(lib_dirs,
+                                                               'gdbm_compat'):
+                            gdbm_libs.append('gdbm_compat')
+                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
+                            print "building dbm using gdbm"
+                            dbmext = Extension(
+                                'dbm', ['dbmmodule.c'],
+                                define_macros=[
+                                    ('HAVE_GDBM_NDBM_H', None),
+                                    ],
+                                libraries = gdbm_libs)
+                            break
+                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
+                            print "building dbm using gdbm"
+                            dbmext = Extension(
+                                'dbm', ['dbmmodule.c'],
+                                define_macros=[
+                                    ('HAVE_GDBM_DASH_NDBM_H', None),
+                                    ],
+                                libraries = gdbm_libs)
+                            break
+                elif cand == "bdb":
+                    if db_incs is not None:
+                        print "building dbm using bdb"
+                        dbmext = Extension('dbm', ['dbmmodule.c'],
+                                           library_dirs=dblib_dir,
+                                           runtime_library_dirs=dblib_dir,
+                                           include_dirs=db_incs,
+                                           define_macros=[
+                                               ('HAVE_BERKDB_H', None),
+                                               ('DB_DBM_HSEARCH', None),
+                                               ],
+                                           libraries=dblibs)
+                        break
+            if dbmext is not None:
+                exts.append(dbmext)
+            else:
+                missing.append('dbm')
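+        # Illustrative example (hypothetical configure invocation):
+        # --with-dbmliborder takes a colon-separated preference list, so
+        #
+        #     ./configure --with-dbmliborder=gdbm:ndbm
+        #
+        # makes dbm_order == ['gdbm', 'ndbm'] and the first candidate whose
+        # headers and libraries are found provides the dbm extension.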
+
+        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
+        if ('gdbm' in dbm_order and
+            self.compiler.find_library_file(lib_dirs, 'gdbm')):
+            exts.append( Extension('gdbm', ['gdbmmodule.c'],
+                                   libraries = ['gdbm'] ) )
+        else:
+            missing.append('gdbm')
+
+        # Unix-only modules
+        if host_platform not in ['win32']:
+            # Steen Lumholt's termios module
+            exts.append( Extension('termios', ['termios.c']) )
+            # Jeremy Hylton's rlimit interface
+            if host_platform not in ['atheos']:
+                exts.append( Extension('resource', ['resource.c']) )
+            else:
+                missing.append('resource')
+
+            nis = self._detect_nis(inc_dirs, lib_dirs)
+            if nis is not None:
+                exts.append(nis)
+            else:
+                missing.append('nis')
+
+        # Curses support, requiring the System V version of curses, often
+        # provided by the ncurses library.
+        panel_library = 'panel'
+        curses_incs = None
+        if curses_library.startswith('ncurses'):
+            if curses_library == 'ncursesw':
+                # Bug 1464056: If _curses.so links with ncursesw,
+                # _curses_panel.so must link with panelw.
+                panel_library = 'panelw'
+            curses_libs = [curses_library]
+            curses_incs = find_file('curses.h', inc_dirs,
+                                    [os.path.join(d, 'ncursesw') for d in inc_dirs])
+            exts.append( Extension('_curses', ['_cursesmodule.c'],
+                                   include_dirs = curses_incs,
+                                   libraries = curses_libs) )
+        elif curses_library == 'curses' and host_platform != 'darwin':
+            # OSX has an old Berkeley curses, not good enough for
+            # the _curses module.
+            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
+                curses_libs = ['curses', 'terminfo']
+            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
+                curses_libs = ['curses', 'termcap']
+            else:
+                curses_libs = ['curses']
+
+            exts.append( Extension('_curses', ['_cursesmodule.c'],
+                                   libraries = curses_libs) )
+        else:
+            missing.append('_curses')
+
+        # If the curses module is enabled, check for the panel module
+        if (module_enabled(exts, '_curses') and
+            self.compiler.find_library_file(lib_dirs, panel_library)):
+            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
+                                   include_dirs = curses_incs,
+                                   libraries = [panel_library] + curses_libs) )
+        else:
+            missing.append('_curses_panel')
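+        # Note (assuming find_file(), defined earlier in this file, returns a
+        # possibly-empty list of extra include directories, or None when the
+        # header is absent): curses_incs may be [] when curses.h sits in a
+        # standard include directory, or something like the hypothetical
+        # ['/usr/include/ncursesw'] when it only lives in the wide-character
+        # subdirectory; both _curses and _curses_panel reuse it above.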
+
+        # Andrew Kuchling's zlib module.  Note that some versions of zlib
+        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
+        # http://www.cert.org/advisories/CA-2002-07.html
+        #
+        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
+        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
+        # now, we still accept 1.1.3, because we think it's difficult to
+        # exploit this in Python, and we'd rather make it RedHat's problem
+        # than our problem <wink>.
+        #
+        # You can upgrade zlib to version 1.1.4 yourself by going to
+        # http://www.gzip.org/zlib/
+        zlib_inc = find_file('zlib.h', [], inc_dirs)
+        have_zlib = False
+        if zlib_inc is not None:
+            zlib_h = zlib_inc[0] + '/zlib.h'
+            version = '"0.0.0"'
+            version_req = '"1.1.3"'
+            if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h):
+                zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:])
+            fp = open(zlib_h)
+            while 1:
+                line = fp.readline()
+                if not line:
+                    break
+                if line.startswith('#define ZLIB_VERSION'):
+                    version = line.split()[2]
+                    break
+            if version >= version_req:
+                if (self.compiler.find_library_file(lib_dirs, 'z')):
+                    if host_platform == "darwin":
+                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
+                    else:
+                        zlib_extra_link_args = ()
+                    exts.append( Extension('zlib', ['zlibmodule.c'],
+                                           libraries = ['z'],
+                                           extra_link_args = zlib_extra_link_args))
+                    have_zlib = True
+                else:
+                    missing.append('zlib')
+            else:
+                missing.append('zlib')
+        else:
+            missing.append('zlib')
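+        # Illustrative example (hypothetical version number): zlib.h carries
+        # a line such as
+        #
+        #     #define ZLIB_VERSION "1.2.11"
+        #
+        # so line.split()[2] yields the quoted string '"1.2.11"', which is
+        # then compared as a string against the required '"1.1.3"'.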
+
+        # Helper module for various ascii-encoders.  Uses zlib for an optimized
+        # crc32 if we have it.  Otherwise binascii uses its own.
+        if have_zlib:
+            extra_compile_args = ['-DUSE_ZLIB_CRC32']
+            libraries = ['z']
+            extra_link_args = zlib_extra_link_args
+        else:
+            extra_compile_args = []
+            libraries = []
+            extra_link_args = []
+        exts.append( Extension('binascii', ['binascii.c'],
+                               extra_compile_args = extra_compile_args,
+                               libraries = libraries,
+                               extra_link_args = extra_link_args) )
+
+        # Gustavo Niemeyer's bz2 module.
+        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
+            if host_platform == "darwin":
+                bz2_extra_link_args = ('-Wl,-search_paths_first',)
+            else:
+                bz2_extra_link_args = ()
+            exts.append( Extension('bz2', ['bz2module.c'],
+                                   libraries = ['bz2'],
+                                   extra_link_args = bz2_extra_link_args) )
+        else:
+            missing.append('bz2')
+
+        # Interface to the Expat XML parser
+        #
+        # Expat was written by James Clark and is now maintained by a group of
+        # developers on SourceForge; see www.libexpat.org for more information.
+        # The pyexpat module was written by Paul Prescod after a prototype by
+        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
+        # of a system shared libexpat.so is possible with --with-system-expat
+        # configure option.
+        #
+        # More information on Expat can be found at www.libexpat.org.
+        #
+        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
+            expat_inc = []
+            define_macros = []
+            expat_lib = ['expat']
+            expat_sources = []
+            expat_depends = []
+        else:
+            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
+            define_macros = [
+                ('HAVE_EXPAT_CONFIG_H', '1'),
+                # bpo-30947: Python uses best available entropy sources to
+                # call XML_SetHashSalt(), expat entropy sources are not needed
+                ('XML_POOR_ENTROPY', '1'),
+            ]
+            expat_lib = []
+            expat_sources = ['expat/xmlparse.c',
+                             'expat/xmlrole.c',
+                             'expat/xmltok.c']
+            expat_depends = ['expat/ascii.h',
+                             'expat/asciitab.h',
+                             'expat/expat.h',
+                             'expat/expat_config.h',
+                             'expat/expat_external.h',
+                             'expat/internal.h',
+                             'expat/latin1tab.h',
+                             'expat/utf8tab.h',
+                             'expat/xmlrole.h',
+                             'expat/xmltok.h',
+                             'expat/xmltok_impl.h'
+                             ]
+
+        exts.append(Extension('pyexpat',
+                              define_macros = define_macros,
+                              include_dirs = expat_inc,
+                              libraries = expat_lib,
+                              sources = ['pyexpat.c'] + expat_sources,
+                              depends = expat_depends,
+                              ))
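+        # Illustrative example (hypothetical configure invocation): the
+        # substring test above inspects the recorded configure arguments, so
+        #
+        #     ./configure --with-system-expat
+        #
+        # links pyexpat against the shared system libexpat instead of
+        # compiling the bundled sources under Modules/expat/.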
+
+        # Fredrik Lundh's cElementTree module.  Note that this also
+        # uses expat (via the CAPI hook in pyexpat).
+
+        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
+            define_macros.append(('USE_PYEXPAT_CAPI', None))
+            exts.append(Extension('_elementtree',
+                                  define_macros = define_macros,
+                                  include_dirs = expat_inc,
+                                  libraries = expat_lib,
+                                  sources = ['_elementtree.c'],
+                                  depends = ['pyexpat.c'] + expat_sources +
+                                      expat_depends,
+                                  ))
+        else:
+            missing.append('_elementtree')
+
+        # Hye-Shik Chang's CJKCodecs modules.
+        if have_unicode:
+            exts.append(Extension('_multibytecodec',
+                                  ['cjkcodecs/multibytecodec.c']))
+            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
+                exts.append(Extension('_codecs_%s' % loc,
+                                      ['cjkcodecs/_codecs_%s.c' % loc]))
+        else:
+            missing.append('_multibytecodec')
+            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
+                missing.append('_codecs_%s' % loc)
+
+        # Dynamic loading module
+        if sys.maxint == 0x7fffffff:
+            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
+            dl_inc = find_file('dlfcn.h', [], inc_dirs)
+            if (dl_inc is not None) and (host_platform not in ['atheos']):
+                exts.append( Extension('dl', ['dlmodule.c']) )
+            else:
+                missing.append('dl')
+        else:
+            missing.append('dl')
+
+        # Thomas Heller's _ctypes module
+        self.detect_ctypes(inc_dirs, lib_dirs)
+
+        # Richard Oudkerk's multiprocessing module
+        if host_platform == 'win32':             # Windows
+            macros = dict()
+            libraries = ['ws2_32']
+
+        elif host_platform == 'darwin':          # Mac OSX
+            macros = dict()
+            libraries = []
+
+        elif host_platform == 'cygwin':          # Cygwin
+            macros = dict()
+            libraries = []
+
+        elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
+            # FreeBSD's P1003.1b semaphore support is very experimental
+            # and has many known problems (as of June 2008).
+            macros = dict()
+            libraries = []
+
+        elif host_platform.startswith('openbsd'):
+            macros = dict()
+            libraries = []
+
+        elif host_platform.startswith('netbsd'):
+            macros = dict()
+            libraries = []
+
+        else:                                   # Linux and other unices
+            macros = dict()
+            libraries = ['rt']
+
+        if host_platform == 'win32':
+            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
+                                     '_multiprocessing/semaphore.c',
+                                     '_multiprocessing/pipe_connection.c',
+                                     '_multiprocessing/socket_connection.c',
+                                     '_multiprocessing/win32_functions.c'
+                                   ]
+
+        else:
+            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
+                                     '_multiprocessing/socket_connection.c'
+                                   ]
+            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
+                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
+                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
+
+        if sysconfig.get_config_var('WITH_THREAD'):
+            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
+                                    define_macros=macros.items(),
+                                    include_dirs=["Modules/_multiprocessing"]))
+        else:
+            missing.append('_multiprocessing')
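+        # Note (for illustration): the probes above read configure results,
+        # e.g.
+        #
+        #     sysconfig.get_config_var('HAVE_SEM_OPEN')   # typically 1 or 0
+        #     sysconfig.get_config_var('WITH_THREAD')
+        #
+        # so on platforms without usable POSIX semaphores _multiprocessing is
+        # still built, just without the semaphore-based lock implementation.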
+
+        # End multiprocessing
+
+
+        # Platform-specific libraries
+        if host_platform == 'linux2':
+            # Linux-specific modules
+            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
+        else:
+            missing.append('linuxaudiodev')
+
+        if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
+                        'freebsd7', 'freebsd8')
+            or host_platform.startswith("gnukfreebsd")):
+            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
+        else:
+            missing.append('ossaudiodev')
+
+        if host_platform == 'sunos5':
+            # SunOS specific modules
+            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
+        else:
+            missing.append('sunaudiodev')
+
+        if host_platform == 'darwin':
+            # _scproxy
+            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
+                extra_link_args= [
+                    '-framework', 'SystemConfiguration',
+                    '-framework', 'CoreFoundation'
+                ]))
+
+
+        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
+                sysconfig.get_config_var("CONFIG_ARGS")):
+
+            if int(os.uname()[2].split('.')[0]) >= 8:
+                # We're on Mac OS X 10.4 or later, the compiler should
+                # support '-Wno-deprecated-declarations'. This will
+                # suppress deprecation warnings for the Carbon extensions;
+                # these extensions wrap the Carbon APIs, including the
+                # parts that are deprecated.
+                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
+            else:
+                carbon_extra_compile_args = []
+
+            # Mac OS X specific modules.
+            def macSrcExists(name1, name2=''):
+                if not name1:
+                    return None
+                names = (name1,)
+                if name2:
+                    names = (name1, name2)
+                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
+                return os.path.exists(path)
+
+            def addMacExtension(name, kwds, extra_srcs=[]):
+                dirname = ''
+                if name[0] == '_':
+                    dirname = name[1:].lower()
+                cname = name + '.c'
+                cmodulename = name + 'module.c'
+                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
+                if macSrcExists(cname):
+                    srcs = [cname]
+                elif macSrcExists(cmodulename):
+                    srcs = [cmodulename]
+                elif macSrcExists(dirname, cname):
+                    # XXX(nnorwitz): If all the names ended with module, we
+                    # wouldn't need this condition.  ibcarbon is the only one.
+                    srcs = [os.path.join(dirname, cname)]
+                elif macSrcExists(dirname, cmodulename):
+                    srcs = [os.path.join(dirname, cmodulename)]
+                else:
+                    raise RuntimeError("%s not found" % name)
+
+                # Here's the whole point:  add the extension with sources
+                exts.append(Extension(name, srcs + extra_srcs, **kwds))
+
+            # Core Foundation
+            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
+                         'extra_link_args': ['-framework', 'CoreFoundation'],
+                        }
+            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
+            addMacExtension('autoGIL', core_kwds)
+
+
+
+            # Carbon
+            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
+                           'extra_link_args': ['-framework', 'Carbon'],
+                          }
+            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
+                           'OSATerminology', 'icglue',
+                           # All these are in subdirs
+                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
+                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
+                           '_Help', '_Icn', '_IBCarbon', '_List',
+                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
+                           '_Scrap', '_Snd', '_TE',
+                          ]
+            for name in CARBON_EXTS:
+                addMacExtension(name, carbon_kwds)
+
+            # Workaround for a bug in the version of gcc shipped with Xcode 3.
+            # The _Win extension should build just like the other Carbon extensions, but
+            # this actually results in a hard crash of the linker.
+            #
+            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
+                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
+                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
+                           }
+                addMacExtension('_Win', win_kwds)
+            else:
+                addMacExtension('_Win', carbon_kwds)
+
+
+            # Application Services & QuickTime
+            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
+                        'extra_link_args': ['-framework','ApplicationServices'],
+                       }
+            addMacExtension('_Launch', app_kwds)
+            addMacExtension('_CG', app_kwds)
+
+            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
+                        extra_compile_args=carbon_extra_compile_args,
+                        extra_link_args=['-framework', 'QuickTime',
+                                     '-framework', 'Carbon']) )
+
+
+        self.extensions.extend(exts)
+
+        # Call the method for detecting whether _tkinter can be compiled
+        self.detect_tkinter(inc_dirs, lib_dirs)
+
+        if '_tkinter' not in [e.name for e in self.extensions]:
+            missing.append('_tkinter')
+
+##         # Uncomment these lines if you want to play with xxmodule.c
+##         ext = Extension('xx', ['xxmodule.c'])
+##         self.extensions.append(ext)
+
+        return missing
+
+    def detect_tkinter_explicitly(self):
+        # Build _tkinter using explicit locations for Tcl/Tk.
+        #
+        # This is enabled when both arguments are given to ./configure:
+        #
+        #     --with-tcltk-includes="-I/path/to/tclincludes \
+        #                            -I/path/to/tkincludes"
+        #     --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \
+        #                        -L/path/to/tklibs -ltkm.n"
+        #
+        # These values can also be specified or overridden via make:
+        #    make TCLTK_INCLUDES="..." TCLTK_LIBS="..."
+        #
+        # This can be useful for building and testing tkinter with multiple
+        # versions of Tcl/Tk.  Note that a build of Tk depends on a particular
+        # build of Tcl so you need to specify both arguments and use care when
+        # overriding.
+
+        # The _TCLTK variables are created in the Makefile sharedmods target.
+        tcltk_includes = os.environ.get('_TCLTK_INCLUDES')
+        tcltk_libs = os.environ.get('_TCLTK_LIBS')
+        if not (tcltk_includes and tcltk_libs):
+            # Resume default configuration search.
+            return 0
+
+        extra_compile_args = tcltk_includes.split()
+        extra_link_args = tcltk_libs.split()
+        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
+                        define_macros=[('WITH_APPINIT', 1)],
+                        extra_compile_args = extra_compile_args,
+                        extra_link_args = extra_link_args,
+                        )
+        self.extensions.append(ext)
+        return 1
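+        # Illustrative example (hypothetical paths and versions): a build
+        # configured with
+        #
+        #     ./configure --with-tcltk-includes='-I/opt/tcl8.6/include' \
+        #                 --with-tcltk-libs='-L/opt/tcl8.6/lib -ltcl8.6 -ltk8.6'
+        #
+        # has the Makefile export those strings as _TCLTK_INCLUDES and
+        # _TCLTK_LIBS, and the split() calls above turn them directly into
+        # the extra compile and link flags for _tkinter.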
+
+    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
+        # The _tkinter module, using frameworks. Since frameworks are quite
+        # different, the UNIX search logic is not sharable.
+        from os.path import join, exists
+        framework_dirs = [
+            '/Library/Frameworks',
+            '/System/Library/Frameworks/',
+            # A leading '/' here would make join() discard $HOME entirely.
+            join(os.getenv('HOME') or '', 'Library/Frameworks')
+        ]
+
+        sysroot = macosx_sdk_root()
+
+        # Find the directory that contains the Tcl.framework and Tk.framework
+        # bundles.
+        # XXX distutils should support -F!
+        for F in framework_dirs:
+            # both Tcl.framework and Tk.framework should be present
+
+
+            for fw in 'Tcl', 'Tk':
+                if is_macosx_sdk_path(F):
+                    if not exists(join(sysroot, F[1:], fw + '.framework')):
+                        break
+                else:
+                    if not exists(join(F, fw + '.framework')):
+                        break
+            else:
+                # ok, F is now a directory with both frameworks. Continue
+                # building
+                break
+        else:
+            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
+            # will now resume.
+            return 0
+
+        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
+        # frameworks. In later releases we should hopefully be able to pass
+        # the -F option to gcc, which specifies a framework lookup path.
+        #
+        include_dirs = [
+            join(F, fw + '.framework', H)
+            for fw in 'Tcl', 'Tk'
+            for H in 'Headers', 'Versions/Current/PrivateHeaders'
+        ]
+
+        # For 8.4a2, the X11 headers are not included. Rather than include a
+        # complicated search, this is a hard-coded path. It could bail out
+        # if X11 libs are not found...
+        include_dirs.append('/usr/X11R6/include')
+        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
+
+        # None of the existing framework builds of Tcl/Tk support 64-bit
+        # architectures.
+        cflags = sysconfig.get_config_vars('CFLAGS')[0]
+        archs = re.findall(r'-arch\s+(\w+)', cflags)
+
+        if is_macosx_sdk_path(F):
+            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
+        else:
+            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
+
+        detected_archs = []
+        for ln in fp:
+            a = ln.split()[-1]
+            if a in archs:
+                detected_archs.append(ln.split()[-1])
+        fp.close()
+
+        for a in detected_archs:
+            frameworks.append('-arch')
+            frameworks.append(a)
+
+        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
+                        define_macros=[('WITH_APPINIT', 1)],
+                        include_dirs = include_dirs,
+                        libraries = [],
+                        extra_compile_args = frameworks[2:],
+                        extra_link_args = frameworks,
+                        )
+        self.extensions.append(ext)
+        return 1
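+        # Summary of the probe above: `file` is run on the Tk framework
+        # binary and its 'for architecture' lines are scanned; the last token
+        # of each matching line is taken as an architecture name and kept
+        # only if it also appears among the -arch flags in CFLAGS, so -arch
+        # is only passed for architectures the installed Tk actually provides.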
+
+    def detect_tkinter(self, inc_dirs, lib_dirs):
+        # The _tkinter module.
+
+        # Check whether --with-tcltk-includes and --with-tcltk-libs were
+        # configured or passed into the make target.  If so, use these values
+        # to build tkinter and bypass the searches for Tcl and TK in standard
+        # locations.
+        if self.detect_tkinter_explicitly():
+            return
+
+        # Rather than complicate the code below, detecting and building
+        # AquaTk is a separate method. Only one Tkinter will be built on
+        # Darwin - either AquaTk, if it is found, or X11 based Tk.
+        if (host_platform == 'darwin' and
+            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
+            return
+
+        # Assume we haven't found any of the libraries or include files
+        # The versions with dots are used on Unix, and the versions without
+        # dots on Windows, for detection by cygwin.
+        tcllib = tklib = tcl_includes = tk_includes = None
+        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
+                        '8.2', '82', '8.1', '81', '8.0', '80']:
+            tklib = self.compiler.find_library_file(lib_dirs,
+                                                        'tk' + version)
+            tcllib = self.compiler.find_library_file(lib_dirs,
+                                                         'tcl' + version)
+            if tklib and tcllib:
+                # Exit the loop when we've found the Tcl/Tk libraries
+                break
+
+        # Now check for the header files
+        if tklib and tcllib:
+            # Check for the include files on Debian and {Free,Open}BSD, where
+            # they're put in /usr/include/{tcl,tk}X.Y
+            dotversion = version
+            if '.' not in dotversion and "bsd" in host_platform.lower():
+                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
+                # but the include subdirs are named like .../include/tcl8.3.
+                dotversion = dotversion[:-1] + '.' + dotversion[-1]
+            tcl_include_sub = []
+            tk_include_sub = []
+            for dir in inc_dirs:
+                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
+                tk_include_sub += [dir + os.sep + "tk" + dotversion]
+            tk_include_sub += tcl_include_sub
+            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
+            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
+
+        if (tcllib is None or tklib is None or
+            tcl_includes is None or tk_includes is None):
+            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
+            return
+
+        # OK... everything seems to be present for Tcl/Tk.
+
+        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
+        for dir in tcl_includes + tk_includes:
+            if dir not in include_dirs:
+                include_dirs.append(dir)
+
+        # Check for various platform-specific directories
+        if host_platform == 'sunos5':
+            include_dirs.append('/usr/openwin/include')
+            added_lib_dirs.append('/usr/openwin/lib')
+        elif os.path.exists('/usr/X11R6/include'):
+            include_dirs.append('/usr/X11R6/include')
+            added_lib_dirs.append('/usr/X11R6/lib64')
+            added_lib_dirs.append('/usr/X11R6/lib')
+        elif os.path.exists('/usr/X11R5/include'):
+            include_dirs.append('/usr/X11R5/include')
+            added_lib_dirs.append('/usr/X11R5/lib')
+        else:
+            # Assume default location for X11
+            include_dirs.append('/usr/X11/include')
+            added_lib_dirs.append('/usr/X11/lib')
+
+        # If Cygwin, then verify that X is installed before proceeding
+        if host_platform == 'cygwin':
+            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
+            if x11_inc is None:
+                return
+
+        # Check for BLT extension
+        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
+                                               'BLT8.0'):
+            defs.append( ('WITH_BLT', 1) )
+            libs.append('BLT8.0')
+        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
+                                                'BLT'):
+            defs.append( ('WITH_BLT', 1) )
+            libs.append('BLT')
+
+        # Add the Tcl/Tk libraries
+        libs.append('tk'+ version)
+        libs.append('tcl'+ version)
+
+        if host_platform in ['aix3', 'aix4']:
+            libs.append('ld')
+
+        # Finally, link with the X11 libraries (not appropriate on cygwin)
+        if host_platform != "cygwin":
+            libs.append('X11')
+
+        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
+                        define_macros=[('WITH_APPINIT', 1)] + defs,
+                        include_dirs = include_dirs,
+                        libraries = libs,
+                        library_dirs = added_lib_dirs,
+                        )
+        self.extensions.append(ext)
+
+        # XXX handle these, but how to detect?
+        # *** Uncomment and edit for PIL (TkImaging) extension only:
+        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
+        # *** Uncomment and edit for TOGL extension only:
+        #       -DWITH_TOGL togl.c \
+        # *** Uncomment these for TOGL extension only:
+        #       -lGL -lGLU -lXext -lXmu \
+
+    def configure_ctypes_darwin(self, ext):
+        # Darwin (OS X) uses preconfigured files, in
+        # the Modules/_ctypes/libffi_osx directory.
+        srcdir = sysconfig.get_config_var('srcdir')
+        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
+                                                  '_ctypes', 'libffi_osx'))
+        sources = [os.path.join(ffi_srcdir, p)
+                   for p in ['ffi.c',
+                             'x86/darwin64.S',
+                             'x86/x86-darwin.S',
+                             'x86/x86-ffi_darwin.c',
+                             'x86/x86-ffi64.c',
+                             'powerpc/ppc-darwin.S',
+                             'powerpc/ppc-darwin_closure.S',
+                             'powerpc/ppc-ffi_darwin.c',
+                             'powerpc/ppc64-darwin_closure.S',
+                             ]]
+
+        # Add .S (preprocessed assembly) to C compiler source extensions.
+        self.compiler.src_extensions.append('.S')
+
+        include_dirs = [os.path.join(ffi_srcdir, 'include'),
+                        os.path.join(ffi_srcdir, 'powerpc')]
+        ext.include_dirs.extend(include_dirs)
+        ext.sources.extend(sources)
+        return True
+
+    def configure_ctypes(self, ext):
+        if not self.use_system_libffi:
+            if host_platform == 'darwin':
+                return self.configure_ctypes_darwin(ext)
+
+            srcdir = sysconfig.get_config_var('srcdir')
+            ffi_builddir = os.path.join(self.build_temp, 'libffi')
+            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
+                                         '_ctypes', 'libffi'))
+            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
+
+            from distutils.dep_util import newer_group
+
+            config_sources = [os.path.join(ffi_srcdir, fname)
+                              for fname in os.listdir(ffi_srcdir)
+                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
+            if self.force or newer_group(config_sources,
+                                         ffi_configfile):
+                from distutils.dir_util import mkpath
+                mkpath(ffi_builddir)
+                config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split()
+                               if (('--host=' in arg) or ('--build=' in arg))]
+                if not self.verbose:
+                    config_args.append("-q")
+
+                # Pass empty CFLAGS because we'll just append the resulting
+                # CFLAGS to Python's; -g or -O2 is to be avoided.
+                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
+                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
+
+                res = os.system(cmd)
+                if res or not os.path.exists(ffi_configfile):
+                    print "Failed to configure _ctypes module"
+                    return False
+
+            fficonfig = {}
+            with open(ffi_configfile) as f:
+                exec f in fficonfig
+
+            # Add .S (preprocessed assembly) to C compiler source extensions.
+            self.compiler.src_extensions.append('.S')
+
+            include_dirs = [os.path.join(ffi_builddir, 'include'),
+                            ffi_builddir,
+                            os.path.join(ffi_srcdir, 'src')]
+            extra_compile_args = fficonfig['ffi_cflags'].split()
+
+            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
+                               fficonfig['ffi_sources'])
+            ext.include_dirs.extend(include_dirs)
+            ext.extra_compile_args.extend(extra_compile_args)
+        return True
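+        # Note (hypothetical values): fficonfig.py is generated by libffi's
+        # configure and exec'd into a plain dict above, so it is expected to
+        # define at least
+        #
+        #     ffi_sources = ['src/prep_cif.c', ...]
+        #     ffi_cflags  = '...'
+        #
+        # which are joined with ffi_srcdir and folded into the _ctypes
+        # extension's sources and compile flags.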
+
+    def detect_ctypes(self, inc_dirs, lib_dirs):
+        self.use_system_libffi = False
+        include_dirs = []
+        extra_compile_args = []
+        extra_link_args = []
+        sources = ['_ctypes/_ctypes.c',
+                   '_ctypes/callbacks.c',
+                   '_ctypes/callproc.c',
+                   '_ctypes/stgdict.c',
+                   '_ctypes/cfield.c']
+        depends = ['_ctypes/ctypes.h']
+
+        if host_platform == 'darwin':
+            sources.append('_ctypes/malloc_closure.c')
+            sources.append('_ctypes/darwin/dlfcn_simple.c')
+            extra_compile_args.append('-DMACOSX')
+            include_dirs.append('_ctypes/darwin')
+# XXX Is this still needed?
+##            extra_link_args.extend(['-read_only_relocs', 'warning'])
+
+        elif host_platform == 'sunos5':
+            # XXX This shouldn't be necessary; it appears that some
+            # of the assembler code is non-PIC (i.e. it has relocations
+            # when it shouldn't). The proper fix would be to rewrite
+            # the assembler code to be PIC.
+            # This only works with GCC; the Sun compiler likely refuses
+            # this option. If you want to compile ctypes with the Sun
+            # compiler, please research a proper solution, instead of
+            # finding some -z option for the Sun compiler.
+            extra_link_args.append('-mimpure-text')
+
+        elif host_platform.startswith('hp-ux'):
+            extra_link_args.append('-fPIC')
+
+        ext = Extension('_ctypes',
+                        include_dirs=include_dirs,
+                        extra_compile_args=extra_compile_args,
+                        extra_link_args=extra_link_args,
+                        libraries=[],
+                        sources=sources,
+                        depends=depends)
+        ext_test = Extension('_ctypes_test',
+                             sources=['_ctypes/_ctypes_test.c'])
+        self.extensions.extend([ext, ext_test])
+
+        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
+            return
+
+        if host_platform == 'darwin':
+            # OS X 10.5 comes with libffi.dylib; the include files are
+            # in /usr/include/ffi
+            inc_dirs.append('/usr/include/ffi')
+
+        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
+        if not ffi_inc or ffi_inc[0] == '':
+            ffi_inc = find_file('ffi.h', [], inc_dirs)
+        if ffi_inc is not None:
+            ffi_h = ffi_inc[0] + '/ffi.h'
+            with open(ffi_h) as f:
+                for line in f:
+                    line = line.strip()
+                    if line.startswith(('#define LIBFFI_H',
+                                        '#define ffi_wrapper_h')):
+                        break
+                else:
+                    ffi_inc = None
+                    print('Header file {} does not define LIBFFI_H or '
+                          'ffi_wrapper_h'.format(ffi_h))
+        ffi_lib = None
+        if ffi_inc is not None:
+            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
+                if (self.compiler.find_library_file(lib_dirs, lib_name)):
+                    ffi_lib = lib_name
+                    break
+
+        if ffi_inc and ffi_lib:
+            ext.include_dirs.extend(ffi_inc)
+            ext.libraries.append(ffi_lib)
+            self.use_system_libffi = True
+
+        if sysconfig.get_config_var('HAVE_LIBDL'):
+            # for dlopen, see bpo-32647
+            ext.libraries.append('dl')
+
+    def _detect_nis(self, inc_dirs, lib_dirs):
+        if host_platform in {'win32', 'cygwin', 'qnx6'}:
+            return None
+
+        libs = []
+        library_dirs = []
+        includes_dirs = []
+
+        # bpo-32521: glibc has deprecated Sun RPC for some time. Fedora 28
+        # moved headers and libraries to libtirpc and libnsl. The headers
+        # are in the tirpc and nsl subdirectories.
+        rpcsvc_inc = find_file(
+            'rpcsvc/yp_prot.h', inc_dirs,
+            [os.path.join(inc_dir, 'nsl') for inc_dir in inc_dirs]
+        )
+        rpc_inc = find_file(
+            'rpc/rpc.h', inc_dirs,
+            [os.path.join(inc_dir, 'tirpc') for inc_dir in inc_dirs]
+        )
+        if rpcsvc_inc is None or rpc_inc is None:
+            # not found
+            return None
+        includes_dirs.extend(rpcsvc_inc)
+        includes_dirs.extend(rpc_inc)
+
+        if self.compiler.find_library_file(lib_dirs, 'nsl'):
+            libs.append('nsl')
+        else:
+            # libnsl-devel: check for libnsl in nsl/ subdirectory
+            nsl_dirs = [os.path.join(lib_dir, 'nsl') for lib_dir in lib_dirs]
+            libnsl = self.compiler.find_library_file(nsl_dirs, 'nsl')
+            if libnsl is not None:
+                library_dirs.append(os.path.dirname(libnsl))
+                libs.append('nsl')
+
+        if self.compiler.find_library_file(lib_dirs, 'tirpc'):
+            libs.append('tirpc')
+
+        return Extension(
+            'nis', ['nismodule.c'],
+            libraries=libs,
+            library_dirs=library_dirs,
+            include_dirs=includes_dirs
+        )
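+        # Illustrative example (hypothetical path): on a glibc that has
+        # dropped Sun RPC (see bpo-32521 above), rpc/rpc.h is typically found
+        # only via the extra tirpc search path, e.g.
+        # /usr/include/tirpc/rpc/rpc.h, in which case that directory lands in
+        # includes_dirs and 'tirpc' and 'nsl' are added to libs when the
+        # corresponding libraries are found.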
+
+
+class PyBuildInstall(install):
+    # Suppress the warning about installation into the lib_dynload
+    # directory, which is not in sys.path when running Python during
+    # installation:
+    def initialize_options (self):
+        install.initialize_options(self)
+        self.warn_dir=0
+
+class PyBuildInstallLib(install_lib):
+    # Do exactly what install_lib does but make sure correct access modes get
+    # set on installed directories and files. All installed files will get
+    # mode 644 unless they are a shared library, in which case they will get
+    # mode 755. All installed directories will get mode 755.
+
+    so_ext = sysconfig.get_config_var("SO")
+
+    def install(self):
+        outfiles = install_lib.install(self)
+        self.set_file_modes(outfiles, 0644, 0755)
+        self.set_dir_modes(self.install_dir, 0755)
+        return outfiles
+
+    def set_file_modes(self, files, defaultMode, sharedLibMode):
+        if not self.is_chmod_supported(): return
+        if not files: return
+
+        for filename in files:
+            if os.path.islink(filename): continue
+            mode = defaultMode
+            if filename.endswith(self.so_ext): mode = sharedLibMode
+            log.info("changing mode of %s to %o", filename, mode)
+            if not self.dry_run: os.chmod(filename, mode)
+
+    def set_dir_modes(self, dirname, mode):
+        if not self.is_chmod_supported(): return
+        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
+
+    def set_dir_modes_visitor(self, mode, dirname, names):
+        if os.path.islink(dirname): return
+        log.info("changing mode of %s to %o", dirname, mode)
+        if not self.dry_run: os.chmod(dirname, mode)
+
+    def is_chmod_supported(self):
+        return hasattr(os, 'chmod')
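+    # Illustrative example (hypothetical filenames): with the modes passed
+    # from install() above, a pure-Python file such as os.py is chmod'ed to
+    # 0644, a shared library such as time.so (matching the "SO" config
+    # suffix) gets 0755, and every directory under install_dir ends up with
+    # mode 0755.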
+
+SUMMARY = """
+Python is an interpreted, interactive, object-oriented programming
+language. It is often compared to Tcl, Perl, Scheme or Java.
+
+Python combines remarkable power with very clear syntax. It has
+modules, classes, exceptions, very high level dynamic data types, and
+dynamic typing. There are interfaces to many system calls and
+libraries, as well as to various windowing systems (X11, Motif, Tk,
+Mac, MFC). New built-in modules are easily written in C or C++. Python
+is also usable as an extension language for applications that need a
+programmable interface.
+
+The Python implementation is portable: it runs on many brands of UNIX,
+on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
+listed here, it may still be supported, if there's a C compiler for
+it. Ask around on comp.lang.python -- or just try compiling Python
+yourself.
+"""
+
+CLASSIFIERS = """
+Development Status :: 6 - Mature
+License :: OSI Approved :: Python Software Foundation License
+Natural Language :: English
+Programming Language :: C
+Programming Language :: Python
+Topic :: Software Development
+"""
+
+def main():
+    # turn off warnings when deprecated modules are imported
+    import warnings
+    warnings.filterwarnings("ignore",category=DeprecationWarning)
+    setup(# PyPI Metadata (PEP 301)
+          name = "Python",
+          version = sys.version.split()[0],
+          url = "http://www.python.org/%s" % sys.version[:3],
+          maintainer = "Guido van Rossum and the Python community",
+          maintainer_email = "python-dev@python.org",
+          description = "A high-level object-oriented programming language",
+          long_description = SUMMARY.strip(),
+          license = "PSF license",
+          classifiers = filter(None, CLASSIFIERS.split("\n")),
+          platforms = ["Many"],
+
+          # Build info
+          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
+                      'install_lib':PyBuildInstallLib},
+          # The struct module is defined here, because build_ext won't be
+          # called unless there's at least one extension module defined.
+          ext_modules=[Extension('_struct', ['_struct.c'])],
+
+          # Scripts to install
+          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
+                     'Tools/scripts/2to3',
+                     'Lib/smtpd.py']
+        )
+
+# --install-platlib
+if __name__ == '__main__':
+    main()
Index: .
===================================================================
--- .	(nonexistent)
+++ .	(revision 5)

Property changes on: .
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,73 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~