Radix cross Linux

The main Radix cross Linux repository contains the build scripts for packages that provide the most complete and commonly used functionality for desktop machines.

Index: Lib/distutils/command/install.py
===================================================================
--- Lib/distutils/command/install.py	(nonexistent)
+++ Lib/distutils/command/install.py	(revision 5)
@@ -0,0 +1,679 @@
+"""distutils.command.install
+
+Implements the Distutils 'install' command."""
+
+import sys
+import sysconfig
+import os
+import re
+
+from distutils import log
+from distutils.core import Command
+from distutils.debug import DEBUG
+from distutils.sysconfig import get_config_vars
+from distutils.errors import DistutilsPlatformError
+from distutils.file_util import write_file
+from distutils.util import convert_path, subst_vars, change_root
+from distutils.util import get_platform
+from distutils.errors import DistutilsOptionError
+
+from site import USER_BASE
+from site import USER_SITE
+
+HAS_USER_SITE = (USER_SITE is not None)
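+# USER_SITE may be None (for example when Python is started with -S, or on
+# platforms without a per-user site directory); in that case the user
+# install schemes defined below are simply omitted.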
+
+# The keys to an installation scheme; if any new types of files are to be
+# installed, be sure to add an entry to every scheme in
+# sysconfig._INSTALL_SCHEMES, and to SCHEME_KEYS here.
+SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
+
+# The following code provides backward-compatible INSTALL_SCHEMES
+# while making the sysconfig module the single point of truth.
+# This makes it easier for OS distributions that need to alter the
+# locations for package installations in a single place.
+# Note that this module is deprecated (PEP 632); all consumers
+# of this information should switch to using sysconfig directly.
+INSTALL_SCHEMES = {"unix_prefix": {}, "unix_home": {}, "nt": {}}
+
+# Copy from sysconfig._INSTALL_SCHEMES
+for key in SCHEME_KEYS:
+    for distutils_scheme_name, sys_scheme_name in (
+            ("unix_prefix", "posix_prefix"), ("unix_home", "posix_home"),
+            ("nt", "nt")):
+        sys_key = key
+        sys_scheme = sysconfig._INSTALL_SCHEMES[sys_scheme_name]
+        if key == "headers" and key not in sys_scheme:
+            # On POSIX-y platforms, Python will:
+            # - Build from .h files in 'headers' (only there when
+            #   building CPython)
+            # - Install .h files to 'include'
+            # When 'headers' is missing, fall back to 'include'
+            sys_key = 'include'
+        INSTALL_SCHEMES[distutils_scheme_name][key] = sys_scheme[sys_key]
+
+# Transformation to different template format
+for main_key in INSTALL_SCHEMES:
+    for key, value in INSTALL_SCHEMES[main_key].items():
+        # Change all occurrences of {variable} to $variable
+        value = re.sub(r"\{(.+?)\}", r"$\g<1>", value)
+        value = value.replace("$installed_base", "$base")
+        value = value.replace("$py_version_nodot_plat", "$py_version_nodot")
+        if key == "headers":
+            value += "/$dist_name"
+        if sys.version_info >= (3, 9) and key == "platlib":
+            # platlibdir is available since 3.9: bpo-1294959
+            value = value.replace("/lib/", "/$platlibdir/")
+        INSTALL_SCHEMES[main_key][key] = value
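+# Illustrative result of the transformation above: a sysconfig template such
+# as '{base}/lib/python{py_version_short}/site-packages' becomes
+# '$base/lib/python$py_version_short/site-packages', the $-style form that
+# distutils.util.subst_vars() expands later.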
+
+# The following part of INSTALL_SCHEMES has a different definition
+# than the one in sysconfig, but because both depend on the site module,
+# the outcomes should be the same.
+if HAS_USER_SITE:
+    INSTALL_SCHEMES['nt_user'] = {
+        'purelib': '$usersite',
+        'platlib': '$usersite',
+        'headers': '$userbase/Python$py_version_nodot/Include/$dist_name',
+        'scripts': '$userbase/Python$py_version_nodot/Scripts',
+        'data'   : '$userbase',
+        }
+
+    INSTALL_SCHEMES['unix_user'] = {
+        'purelib': '$usersite',
+        'platlib': '$usersite',
+        'headers':
+            '$userbase/include/python$py_version_short$abiflags/$dist_name',
+        'scripts': '$userbase/bin/32',
+        'data'   : '$userbase',
+        }
+
+
+class install(Command):
+
+    description = "install everything from build directory"
+
+    user_options = [
+        # Select installation scheme and set base director(y|ies)
+        ('prefix=', None,
+         "installation prefix"),
+        ('exec-prefix=', None,
+         "(Unix only) prefix for platform-specific files"),
+        ('home=', None,
+         "(Unix only) home directory to install under"),
+
+        # Or, just set the base director(y|ies)
+        ('install-base=', None,
+         "base installation directory (instead of --prefix or --home)"),
+        ('install-platbase=', None,
+         "base installation directory for platform-specific files " +
+         "(instead of --exec-prefix or --home)"),
+        ('root=', None,
+         "install everything relative to this alternate root directory"),
+
+        # Or, explicitly set the installation scheme
+        ('install-purelib=', None,
+         "installation directory for pure Python module distributions"),
+        ('install-platlib=', None,
+         "installation directory for non-pure module distributions"),
+        ('install-lib=', None,
+         "installation directory for all module distributions " +
+         "(overrides --install-purelib and --install-platlib)"),
+
+        ('install-headers=', None,
+         "installation directory for C/C++ headers"),
+        ('install-scripts=', None,
+         "installation directory for Python scripts"),
+        ('install-data=', None,
+         "installation directory for data files"),
+
+        # Byte-compilation options -- see install_lib.py for details, as
+        # these are duplicated from there (but only install_lib does
+        # anything with them).
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+
+        # Miscellaneous control options
+        ('force', 'f',
+         "force installation (overwrite any existing files)"),
+        ('skip-build', None,
+         "skip rebuilding everything (for testing/debugging)"),
+
+        # Where to install documentation (eventually!)
+        #('doc-format=', None, "format of documentation to generate"),
+        #('install-man=', None, "directory for Unix man pages"),
+        #('install-html=', None, "directory for HTML documentation"),
+        #('install-info=', None, "directory for GNU info files"),
+
+        ('record=', None,
+         "filename in which to record list of installed files"),
+        ]
+
+    boolean_options = ['compile', 'force', 'skip-build']
+
+    if HAS_USER_SITE:
+        user_options.append(('user', None,
+                             "install in user site-package '%s'" % USER_SITE))
+        boolean_options.append('user')
+
+    negative_opt = {'no-compile' : 'compile'}
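+    # 'no-compile' is the negative form of 'compile': passing --no-compile
+    # simply stores a false value in self.compile.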
+
+
+    def initialize_options(self):
+        """Initializes options."""
+        # High-level options: these select both an installation base
+        # and scheme.
+        self.prefix = None
+        self.exec_prefix = None
+        self.home = None
+        self.user = 0
+
+        # These select only the installation base; it's up to the user to
+        # specify the installation scheme (currently, that means supplying
+        # the --install-{platlib,purelib,scripts,data} options).
+        self.install_base = None
+        self.install_platbase = None
+        self.root = None
+
+        # These options are the actual installation directories; if not
+        # supplied by the user, they are filled in using the installation
+        # scheme implied by prefix/exec-prefix/home and the contents of
+        # that installation scheme.
+        self.install_purelib = None     # for pure module distributions
+        self.install_platlib = None     # non-pure (dists w/ extensions)
+        self.install_headers = None     # for C/C++ headers
+        self.install_lib = None         # set to either purelib or platlib
+        self.install_scripts = None
+        self.install_data = None
+        if HAS_USER_SITE:
+            self.install_userbase = USER_BASE
+            self.install_usersite = USER_SITE
+
+        self.compile = None
+        self.optimize = None
+
+        # Deprecated
+        # These two are for putting non-packagized distributions into their
+        # own directory and creating a .pth file if it makes sense.
+        # 'extra_path' comes from the setup file; 'install_path_file' can
+        # be turned off if it makes no sense to install a .pth file.  (But
+        # better to install it uselessly than to guess wrong and not
+        # install it when it's necessary and would be used!)  Currently,
+        # 'install_path_file' is always true unless some outsider meddles
+        # with it.
+        self.extra_path = None
+        self.install_path_file = 1
+
+        # 'force' forces installation, even if target files are not
+        # out-of-date.  'skip_build' skips running the "build" command,
+        # handy if you know it's not necessary.  'warn_dir' (which is *not*
+        # a user option, it's just there so the bdist_* commands can turn
+        # it off) determines whether we warn about installing to a
+        # directory not in sys.path.
+        self.force = 0
+        self.skip_build = 0
+        self.warn_dir = 1
+
+        # These are only here as a conduit from the 'build' command to the
+        # 'install_*' commands that do the real work.  ('build_base' isn't
+        # actually used anywhere, but it might be useful in future.)  They
+        # are not user options, because if the user told the install
+        # command where the build directory is, that wouldn't affect the
+        # build command.
+        self.build_base = None
+        self.build_lib = None
+
+        # Not defined yet because we don't know anything about
+        # documentation yet.
+        #self.install_man = None
+        #self.install_html = None
+        #self.install_info = None
+
+        self.record = None
+
+
+    # -- Option finalizing methods -------------------------------------
+    # (This is rather more involved than for most commands,
+    # because this is where the policy for installing third-
+    # party Python modules on various platforms given a wide
+    # array of user input is decided.  Yes, it's quite complex!)
+
+    def finalize_options(self):
+        """Finalizes options."""
+        # This method (and its helpers, like 'finalize_unix()',
+        # 'finalize_other()', and 'select_scheme()') is where the default
+        # installation directories for modules, extension modules, and
+        # anything else we care to install from a Python module
+        # distribution are determined.  Thus, this code makes a pretty important policy
+        # statement about how third-party stuff is added to a Python
+        # installation!  Note that the actual work of installation is done
+        # by the relatively simple 'install_*' commands; they just take
+        # their orders from the installation directory options determined
+        # here.
+
+        # Check for errors/inconsistencies in the options; first, stuff
+        # that's wrong on any platform.
+
+        if ((self.prefix or self.exec_prefix or self.home) and
+            (self.install_base or self.install_platbase)):
+            raise DistutilsOptionError(
+                   "must supply either prefix/exec-prefix/home or " +
+                   "install-base/install-platbase -- not both")
+
+        if self.home and (self.prefix or self.exec_prefix):
+            raise DistutilsOptionError(
+                  "must supply either home or prefix/exec-prefix -- not both")
+
+        if self.user and (self.prefix or self.exec_prefix or self.home or
+                self.install_base or self.install_platbase):
+            raise DistutilsOptionError("can't combine user with prefix, "
+                                       "exec_prefix/home, or install_(plat)base")
+
+        # Next, stuff that's wrong (or dubious) only on certain platforms.
+        if os.name != "posix":
+            if self.exec_prefix:
+                self.warn("exec-prefix option ignored on this platform")
+                self.exec_prefix = None
+
+        # Now the interesting logic -- so interesting that we farm it out
+        # to other methods.  The goal of these methods is to set the final
+        # values for the install_{lib,scripts,data,...}  options, using as
+        # input a heady brew of prefix, exec_prefix, home, install_base,
+        # install_platbase, user-supplied versions of
+        # install_{purelib,platlib,lib,scripts,data,...}, and the
+        # INSTALL_SCHEME dictionary above.  Phew!
+
+        self.dump_dirs("pre-finalize_{unix,other}")
+
+        if os.name == 'posix':
+            self.finalize_unix()
+        else:
+            self.finalize_other()
+
+        self.dump_dirs("post-finalize_{unix,other}()")
+
+        # Expand configuration variables, tilde, etc. in self.install_base
+        # and self.install_platbase -- that way, we can use $base or
+        # $platbase in the other installation directories and not worry
+        # about needing recursive variable expansion (shudder).
+
+        py_version = sys.version.split()[0]
+        (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
+        try:
+            abiflags = sys.abiflags
+        except AttributeError:
+            # sys.abiflags may not be defined on all platforms.
+            abiflags = ''
+        self.config_vars = {'dist_name': self.distribution.get_name(),
+                            'dist_version': self.distribution.get_version(),
+                            'dist_fullname': self.distribution.get_fullname(),
+                            'py_version': py_version,
+                            'py_version_short': '%d.%d' % sys.version_info[:2],
+                            'py_version_nodot': '%d%d' % sys.version_info[:2],
+                            'sys_prefix': prefix,
+                            'prefix': prefix,
+                            'sys_exec_prefix': exec_prefix,
+                            'exec_prefix': exec_prefix,
+                            'abiflags': abiflags,
+                            'platlibdir': sys.platlibdir,
+                           }
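+        # These config_vars are what subst_vars() substitutes into the
+        # $-style templates from INSTALL_SCHEMES when expand_basedirs() and
+        # expand_dirs() run below.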
+
+        if HAS_USER_SITE:
+            self.config_vars['userbase'] = self.install_userbase
+            self.config_vars['usersite'] = self.install_usersite
+
+        if sysconfig.is_python_build(True):
+            self.config_vars['srcdir'] = sysconfig.get_config_var('srcdir')
+
+        self.expand_basedirs()
+
+        self.dump_dirs("post-expand_basedirs()")
+
+        # Now define config vars for the base directories so we can expand
+        # everything else.
+        self.config_vars['base'] = self.install_base
+        self.config_vars['platbase'] = self.install_platbase
+
+        if DEBUG:
+            from pprint import pprint
+            print("config vars:")
+            pprint(self.config_vars)
+
+        # Expand "~" and configuration variables in the installation
+        # directories.
+        self.expand_dirs()
+
+        self.dump_dirs("post-expand_dirs()")
+
+        # Create directories in the home dir:
+        if self.user:
+            self.create_home_path()
+
+        # Pick the actual directory to install all modules to: either
+        # install_purelib or install_platlib, depending on whether this
+        # module distribution is pure or not.  Of course, if the user
+        # already specified install_lib, use their selection.
+        if self.install_lib is None:
+            if self.distribution.ext_modules: # has extensions: non-pure
+                self.install_lib = self.install_platlib
+            else:
+                self.install_lib = self.install_purelib
+
+
+        # Convert directories from Unix /-separated syntax to the local
+        # convention.
+        self.convert_paths('lib', 'purelib', 'platlib',
+                           'scripts', 'data', 'headers')
+        if HAS_USER_SITE:
+            self.convert_paths('userbase', 'usersite')
+
+        # Deprecated
+        # Well, we're not actually fully completely finalized yet: we still
+        # have to deal with 'extra_path', which is the hack for allowing
+        # non-packagized module distributions (hello, Numerical Python!) to
+        # get their own directories.
+        self.handle_extra_path()
+        self.install_libbase = self.install_lib # needed for .pth file
+        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
+
+        # If a new root directory was supplied, make all the installation
+        # dirs relative to it.
+        if self.root is not None:
+            self.change_roots('libbase', 'lib', 'purelib', 'platlib',
+                              'scripts', 'data', 'headers')
+
+        self.dump_dirs("after prepending root")
+
+        # Find out the build directories, ie. where to install from.
+        self.set_undefined_options('build',
+                                   ('build_base', 'build_base'),
+                                   ('build_lib', 'build_lib'))
+
+        # Punt on doc directories for now -- after all, we're punting on
+        # documentation completely!
+
+    def dump_dirs(self, msg):
+        """Dumps the list of user options."""
+        if not DEBUG:
+            return
+        from distutils.fancy_getopt import longopt_xlate
+        log.debug(msg + ":")
+        for opt in self.user_options:
+            opt_name = opt[0]
+            if opt_name[-1] == "=":
+                opt_name = opt_name[0:-1]
+            if opt_name in self.negative_opt:
+                opt_name = self.negative_opt[opt_name]
+                opt_name = opt_name.translate(longopt_xlate)
+                val = not getattr(self, opt_name)
+            else:
+                opt_name = opt_name.translate(longopt_xlate)
+                val = getattr(self, opt_name)
+            log.debug("  %s: %s", opt_name, val)
+
+    def finalize_unix(self):
+        """Finalizes options for posix platforms."""
+        if self.install_base is not None or self.install_platbase is not None:
+            if ((self.install_lib is None and
+                 self.install_purelib is None and
+                 self.install_platlib is None) or
+                self.install_headers is None or
+                self.install_scripts is None or
+                self.install_data is None):
+                raise DistutilsOptionError(
+                      "install-base or install-platbase supplied, but "
+                      "installation scheme is incomplete")
+            return
+
+        if self.user:
+            if self.install_userbase is None:
+                raise DistutilsPlatformError(
+                    "User base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme("unix_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("unix_home")
+        else:
+            if self.prefix is None:
+                if self.exec_prefix is not None:
+                    raise DistutilsOptionError(
+                          "must not supply exec-prefix without prefix")
+
+                self.prefix = os.path.normpath(sys.prefix)
+                self.exec_prefix = os.path.normpath(sys.exec_prefix)
+
+            else:
+                if self.exec_prefix is None:
+                    self.exec_prefix = self.prefix
+
+            self.install_base = self.prefix
+            self.install_platbase = self.exec_prefix
+            self.select_scheme("unix_prefix")
+
+    def finalize_other(self):
+        """Finalizes options for non-posix platforms"""
+        if self.user:
+            if self.install_userbase is None:
+                raise DistutilsPlatformError(
+                    "User base directory is not specified")
+            self.install_base = self.install_platbase = self.install_userbase
+            self.select_scheme(os.name + "_user")
+        elif self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("unix_home")
+        else:
+            if self.prefix is None:
+                self.prefix = os.path.normpath(sys.prefix)
+
+            self.install_base = self.install_platbase = self.prefix
+            try:
+                self.select_scheme(os.name)
+            except KeyError:
+                raise DistutilsPlatformError(
+                      "I don't know how to install stuff on '%s'" % os.name)
+
+    def select_scheme(self, name):
+        """Sets the install directories by applying the install schemes."""
+        # it's the caller's problem if they supply a bad name!
+        scheme = INSTALL_SCHEMES[name]
+        for key in SCHEME_KEYS:
+            attrname = 'install_' + key
+            if getattr(self, attrname) is None:
+                setattr(self, attrname, scheme[key])
+
+    def _expand_attrs(self, attrs):
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                if os.name == 'posix' or os.name == 'nt':
+                    val = os.path.expanduser(val)
+                val = subst_vars(val, self.config_vars)
+                setattr(self, attr, val)
+
+    def expand_basedirs(self):
+        """Calls `os.path.expanduser` on install_base, install_platbase and
+        root."""
+        self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+    def expand_dirs(self):
+        """Calls `os.path.expanduser` on install dirs."""
+        self._expand_attrs(['install_purelib', 'install_platlib',
+                            'install_lib', 'install_headers',
+                            'install_scripts', 'install_data',])
+
+    def convert_paths(self, *names):
+        """Call `convert_path` over `names`."""
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, convert_path(getattr(self, attr)))
+
+    def handle_extra_path(self):
+        """Set `path_file` and `extra_dirs` using `extra_path`."""
+        if self.extra_path is None:
+            self.extra_path = self.distribution.extra_path
+
+        if self.extra_path is not None:
+            log.warn(
+                "Distribution option extra_path is deprecated. "
+                "See issue27919 for details."
+            )
+            if isinstance(self.extra_path, str):
+                self.extra_path = self.extra_path.split(',')
+
+            if len(self.extra_path) == 1:
+                path_file = extra_dirs = self.extra_path[0]
+            elif len(self.extra_path) == 2:
+                path_file, extra_dirs = self.extra_path
+            else:
+                raise DistutilsOptionError(
+                      "'extra_path' option must be a list, tuple, or "
+                      "comma-separated string with 1 or 2 elements")
+
+            # convert to local form in case Unix notation used (as it
+            # should be in setup scripts)
+            extra_dirs = convert_path(extra_dirs)
+        else:
+            path_file = None
+            extra_dirs = ''
+
+        # XXX should we warn if path_file and not extra_dirs? (in which
+        # case the path file would be harmless but pointless)
+        self.path_file = path_file
+        self.extra_dirs = extra_dirs
+
+    def change_roots(self, *names):
+        """Change the install directories pointed by name using root."""
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, change_root(self.root, getattr(self, attr)))
+
+    def create_home_path(self):
+        """Create directories under ~."""
+        if not self.user:
+            return
+        home = convert_path(os.path.expanduser("~"))
+        for name, path in self.config_vars.items():
+            if path.startswith(home) and not os.path.isdir(path):
+                self.debug_print("os.makedirs('%s', 0o700)" % path)
+                os.makedirs(path, 0o700)
+
+    # -- Command execution methods -------------------------------------
+
+    def run(self):
+        """Runs the command."""
+        # Obviously have to build before we can install
+        if not self.skip_build:
+            self.run_command('build')
+            # If we built for any other platform, we can't install.
+            build_plat = self.distribution.get_command_obj('build').plat_name
+            # check warn_dir - it is a clue that the 'install' is happening
+            # internally, and not to sys.path, so we don't check the platform
+            # matches what we are running.
+            if self.warn_dir and build_plat != get_platform():
+                raise DistutilsPlatformError("Can't install when "
+                                             "cross-compiling")
+
+        # Run all sub-commands (at least those that need to be run)
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        if self.path_file:
+            self.create_path_file()
+
+        # write list of installed files, if requested.
+        if self.record:
+            outputs = self.get_outputs()
+            if self.root:               # strip any package prefix
+                root_len = len(self.root)
+                for counter in range(len(outputs)):
+                    outputs[counter] = outputs[counter][root_len:]
+            self.execute(write_file,
+                         (self.record, outputs),
+                         "writing list of installed files to '%s'" %
+                         self.record)
+
+        sys_path = map(os.path.normpath, sys.path)
+        sys_path = map(os.path.normcase, sys_path)
+        install_lib = os.path.normcase(os.path.normpath(self.install_lib))
+        if (self.warn_dir and
+            not (self.path_file and self.install_path_file) and
+            install_lib not in sys_path):
+            log.debug(("modules installed to '%s', which is not in "
+                       "Python's module search path (sys.path) -- "
+                       "you'll have to change the search path yourself"),
+                       self.install_lib)
+
+    def create_path_file(self):
+        """Creates the .pth file"""
+        filename = os.path.join(self.install_libbase,
+                                self.path_file + ".pth")
+        if self.install_path_file:
+            self.execute(write_file,
+                         (filename, [self.extra_dirs]),
+                         "creating %s" % filename)
+        else:
+            self.warn("path file '%s' not created" % filename)
+
+
+    # -- Reporting methods ---------------------------------------------
+
+    def get_outputs(self):
+        """Assembles the outputs of all the sub-commands."""
+        outputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            # Add the contents of cmd.get_outputs(), ensuring
+            # that outputs doesn't contain duplicate entries
+            for filename in cmd.get_outputs():
+                if filename not in outputs:
+                    outputs.append(filename)
+
+        if self.path_file and self.install_path_file:
+            outputs.append(os.path.join(self.install_libbase,
+                                        self.path_file + ".pth"))
+
+        return outputs
+
+    def get_inputs(self):
+        """Returns the inputs of all the sub-commands"""
+        # XXX gee, this looks familiar ;-(
+        inputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            inputs.extend(cmd.get_inputs())
+
+        return inputs
+
+    # -- Predicates for sub-command list -------------------------------
+
+    def has_lib(self):
+        """Returns true if the current distribution has any Python
+        modules to install."""
+        return (self.distribution.has_pure_modules() or
+                self.distribution.has_ext_modules())
+
+    def has_headers(self):
+        """Returns true if the current distribution has any headers to
+        install."""
+        return self.distribution.has_headers()
+
+    def has_scripts(self):
+        """Returns true if the current distribution has any scripts to.
+        install."""
+        return self.distribution.has_scripts()
+
+    def has_data(self):
+        """Returns true if the current distribution has any data to.
+        install."""
+        return self.distribution.has_data_files()
+
+    # 'sub_commands': a list of commands this command might have to run to
+    # get its work done.  See cmd.py for more info.
+    sub_commands = [('install_lib',     has_lib),
+                    ('install_headers', has_headers),
+                    ('install_scripts', has_scripts),
+                    ('install_data',    has_data),
+                    ('install_egg_info', lambda self:True),
+                   ]
Index: Lib/distutils/command
===================================================================
--- Lib/distutils/command	(nonexistent)
+++ Lib/distutils/command	(revision 5)

Property changes on: Lib/distutils/command
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,73 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~
Index: Lib/distutils/sysconfig.py
===================================================================
--- Lib/distutils/sysconfig.py	(nonexistent)
+++ Lib/distutils/sysconfig.py	(revision 5)
@@ -0,0 +1,351 @@
+"""Provide access to Python's configuration information.  The specific
+configuration variables available depend heavily on the platform and
+configuration.  The values may be retrieved using
+get_config_var(name), and the list of variables is available via
+get_config_vars().keys().  Additional convenience functions are also
+available.
+
+Written by:   Fred L. Drake, Jr.
+Email:        <fdrake@acm.org>
+"""
+
+import _imp
+import os
+import re
+import sys
+import warnings
+
+from functools import partial
+
+from .errors import DistutilsPlatformError
+
+from sysconfig import (
+    _PREFIX as PREFIX,
+    _BASE_PREFIX as BASE_PREFIX,
+    _EXEC_PREFIX as EXEC_PREFIX,
+    _BASE_EXEC_PREFIX as BASE_EXEC_PREFIX,
+    _PROJECT_BASE as project_base,
+    _PYTHON_BUILD as python_build,
+    _init_posix as sysconfig_init_posix,
+    parse_config_h as sysconfig_parse_config_h,
+
+    _init_non_posix,
+    _is_python_source_dir,
+    _sys_home,
+
+    _variable_rx,
+    _findvar1_rx,
+    _findvar2_rx,
+
+    expand_makefile_vars,
+    is_python_build,
+    get_config_h_filename,
+    get_config_var,
+    get_config_vars,
+    get_makefile_filename,
+    get_python_version,
+)
+
+# This is better than
+# from sysconfig import _CONFIG_VARS as _config_vars
+# because it makes sure that the global dictionary is initialized,
+# which might not yet be the case at import time.
+_config_vars = get_config_vars()
+
+if os.name == "nt":
+    from sysconfig import _fix_pcbuild
+
+warnings.warn(
+    'The distutils.sysconfig module is deprecated, use sysconfig instead',
+    DeprecationWarning,
+    stacklevel=2
+)
+
+
+# The following functions are the same as in sysconfig but with a different API.
+def parse_config_h(fp, g=None):
+    return sysconfig_parse_config_h(fp, vars=g)
+
+
+_python_build = partial(is_python_build, check_home=True)
+_init_posix = partial(sysconfig_init_posix, _config_vars)
+_init_nt = partial(_init_non_posix, _config_vars)
+
+
+# A similar function is also implemented in sysconfig as _parse_makefile,
+# but without the parsing capabilities of distutils.text_file.TextFile.
+def parse_makefile(fn, g=None):
+    """Parse a Makefile-style file.
+    A dictionary containing name/value pairs is returned.  If an
+    optional dictionary is passed in as the second argument, it is
+    used instead of a new dictionary.
+    """
+    from distutils.text_file import TextFile
+    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape")
+
+    if g is None:
+        g = {}
+    done = {}
+    notdone = {}
+
+    while True:
+        line = fp.readline()
+        if line is None: # eof
+            break
+        m = re.match(_variable_rx, line)
+        if m:
+            n, v = m.group(1, 2)
+            v = v.strip()
+            # `$$' is a literal `$' in make
+            tmpv = v.replace('$$', '')
+
+            if "$" in tmpv:
+                notdone[n] = v
+            else:
+                try:
+                    v = int(v)
+                except ValueError:
+                    # insert literal `$'
+                    done[n] = v.replace('$$', '$')
+                else:
+                    done[n] = v
+
+    # Variables with a 'PY_' prefix in the makefile. These need to
+    # be made available without that prefix through sysconfig.
+    # Special care is needed to ensure that variable expansion works, even
+    # if the expansion uses the name without a prefix.
+    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
+
+    # do variable interpolation here
+    while notdone:
+        for name in list(notdone):
+            value = notdone[name]
+            m = re.search(_findvar1_rx, value) or re.search(_findvar2_rx, value)
+            if m:
+                n = m.group(1)
+                found = True
+                if n in done:
+                    item = str(done[n])
+                elif n in notdone:
+                    # get it on a subsequent round
+                    found = False
+                elif n in os.environ:
+                    # do it like make: fall back to environment
+                    item = os.environ[n]
+
+                elif n in renamed_variables:
+                    if name.startswith('PY_') and name[3:] in renamed_variables:
+                        item = ""
+
+                    elif 'PY_' + n in notdone:
+                        found = False
+
+                    else:
+                        item = str(done['PY_' + n])
+                else:
+                    done[n] = item = ""
+                if found:
+                    after = value[m.end():]
+                    value = value[:m.start()] + item + after
+                    if "$" in after:
+                        notdone[name] = value
+                    else:
+                        try: value = int(value)
+                        except ValueError:
+                            done[name] = value.strip()
+                        else:
+                            done[name] = value
+                        del notdone[name]
+
+                        if name.startswith('PY_') \
+                            and name[3:] in renamed_variables:
+
+                            name = name[3:]
+                            if name not in done:
+                                done[name] = value
+            else:
+                # bogus variable reference; just drop it since we can't deal
+                del notdone[name]
+
+    fp.close()
+
+    # strip spurious spaces
+    for k, v in done.items():
+        if isinstance(v, str):
+            done[k] = v.strip()
+
+    # save the results in the global dictionary
+    g.update(done)
+    return g
+
+
+# The following functions are deprecated together with this module and
+# have no direct replacement.
+
+# Calculate the build qualifier flags if they are defined.  Adding the flags
+# to the include and lib directories only makes sense for an installation, not
+# an in-source build.
+build_flags = ''
+try:
+    if not python_build:
+        build_flags = sys.abiflags
+except AttributeError:
+    # It's not a configure-based build, so the sys module doesn't have
+    # this attribute, which is fine.
+    pass
+
+
+def customize_compiler(compiler):
+    """Do any platform-specific customization of a CCompiler instance.
+
+    Mainly needed on Unix, so we can plug in the information that
+    varies across Unices and is stored in Python's Makefile.
+    """
+    if compiler.compiler_type == "unix":
+        if sys.platform == "darwin":
+            # Perform first-time customization of compiler-related
+            # config vars on OS X now that we know we need a compiler.
+            # This is primarily to support Pythons from binary
+            # installers.  The kind and paths to build tools on
+            # the user system may vary significantly from the system
+            # that Python itself was built on.  Also the user OS
+            # version and build tools may not support the same set
+            # of CPU architectures for universal builds.
+            if not _config_vars.get('CUSTOMIZED_OSX_COMPILER'):
+                import _osx_support
+                _osx_support.customize_compiler(_config_vars)
+                _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+
+        (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
+            get_config_vars('CC', 'CXX', 'CFLAGS',
+                            'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
+
+        if 'CC' in os.environ:
+            newcc = os.environ['CC']
+            if (sys.platform == 'darwin'
+                    and 'LDSHARED' not in os.environ
+                    and ldshared.startswith(cc)):
+                # On OS X, if CC is overridden, use that as the default
+                #       command for LDSHARED as well
+                ldshared = newcc + ldshared[len(cc):]
+            cc = newcc
+        if 'CXX' in os.environ:
+            cxx = os.environ['CXX']
+        if 'LDSHARED' in os.environ:
+            ldshared = os.environ['LDSHARED']
+        if 'CPP' in os.environ:
+            cpp = os.environ['CPP']
+        else:
+            cpp = cc + " -E"           # not always
+        if 'LDFLAGS' in os.environ:
+            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
+        if 'CFLAGS' in os.environ:
+            cflags = cflags + ' ' + os.environ['CFLAGS']
+            ldshared = ldshared + ' ' + os.environ['CFLAGS']
+        if 'CPPFLAGS' in os.environ:
+            cpp = cpp + ' ' + os.environ['CPPFLAGS']
+            cflags = cflags + ' ' + os.environ['CPPFLAGS']
+            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
+        if 'AR' in os.environ:
+            ar = os.environ['AR']
+        if 'ARFLAGS' in os.environ:
+            archiver = ar + ' ' + os.environ['ARFLAGS']
+        else:
+            archiver = ar + ' ' + ar_flags
+
+        cc_cmd = cc + ' ' + cflags
+        compiler.set_executables(
+            preprocessor=cpp,
+            compiler=cc_cmd,
+            compiler_so=cc_cmd + ' ' + ccshared,
+            compiler_cxx=cxx,
+            linker_so=ldshared,
+            linker_exe=cc,
+            archiver=archiver)
+
+        compiler.shared_lib_extension = shlib_suffix
+
+
+def get_python_inc(plat_specific=0, prefix=None):
+    """Return the directory containing installed Python header files.
+
+    If 'plat_specific' is false (the default), this is the path to the
+    non-platform-specific header files, i.e. Python.h and so on;
+    otherwise, this is the path to platform-specific header files
+    (namely pyconfig.h).
+
+    If 'prefix' is supplied, use it instead of sys.base_prefix or
+    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
+    """
+    if prefix is None:
+        prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
+    if os.name == "posix":
+        if python_build:
+            # Assume the executable is in the build directory.  The
+            # pyconfig.h file should be in the same directory.  Since
+            # the build directory may not be the source directory, we
+            # must use "srcdir" from the makefile to find the "Include"
+            # directory.
+            if plat_specific:
+                return _sys_home or project_base
+            else:
+                incdir = os.path.join(get_config_var('srcdir'), 'Include')
+                return os.path.normpath(incdir)
+        python_dir = 'python' + get_python_version() + build_flags
+        return os.path.join(prefix, "include", python_dir)
+    elif os.name == "nt":
+        if python_build:
+            # Include both the include and PC dir to ensure we can find
+            # pyconfig.h
+            return (os.path.join(prefix, "include") + os.path.pathsep +
+                    os.path.join(prefix, "PC"))
+        return os.path.join(prefix, "include")
+    else:
+        raise DistutilsPlatformError(
+            "I don't know where Python installs its C header files "
+            "on platform '%s'" % os.name)
+
+
+def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+    """Return the directory containing the Python library (standard or
+    site additions).
+
+    If 'plat_specific' is true, return the directory containing
+    platform-specific modules, i.e. any module from a non-pure-Python
+    module distribution; otherwise, return the platform-shared library
+    directory.  If 'standard_lib' is true, return the directory
+    containing standard Python library modules; otherwise, return the
+    directory for site-specific modules.
+
+    If 'prefix' is supplied, use it instead of sys.base_prefix or
+    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
+    """
+    if prefix is None:
+        if standard_lib:
+            prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
+        else:
+            prefix = plat_specific and EXEC_PREFIX or PREFIX
+
+    if os.name == "posix":
+        if plat_specific or standard_lib:
+            # Platform-specific modules (any module from a non-pure-Python
+            # module distribution) or standard Python library modules.
+            libdir = sys.platlibdir
+        else:
+            # Pure Python
+            libdir = "lib32"
+        libpython = os.path.join(prefix, libdir,
+                                 "python" + get_python_version())
+        if standard_lib:
+            return libpython
+        else:
+            return os.path.join(libpython, "site-packages")
+    elif os.name == "nt":
+        if standard_lib:
+            return os.path.join(prefix, "Lib")
+        else:
+            return os.path.join(prefix, "Lib", "site-packages")
+    else:
+        raise DistutilsPlatformError(
+            "I don't know where Python installs its library "
+            "on platform '%s'" % os.name)
Index: Lib/distutils
===================================================================
--- Lib/distutils	(nonexistent)
+++ Lib/distutils	(revision 5)

Property changes on: Lib/distutils
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,73 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~
Index: Lib/site.py
===================================================================
--- Lib/site.py	(nonexistent)
+++ Lib/site.py	(revision 5)
@@ -0,0 +1,674 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+This will append site-specific paths to the module search path.  On
+Unix (including Mac OSX), it starts with sys.prefix and
+sys.exec_prefix (if different) and appends
+lib/python<version>/site-packages.
+On other platforms (such as Windows), it tries each of the
+prefixes directly, as well as with lib/site-packages appended.  The
+resulting directories, if they exist, are appended to sys.path, and
+also inspected for path configuration files.
+
+If a file named "pyvenv.cfg" exists one directory above sys.executable,
+sys.prefix and sys.exec_prefix are set to that directory and
+it is also checked for site-packages (sys.base_prefix and
+sys.base_exec_prefix will always be the "real" prefixes of the Python
+installation). If "pyvenv.cfg" (a bootstrap configuration file) contains
+the key "include-system-site-packages" set to anything other than "false"
+(case-insensitive), the system-level prefixes will still also be
+searched for site-packages; otherwise they won't.
+
+All of the resulting site-specific directories, if they exist, are
+appended to sys.path, and also inspected for path configuration
+files.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path.  Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once.  Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python2.5/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth.  Assume foo.pth contains the
+following:
+
+  # foo package configuration
+  foo
+  bar
+  bletch
+
+and bar.pth contains:
+
+  # bar package configuration
+  bar
+
+Then the following directories are added to sys.path, in this order:
+
+  /usr/local/lib/python2.5/site-packages/bar
+  /usr/local/lib/python2.5/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+The readline module is also automatically configured to enable
+completion for systems that support it.  This can be overridden in
+sitecustomize, usercustomize or PYTHONSTARTUP.  Starting Python in
+isolated mode (-I) disables automatic readline configuration.
+
+After these operations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations.  If this import fails with an
+ImportError exception, it is silently ignored.
+"""
+
+import sys
+import os
+import builtins
+import _sitebuiltins
+import io
+
+# Prefixes for site-packages; add additional prefixes like /usr/local here
+PREFIXES = [sys.prefix, sys.exec_prefix]
+# Enable per user site-packages directory
+# set it to False to disable the feature or True to force the feature
+ENABLE_USER_SITE = None
+
+# for distutils.commands.install
+# These values are initialized by the getuserbase() and getusersitepackages()
+# functions, through the main() function when Python starts.
+USER_SITE = None
+USER_BASE = None
+
+
+def _trace(message):
+    if sys.flags.verbose:
+        print(message, file=sys.stderr)
+
+
+def makepath(*paths):
+    dir = os.path.join(*paths)
+    try:
+        dir = os.path.abspath(dir)
+    except OSError:
+        pass
+    return dir, os.path.normcase(dir)
+
+
+def abs_paths():
+    """Set all module __file__ and __cached__ attributes to an absolute path"""
+    for m in set(sys.modules.values()):
+        loader_module = None
+        try:
+            loader_module = m.__loader__.__module__
+        except AttributeError:
+            try:
+                loader_module = m.__spec__.loader.__module__
+            except AttributeError:
+                pass
+        if loader_module not in {'_frozen_importlib', '_frozen_importlib_external'}:
+            continue   # don't mess with a PEP 302-supplied __file__
+        try:
+            m.__file__ = os.path.abspath(m.__file__)
+        except (AttributeError, OSError, TypeError):
+            pass
+        try:
+            m.__cached__ = os.path.abspath(m.__cached__)
+        except (AttributeError, OSError, TypeError):
+            pass
+
+
+def removeduppaths():
+    """ Remove duplicate entries from sys.path along with making them
+    absolute"""
+    # This ensures that the initial path provided by the interpreter contains
+    # only absolute pathnames, even if we're running from the build directory.
+    L = []
+    known_paths = set()
+    for dir in sys.path:
+        # Filter out duplicate paths (on case-insensitive file systems also
+        # if they only differ in case); turn relative paths into absolute
+        # paths.
+        dir, dircase = makepath(dir)
+        if dircase not in known_paths:
+            L.append(dir)
+            known_paths.add(dircase)
+    sys.path[:] = L
+    return known_paths
+
+
+def _init_pathinfo():
+    """Return a set containing all existing file system items from sys.path."""
+    d = set()
+    for item in sys.path:
+        try:
+            if os.path.exists(item):
+                _, itemcase = makepath(item)
+                d.add(itemcase)
+        except TypeError:
+            continue
+    return d
+
+
+def addpackage(sitedir, name, known_paths):
+    """Process a .pth file within the site-packages directory:
+       For each line in the file, either combine it with sitedir to a path
+       and add that to known_paths, or execute it if it starts with 'import '.
+    """
+    if known_paths is None:
+        known_paths = _init_pathinfo()
+        reset = True
+    else:
+        reset = False
+    fullname = os.path.join(sitedir, name)
+    _trace(f"Processing .pth file: {fullname!r}")
+    try:
+        # locale encoding is not ideal especially on Windows. But we have used
+        # it for a long time. setuptools uses the locale encoding too.
+        f = io.TextIOWrapper(io.open_code(fullname), encoding="locale")
+    except OSError:
+        return
+    with f:
+        for n, line in enumerate(f):
+            if line.startswith("#"):
+                continue
+            if line.strip() == "":
+                continue
+            try:
+                if line.startswith(("import ", "import\t")):
+                    exec(line)
+                    continue
+                line = line.rstrip()
+                dir, dircase = makepath(sitedir, line)
+                if not dircase in known_paths and os.path.exists(dir):
+                    sys.path.append(dir)
+                    known_paths.add(dircase)
+            except Exception:
+                print("Error processing line {:d} of {}:\n".format(n+1, fullname),
+                      file=sys.stderr)
+                import traceback
+                for record in traceback.format_exception(*sys.exc_info()):
+                    for line in record.splitlines():
+                        print('  '+line, file=sys.stderr)
+                print("\nRemainder of file ignored", file=sys.stderr)
+                break
+    if reset:
+        known_paths = None
+    return known_paths
+
+
+def addsitedir(sitedir, known_paths=None):
+    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+    'sitedir'"""
+    _trace(f"Adding directory: {sitedir!r}")
+    if known_paths is None:
+        known_paths = _init_pathinfo()
+        reset = True
+    else:
+        reset = False
+    sitedir, sitedircase = makepath(sitedir)
+    if not sitedircase in known_paths:
+        sys.path.append(sitedir)        # Add path component
+        known_paths.add(sitedircase)
+    try:
+        names = os.listdir(sitedir)
+    except OSError:
+        return
+    names = [name for name in names if name.endswith(".pth")]
+    for name in sorted(names):
+        addpackage(sitedir, name, known_paths)
+    if reset:
+        known_paths = None
+    return known_paths
+
+
+def check_enableusersite():
+    """Check if user site directory is safe for inclusion
+
+    The function tests for the command line flag (including the environment
+    variable) and checks that the process uid/gid equal the effective uid/gid.
+
+    None: Disabled for security reasons
+    False: Disabled by user (command line option)
+    True: Safe and enabled
+    """
+    if sys.flags.no_user_site:
+        return False
+
+    if hasattr(os, "getuid") and hasattr(os, "geteuid"):
+        # check process uid == effective uid
+        if os.geteuid() != os.getuid():
+            return None
+    if hasattr(os, "getgid") and hasattr(os, "getegid"):
+        # check process gid == effective gid
+        if os.getegid() != os.getgid():
+            return None
+
+    return True
+
+
+# NOTE: sysconfig and its dependencies are relatively large, but the site
+# module needs only a very limited part of them.
+# To speed up startup time, we keep a copy of them here.
+#
+# See https://bugs.python.org/issue29585
+
+# Copy of sysconfig._getuserbase()
+def _getuserbase():
+    env_base = os.environ.get("PYTHONUSERBASE", None)
+    if env_base:
+        return env_base
+
+    # VxWorks has no home directories
+    if sys.platform == "vxworks":
+        return None
+
+    def joinuser(*args):
+        return os.path.expanduser(os.path.join(*args))
+
+    if os.name == "nt":
+        base = os.environ.get("APPDATA") or "~"
+        return joinuser(base, "Python")
+
+    if sys.platform == "darwin" and sys._framework:
+        return joinuser("~", "Library", sys._framework,
+                        "%d.%d" % sys.version_info[:2])
+
+    return joinuser("~", ".local")
+
+
+# Same as sysconfig.get_path('purelib', os.name+'_user')
+def _get_path(userbase):
+    version = sys.version_info
+
+    if os.name == 'nt':
+        ver_nodot = sys.winver.replace('.', '')
+        return f'{userbase}\\Python{ver_nodot}\\site-packages'
+
+    if sys.platform == 'darwin' and sys._framework:
+        return f'{userbase}/lib/python/site-packages'
+
+    return f'{userbase}/lib/python{version[0]}.{version[1]}/site-packages'
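+# For a POSIX userbase such as "~/.local" this yields, illustratively,
+# "~/.local/lib/python3.9/site-packages" (the version number depends on the
+# running interpreter).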
+
+
+def getuserbase():
+    """Returns the `user base` directory path.
+
+    The `user base` directory can be used to store data. If the global
+    variable ``USER_BASE`` is not initialized yet, this function will also set
+    it.
+    """
+    global USER_BASE
+    if USER_BASE is None:
+        USER_BASE = _getuserbase()
+    return USER_BASE
+
+
+def getusersitepackages():
+    """Returns the user-specific site-packages directory path.
+
+    If the global variable ``USER_SITE`` is not initialized yet, this
+    function will also set it.
+    """
+    global USER_SITE, ENABLE_USER_SITE
+    userbase = getuserbase() # this will also set USER_BASE
+
+    if USER_SITE is None:
+        if userbase is None:
+            ENABLE_USER_SITE = False # disable user site and return None
+        else:
+            USER_SITE = _get_path(userbase)
+
+    return USER_SITE
+
+def addusersitepackages(known_paths):
+    """Add a per user site-package to sys.path
+
+    Each user has its own python directory with site-packages in the
+    home directory.
+    """
+    # get the per user site-package path
+    # this call will also make sure USER_BASE and USER_SITE are set
+    _trace("Processing user site-packages")
+    user_site = getusersitepackages()
+
+    if ENABLE_USER_SITE and os.path.isdir(user_site):
+        addsitedir(user_site, known_paths)
+    return known_paths
+
+def getsitepackages(prefixes=None):
+    """Returns a list containing all global site-packages directories.
+
+    For each directory present in ``prefixes`` (or the global ``PREFIXES``),
+    this function will find its `site-packages` subdirectory depending on the
+    system environment, and will return a list of full paths.
+    """
+    sitepackages = []
+    seen = set()
+
+    if prefixes is None:
+        prefixes = PREFIXES
+
+    for prefix in prefixes:
+        if not prefix or prefix in seen:
+            continue
+        seen.add(prefix)
+
+        libdirs = [sys.platlibdir]
+        if sys.platlibdir != "lib32":
+            libdirs.append("lib32")
+
+        if os.sep == '/':
+            for libdir in libdirs:
+                path = os.path.join(prefix, libdir,
+                                    "python%d.%d" % sys.version_info[:2],
+                                    "site-packages")
+                sitepackages.append(path)
+        else:
+            sitepackages.append(prefix)
+
+            for libdir in libdirs:
+                path = os.path.join(prefix, libdir, "site-packages")
+                sitepackages.append(path)
+    return sitepackages
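+
+# Illustrative sketch, assuming a prefix of '/usr', Python 3.9 and
+# sys.platlibdir == 'lib64' (the exact values depend on the build):
+#
+#     getsitepackages(['/usr'])
+#     -> ['/usr/lib64/python3.9/site-packages',
+#         '/usr/lib32/python3.9/site-packages']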
+
+def addsitepackages(known_paths, prefixes=None):
+    """Add site-packages to sys.path"""
+    _trace("Processing global site-packages")
+    for sitedir in getsitepackages(prefixes):
+        if os.path.isdir(sitedir):
+            addsitedir(sitedir, known_paths)
+
+    return known_paths
+
+def setquit():
+    """Define new builtins 'quit' and 'exit'.
+
+    These are objects which make the interpreter exit when called.
+    The repr of each object contains a hint at how it works.
+
+    """
+    if os.sep == '\\':
+        eof = 'Ctrl-Z plus Return'
+    else:
+        eof = 'Ctrl-D (i.e. EOF)'
+
+    builtins.quit = _sitebuiltins.Quitter('quit', eof)
+    builtins.exit = _sitebuiltins.Quitter('exit', eof)
+
+
+def setcopyright():
+    """Set 'copyright' and 'credits' in builtins"""
+    builtins.copyright = _sitebuiltins._Printer("copyright", sys.copyright)
+    if sys.platform[:4] == 'java':
+        builtins.credits = _sitebuiltins._Printer(
+            "credits",
+            "Jython is maintained by the Jython developers (www.jython.org).")
+    else:
+        builtins.credits = _sitebuiltins._Printer("credits", """\
+    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+    for supporting Python development.  See www.python.org for more information.""")
+    files, dirs = [], []
+    # Not all modules are required to have a __file__ attribute.  See
+    # PEP 420 for more details.
+    if hasattr(os, '__file__'):
+        here = os.path.dirname(os.__file__)
+        files.extend(["LICENSE.txt", "LICENSE"])
+        dirs.extend([os.path.join(here, os.pardir), here, os.curdir])
+    builtins.license = _sitebuiltins._Printer(
+        "license",
+        "See https://www.python.org/psf/license/",
+        files, dirs)
+
+
+def sethelper():
+    builtins.help = _sitebuiltins._Helper()
+
+def enablerlcompleter():
+    """Enable default readline configuration on interactive prompts, by
+    registering a sys.__interactivehook__.
+
+    If the readline module can be imported, the hook will set the Tab key
+    as completion key and register ~/.python_history as history file.
+    This can be overridden in the sitecustomize or usercustomize module,
+    or in a PYTHONSTARTUP file.
+    """
+    def register_readline():
+        import atexit
+        try:
+            import readline
+            import rlcompleter
+        except ImportError:
+            return
+
+        # Reading the initialization (config) file may not be enough to set a
+        # completion key, so we set one first and then read the file.
+        readline_doc = getattr(readline, '__doc__', '')
+        if readline_doc is not None and 'libedit' in readline_doc:
+            readline.parse_and_bind('bind ^I rl_complete')
+        else:
+            readline.parse_and_bind('tab: complete')
+
+        try:
+            readline.read_init_file()
+        except OSError:
+            # An OSError here could have many causes, but the most likely one
+            # is that there's no .inputrc file (or .editrc file in the case of
+            # Mac OS X + libedit) in the expected location.  In that case, we
+            # want to ignore the exception.
+            pass
+
+        if readline.get_current_history_length() == 0:
+            # If no history was loaded, default to .python_history.
+            # The guard is necessary to avoid doubling history size at
+            # each interpreter exit when readline was already configured
+            # through a PYTHONSTARTUP hook, see:
+            # http://bugs.python.org/issue5845#msg198636
+            history = os.path.join(os.path.expanduser('~'),
+                                   '.python_history')
+            try:
+                readline.read_history_file(history)
+            except OSError:
+                pass
+
+            def write_history():
+                try:
+                    readline.write_history_file(history)
+                except OSError:
+                    # bpo-19891, bpo-41193: Home directory does not exist
+                    # or is not writable, or the filesystem is read-only.
+                    pass
+
+            atexit.register(write_history)
+
+    sys.__interactivehook__ = register_readline
+
+def venv(known_paths):
+    global PREFIXES, ENABLE_USER_SITE
+
+    env = os.environ
+    if sys.platform == 'darwin' and '__PYVENV_LAUNCHER__' in env:
+        executable = sys._base_executable = os.environ['__PYVENV_LAUNCHER__']
+    else:
+        executable = sys.executable
+    exe_dir, _ = os.path.split(os.path.abspath(executable))
+    site_prefix = os.path.dirname(exe_dir)
+    sys._home = None
+    conf_basename = 'pyvenv.cfg'
+    candidate_confs = [
+        conffile for conffile in (
+            os.path.join(exe_dir, conf_basename),
+            os.path.join(site_prefix, conf_basename)
+            )
+        if os.path.isfile(conffile)
+        ]
+
+    if candidate_confs:
+        virtual_conf = candidate_confs[0]
+        system_site = "true"
+        # Issue 25185: Use UTF-8, as that's what the venv module uses when
+        # writing the file.
+        with open(virtual_conf, encoding='utf-8') as f:
+            for line in f:
+                if '=' in line:
+                    key, _, value = line.partition('=')
+                    key = key.strip().lower()
+                    value = value.strip()
+                    if key == 'include-system-site-packages':
+                        system_site = value.lower()
+                    elif key == 'home':
+                        sys._home = value
+
+        sys.prefix = sys.exec_prefix = site_prefix
+
+        # Doing this here ensures venv takes precedence over user-site
+        addsitepackages(known_paths, [sys.prefix])
+
+        # addsitepackages will process site_prefix again if it's in PREFIXES,
+        # but that's ok; known_paths will prevent anything being added twice
+        if system_site == "true":
+            PREFIXES.insert(0, sys.prefix)
+        else:
+            PREFIXES = [sys.prefix]
+            ENABLE_USER_SITE = False
+
+    return known_paths
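+
+# Illustrative sketch of a pyvenv.cfg this function parses (values are
+# examples only):
+#
+#     home = /usr/bin
+#     include-system-site-packages = false
+#     version = 3.9.7
+#
+# With 'include-system-site-packages = false' the venv prefix replaces
+# PREFIXES and the user site directory is disabled, as implemented above.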
+
+
+def execsitecustomize():
+    """Run custom site specific code, if available."""
+    try:
+        try:
+            import sitecustomize
+        except ImportError as exc:
+            if exc.name == 'sitecustomize':
+                pass
+            else:
+                raise
+    except Exception as err:
+        if sys.flags.verbose:
+            sys.excepthook(*sys.exc_info())
+        else:
+            sys.stderr.write(
+                "Error in sitecustomize; set PYTHONVERBOSE for traceback:\n"
+                "%s: %s\n" %
+                (err.__class__.__name__, err))
+
+
+def execusercustomize():
+    """Run custom user specific code, if available."""
+    try:
+        try:
+            import usercustomize
+        except ImportError as exc:
+            if exc.name == 'usercustomize':
+                pass
+            else:
+                raise
+    except Exception as err:
+        if sys.flags.verbose:
+            sys.excepthook(*sys.exc_info())
+        else:
+            sys.stderr.write(
+                "Error in usercustomize; set PYTHONVERBOSE for traceback:\n"
+                "%s: %s\n" %
+                (err.__class__.__name__, err))
+
+
+def main():
+    """Add standard site-specific directories to the module search path.
+
+    This function is called automatically when this module is imported,
+    unless the python interpreter was started with the -S flag.
+    """
+    global ENABLE_USER_SITE
+
+    orig_path = sys.path[:]
+    known_paths = removeduppaths()
+    if orig_path != sys.path:
+        # removeduppaths() might make sys.path absolute.
+        # fix __file__ and __cached__ of already imported modules too.
+        abs_paths()
+
+    known_paths = venv(known_paths)
+    if ENABLE_USER_SITE is None:
+        ENABLE_USER_SITE = check_enableusersite()
+    known_paths = addusersitepackages(known_paths)
+    known_paths = addsitepackages(known_paths)
+    setquit()
+    setcopyright()
+    sethelper()
+    if not sys.flags.isolated:
+        enablerlcompleter()
+    execsitecustomize()
+    if ENABLE_USER_SITE:
+        execusercustomize()
+
+# Prevent extending of sys.path when python was started with -S and
+# site is imported later.
+if not sys.flags.no_site:
+    main()
+
+def _script():
+    help = """\
+    %s [--user-base] [--user-site]
+
+    Without arguments print some useful information
+    With arguments print the value of USER_BASE and/or USER_SITE separated
+    by '%s'.
+
+    Exit codes with --user-base or --user-site:
+      0 - user site directory is enabled
+      1 - user site directory is disabled by user
+      2 - user site directory is disabled by super user
+          or for security reasons
+     >2 - unknown error
+    """
+    args = sys.argv[1:]
+    if not args:
+        user_base = getuserbase()
+        user_site = getusersitepackages()
+        print("sys.path = [")
+        for dir in sys.path:
+            print("    %r," % (dir,))
+        print("]")
+        def exists(path):
+            if path is not None and os.path.isdir(path):
+                return "exists"
+            else:
+                return "doesn't exist"
+        print(f"USER_BASE: {user_base!r} ({exists(user_base)})")
+        print(f"USER_SITE: {user_site!r} ({exists(user_site)})")
+        print(f"ENABLE_USER_SITE: {ENABLE_USER_SITE!r}")
+        sys.exit(0)
+
+    buffer = []
+    if '--user-base' in args:
+        buffer.append(USER_BASE)
+    if '--user-site' in args:
+        buffer.append(USER_SITE)
+
+    if buffer:
+        print(os.pathsep.join(buffer))
+        if ENABLE_USER_SITE:
+            sys.exit(0)
+        elif ENABLE_USER_SITE is False:
+            sys.exit(1)
+        elif ENABLE_USER_SITE is None:
+            sys.exit(2)
+        else:
+            sys.exit(3)
+    else:
+        import textwrap
+        print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
+        sys.exit(10)
+
+if __name__ == '__main__':
+    _script()
Index: Lib/sysconfig.py
===================================================================
--- Lib/sysconfig.py	(nonexistent)
+++ Lib/sysconfig.py	(revision 5)
@@ -0,0 +1,803 @@
+"""Access to Python's configuration information."""
+
+import os
+import sys
+from os.path import pardir, realpath
+
+__all__ = [
+    'get_config_h_filename',
+    'get_config_var',
+    'get_config_vars',
+    'get_makefile_filename',
+    'get_path',
+    'get_path_names',
+    'get_paths',
+    'get_platform',
+    'get_python_version',
+    'get_scheme_names',
+    'parse_config_h',
+]
+
+# Keys for get_config_var() that are never converted to Python integers.
+_ALWAYS_STR = {
+    'MACOSX_DEPLOYMENT_TARGET',
+}
+
+_INSTALL_SCHEMES = {
+    'posix_prefix': {
+        'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}',
+        'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}',
+        'purelib': '{base}/lib32/python{py_version_short}/site-packages',
+        'platlib': '{platbase}/{platlibdir}/python{py_version_short}/site-packages',
+        'include':
+            '{installed_base}/include/python{py_version_short}{abiflags}',
+        'platinclude':
+            '{installed_platbase}/include/python{py_version_short}{abiflags}',
+        'scripts': '{base}/bin/32',
+        'data': '{base}',
+        },
+    'posix_home': {
+        'stdlib': '{installed_base}/lib32/python',
+        'platstdlib': '{base}/lib32/python',
+        'purelib': '{base}/lib32/python',
+        'platlib': '{base}/lib32/python',
+        'include': '{installed_base}/include/python',
+        'platinclude': '{installed_base}/include/python',
+        'scripts': '{base}/bin/32',
+        'data': '{base}',
+        },
+    'nt': {
+        'stdlib': '{installed_base}/Lib',
+        'platstdlib': '{base}/Lib',
+        'purelib': '{base}/Lib/site-packages',
+        'platlib': '{base}/Lib/site-packages',
+        'include': '{installed_base}/Include',
+        'platinclude': '{installed_base}/Include',
+        'scripts': '{base}/Scripts',
+        'data': '{base}',
+        },
+    }
+
+
+# NOTE: site.py has copy of this function.
+# Sync it when modify this function.
+def _getuserbase():
+    env_base = os.environ.get("PYTHONUSERBASE", None)
+    if env_base:
+        return env_base
+
+    # VxWorks has no home directories
+    if sys.platform == "vxworks":
+        return None
+
+    def joinuser(*args):
+        return os.path.expanduser(os.path.join(*args))
+
+    if os.name == "nt":
+        base = os.environ.get("APPDATA") or "~"
+        return joinuser(base, "Python")
+
+    if sys.platform == "darwin" and sys._framework:
+        return joinuser("~", "Library", sys._framework,
+                        f"{sys.version_info[0]}.{sys.version_info[1]}")
+
+    return joinuser("~", ".local")
+
+_HAS_USER_BASE = (_getuserbase() is not None)
+
+if _HAS_USER_BASE:
+    _INSTALL_SCHEMES |= {
+        # NOTE: When modifying "purelib" scheme, update site._get_path() too.
+        'nt_user': {
+            'stdlib': '{userbase}/Python{py_version_nodot_plat}',
+            'platstdlib': '{userbase}/Python{py_version_nodot_plat}',
+            'purelib': '{userbase}/Python{py_version_nodot_plat}/site-packages',
+            'platlib': '{userbase}/Python{py_version_nodot_plat}/site-packages',
+            'include': '{userbase}/Python{py_version_nodot_plat}/Include',
+            'scripts': '{userbase}/Python{py_version_nodot_plat}/Scripts',
+            'data': '{userbase}',
+            },
+        'posix_user': {
+            'stdlib': '{userbase}/{platlibdir}/python{py_version_short}',
+            'platstdlib': '{userbase}/{platlibdir}/python{py_version_short}',
+            'purelib': '{userbase}/lib32/python{py_version_short}/site-packages',
+            'platlib': '{userbase}/lib32/python{py_version_short}/site-packages',
+            'include': '{userbase}/include/python{py_version_short}',
+            'scripts': '{userbase}/bin/32',
+            'data': '{userbase}',
+            },
+        'osx_framework_user': {
+            'stdlib': '{userbase}/lib/python',
+            'platstdlib': '{userbase}/lib/python',
+            'purelib': '{userbase}/lib/python/site-packages',
+            'platlib': '{userbase}/lib/python/site-packages',
+            'include': '{userbase}/include/python{py_version_short}',
+            'scripts': '{userbase}/bin',
+            'data': '{userbase}',
+            },
+    }
+
+_SCHEME_KEYS = ('stdlib', 'platstdlib', 'purelib', 'platlib', 'include',
+                'scripts', 'data')
+
+_PY_VERSION = sys.version.split()[0]
+_PY_VERSION_SHORT = f'{sys.version_info[0]}.{sys.version_info[1]}'
+_PY_VERSION_SHORT_NO_DOT = f'{sys.version_info[0]}{sys.version_info[1]}'
+_PREFIX = os.path.normpath(sys.prefix)
+_BASE_PREFIX = os.path.normpath(sys.base_prefix)
+_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+_BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
+_CONFIG_VARS = None
+_USER_BASE = None
+
+# Regexes needed for parsing Makefile (and similar syntaxes,
+# like old-style Setup files).
+_variable_rx = r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)"
+_findvar1_rx = r"\$\(([A-Za-z][A-Za-z0-9_]*)\)"
+_findvar2_rx = r"\${([A-Za-z][A-Za-z0-9_]*)}"
+
+
+def _safe_realpath(path):
+    try:
+        return realpath(path)
+    except OSError:
+        return path
+
+if sys.executable:
+    _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
+else:
+    # sys.executable can be empty if argv[0] has been changed and Python is
+    # unable to retrieve the real program name
+    _PROJECT_BASE = _safe_realpath(os.getcwd())
+
+if (os.name == 'nt' and
+    _PROJECT_BASE.lower().endswith(('\\pcbuild\\win32', '\\pcbuild\\amd64'))):
+    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
+
+# set for cross builds
+if "_PYTHON_PROJECT_BASE" in os.environ:
+    _PROJECT_BASE = _safe_realpath(os.environ["_PYTHON_PROJECT_BASE"])
+
+def _is_python_source_dir(d):
+    for fn in ("Setup", "Setup.local"):
+        if os.path.isfile(os.path.join(d, "Modules", fn)):
+            return True
+    return False
+
+_sys_home = getattr(sys, '_home', None)
+
+if os.name == 'nt':
+    def _fix_pcbuild(d):
+        if d and os.path.normcase(d).startswith(
+                os.path.normcase(os.path.join(_PREFIX, "PCbuild"))):
+            return _PREFIX
+        return d
+    _PROJECT_BASE = _fix_pcbuild(_PROJECT_BASE)
+    _sys_home = _fix_pcbuild(_sys_home)
+
+def is_python_build(check_home=False):
+    if check_home and _sys_home:
+        return _is_python_source_dir(_sys_home)
+    return _is_python_source_dir(_PROJECT_BASE)
+
+_PYTHON_BUILD = is_python_build(True)
+
+if _PYTHON_BUILD:
+    for scheme in ('posix_prefix', 'posix_home'):
+        # On POSIX-y platforms, Python will:
+        # - Build from .h files in 'headers' (which is only added to the
+        #   scheme when building CPython)
+        # - Install .h files to 'include'
+        scheme = _INSTALL_SCHEMES[scheme]
+        scheme['headers'] = scheme['include']
+        scheme['include'] = '{srcdir}/Include'
+        scheme['platinclude'] = '{projectbase}/.'
+
+
+def _subst_vars(s, local_vars):
+    try:
+        return s.format(**local_vars)
+    except KeyError as var:
+        try:
+            return s.format(**os.environ)
+        except KeyError:
+            raise AttributeError(f'{var}') from None
+
+def _extend_dict(target_dict, other_dict):
+    target_keys = target_dict.keys()
+    for key, value in other_dict.items():
+        if key in target_keys:
+            continue
+        target_dict[key] = value
+
+
+def _expand_vars(scheme, vars):
+    res = {}
+    if vars is None:
+        vars = {}
+    _extend_dict(vars, get_config_vars())
+
+    for key, value in _INSTALL_SCHEMES[scheme].items():
+        if os.name in ('posix', 'nt'):
+            value = os.path.expanduser(value)
+        res[key] = os.path.normpath(_subst_vars(value, vars))
+    return res
+
+
+def _get_preferred_schemes():
+    if os.name == 'nt':
+        return {
+            'prefix': 'nt',
+            'home': 'posix_home',
+            'user': 'nt_user',
+        }
+    if sys.platform == 'darwin' and sys._framework:
+        return {
+            'prefix': 'posix_prefix',
+            'home': 'posix_home',
+            'user': 'osx_framework_user',
+        }
+    return {
+        'prefix': 'posix_prefix',
+        'home': 'posix_home',
+        'user': 'posix_user',
+    }
+
+
+def get_preferred_scheme(key):
+    scheme = _get_preferred_schemes()[key]
+    if scheme not in _INSTALL_SCHEMES:
+        raise ValueError(
+            f"{key!r} returned {scheme!r}, which is not a valid scheme "
+            f"on this platform"
+        )
+    return scheme
+
+
+def get_default_scheme():
+    return get_preferred_scheme('prefix')
+
+
+def _parse_makefile(filename, vars=None, keep_unresolved=True):
+    """Parse a Makefile-style file.
+
+    A dictionary containing name/value pairs is returned.  If an
+    optional dictionary is passed in as the second argument, it is
+    used instead of a new dictionary.
+    """
+    import re
+
+    if vars is None:
+        vars = {}
+    done = {}
+    notdone = {}
+
+    with open(filename, encoding=sys.getfilesystemencoding(),
+              errors="surrogateescape") as f:
+        lines = f.readlines()
+
+    for line in lines:
+        if line.startswith('#') or line.strip() == '':
+            continue
+        m = re.match(_variable_rx, line)
+        if m:
+            n, v = m.group(1, 2)
+            v = v.strip()
+            # `$$' is a literal `$' in make
+            tmpv = v.replace('$$', '')
+
+            if "$" in tmpv:
+                notdone[n] = v
+            else:
+                try:
+                    if n in _ALWAYS_STR:
+                        raise ValueError
+
+                    v = int(v)
+                except ValueError:
+                    # insert literal `$'
+                    done[n] = v.replace('$$', '$')
+                else:
+                    done[n] = v
+
+    # do variable interpolation here
+    variables = list(notdone.keys())
+
+    # Variables with a 'PY_' prefix in the makefile. These need to
+    # be made available without that prefix through sysconfig.
+    # Special care is needed to ensure that variable expansion works, even
+    # if the expansion uses the name without a prefix.
+    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
+
+    while len(variables) > 0:
+        for name in tuple(variables):
+            value = notdone[name]
+            m1 = re.search(_findvar1_rx, value)
+            m2 = re.search(_findvar2_rx, value)
+            if m1 and m2:
+                m = m1 if m1.start() < m2.start() else m2
+            else:
+                m = m1 if m1 else m2
+            if m is not None:
+                n = m.group(1)
+                found = True
+                if n in done:
+                    item = str(done[n])
+                elif n in notdone:
+                    # get it on a subsequent round
+                    found = False
+                elif n in os.environ:
+                    # do it like make: fall back to environment
+                    item = os.environ[n]
+
+                elif n in renamed_variables:
+                    if (name.startswith('PY_') and
+                        name[3:] in renamed_variables):
+                        item = ""
+
+                    elif 'PY_' + n in notdone:
+                        found = False
+
+                    else:
+                        item = str(done['PY_' + n])
+
+                else:
+                    done[n] = item = ""
+
+                if found:
+                    after = value[m.end():]
+                    value = value[:m.start()] + item + after
+                    if "$" in after:
+                        notdone[name] = value
+                    else:
+                        try:
+                            if name in _ALWAYS_STR:
+                                raise ValueError
+                            value = int(value)
+                        except ValueError:
+                            done[name] = value.strip()
+                        else:
+                            done[name] = value
+                        variables.remove(name)
+
+                        if name.startswith('PY_') \
+                        and name[3:] in renamed_variables:
+
+                            name = name[3:]
+                            if name not in done:
+                                done[name] = value
+
+            else:
+                # Adds unresolved variables to the done dict.
+                # This is disabled when called from distutils.sysconfig
+                if keep_unresolved:
+                    done[name] = value
+                # bogus variable reference (e.g. "prefix=$/opt/python");
+                # just drop it since we can't deal
+                variables.remove(name)
+
+    # strip spurious spaces
+    for k, v in done.items():
+        if isinstance(v, str):
+            done[k] = v.strip()
+
+    # save the results in the global dictionary
+    vars.update(done)
+    return vars
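+
+# Illustrative sketch (made-up Makefile fragment):
+#
+#     prefix= /usr
+#     CC=     gcc -pthread
+#     LIBDIR= $(prefix)/lib
+#
+# yields {'prefix': '/usr', 'CC': 'gcc -pthread', 'LIBDIR': '/usr/lib'}
+# once the interpolation loop above has resolved $(prefix).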
+
+
+def get_makefile_filename():
+    """Return the path of the Makefile."""
+    if _PYTHON_BUILD:
+        return os.path.join(_sys_home or _PROJECT_BASE, "Makefile")
+    if hasattr(sys, 'abiflags'):
+        config_dir_name = f'config-{_PY_VERSION_SHORT}{sys.abiflags}'
+    else:
+        config_dir_name = 'config'
+    if hasattr(sys.implementation, '_multiarch'):
+        config_dir_name += f'-{sys.implementation._multiarch}'
+    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
+
+
+def _get_sysconfigdata_name():
+    multiarch = getattr(sys.implementation, '_multiarch', '')
+    return os.environ.get(
+        '_PYTHON_SYSCONFIGDATA_NAME',
+        f'_sysconfigdata_{sys.abiflags}_{sys.platform}_{multiarch}',
+    )
+
+
+def _generate_posix_vars():
+    """Generate the Python module containing build-time variables."""
+    import pprint
+    vars = {}
+    # load the installed Makefile:
+    makefile = get_makefile_filename()
+    try:
+        _parse_makefile(makefile, vars)
+    except OSError as e:
+        msg = f"invalid Python installation: unable to open {makefile}"
+        if hasattr(e, "strerror"):
+            msg = f"{msg} ({e.strerror})"
+        raise OSError(msg)
+    # load the installed pyconfig.h:
+    config_h = get_config_h_filename()
+    try:
+        with open(config_h, encoding="utf-8") as f:
+            parse_config_h(f, vars)
+    except OSError as e:
+        msg = f"invalid Python installation: unable to open {config_h}"
+        if hasattr(e, "strerror"):
+            msg = f"{msg} ({e.strerror})"
+        raise OSError(msg)
+    # On AIX, there are wrong paths to the linker scripts in the Makefile
+    # -- these paths are relative to the Python source, but when installed
+    # the scripts are in another directory.
+    if _PYTHON_BUILD:
+        vars['BLDSHARED'] = vars['LDSHARED']
+
+    # There's a chicken-and-egg situation on OS X with regards to the
+    # _sysconfigdata module after the changes introduced by #15298:
+    # get_config_vars() is called by get_platform() as part of the
+    # `make pybuilddir.txt` target -- which is a precursor to the
+    # _sysconfigdata.py module being constructed.  Unfortunately,
+    # get_config_vars() eventually calls _init_posix(), which attempts
+    # to import _sysconfigdata, which we won't have built yet.  In order
+    # for _init_posix() to work, if we're on Darwin, just mock up the
+    # _sysconfigdata module manually and populate it with the build vars.
+    # This is more than sufficient for ensuring the subsequent call to
+    # get_platform() succeeds.
+    name = _get_sysconfigdata_name()
+    if 'darwin' in sys.platform:
+        import types
+        module = types.ModuleType(name)
+        module.build_time_vars = vars
+        sys.modules[name] = module
+
+    pybuilddir = f'build/lib.{get_platform()}-{_PY_VERSION_SHORT}'
+    if hasattr(sys, "gettotalrefcount"):
+        pybuilddir += '-pydebug'
+    os.makedirs(pybuilddir, exist_ok=True)
+    destfile = os.path.join(pybuilddir, name + '.py')
+
+    with open(destfile, 'w', encoding='utf8') as f:
+        f.write('# system configuration generated and used by'
+                ' the sysconfig module\n')
+        f.write('build_time_vars = ')
+        pprint.pprint(vars, stream=f)
+
+    # Create file used for sys.path fixup -- see Modules/getpath.c
+    with open('pybuilddir.txt', 'w', encoding='utf8') as f:
+        f.write(pybuilddir)
+
+def _init_posix(vars):
+    """Initialize the module as appropriate for POSIX systems."""
+    # _sysconfigdata is generated at build time, see _generate_posix_vars()
+    name = _get_sysconfigdata_name()
+    _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0)
+    build_time_vars = _temp.build_time_vars
+    vars.update(build_time_vars)
+
+def _init_non_posix(vars):
+    """Initialize the module as appropriate for NT"""
+    # set basic install directories
+    import _imp
+    vars['LIBDEST'] = get_path('stdlib')
+    vars['BINLIBDEST'] = get_path('platstdlib')
+    vars['INCLUDEPY'] = get_path('include')
+    vars['EXT_SUFFIX'] = _imp.extension_suffixes()[0]
+    vars['EXE'] = '.exe'
+    vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
+    vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
+    vars['TZPATH'] = ''
+
+#
+# public APIs
+#
+
+
+def parse_config_h(fp, vars=None):
+    """Parse a config.h-style file.
+
+    A dictionary containing name/value pairs is returned.  If an
+    optional dictionary is passed in as the second argument, it is
+    used instead of a new dictionary.
+    """
+    if vars is None:
+        vars = {}
+    import re
+    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
+    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
+
+    while True:
+        line = fp.readline()
+        if not line:
+            break
+        m = define_rx.match(line)
+        if m:
+            n, v = m.group(1, 2)
+            try:
+                if n in _ALWAYS_STR:
+                    raise ValueError
+                v = int(v)
+            except ValueError:
+                pass
+            vars[n] = v
+        else:
+            m = undef_rx.match(line)
+            if m:
+                vars[m.group(1)] = 0
+    return vars
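+
+# Illustrative sketch of the pyconfig.h lines this understands:
+#
+#     #define HAVE_UNISTD_H 1            -> vars['HAVE_UNISTD_H'] = 1
+#     #define VERSION "3.9"              -> vars['VERSION'] = '"3.9"'
+#     /* #undef HAVE_LIBREADLINE */      -> vars['HAVE_LIBREADLINE'] = 0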
+
+
+def get_config_h_filename():
+    """Return the path of pyconfig.h."""
+    if _PYTHON_BUILD:
+        if os.name == "nt":
+            inc_dir = os.path.join(_sys_home or _PROJECT_BASE, "PC")
+        else:
+            inc_dir = _sys_home or _PROJECT_BASE
+    else:
+        inc_dir = get_path('platinclude')
+    return os.path.join(inc_dir, 'pyconfig.h')
+
+
+def get_scheme_names():
+    """Return a tuple containing the schemes names."""
+    return tuple(sorted(_INSTALL_SCHEMES))
+
+
+def get_path_names():
+    """Return a tuple containing the paths names."""
+    return _SCHEME_KEYS
+
+
+def get_paths(scheme=get_default_scheme(), vars=None, expand=True):
+    """Return a mapping containing an install scheme.
+
+    ``scheme`` is the install scheme name. If not provided, it will
+    return the default scheme for the current platform.
+    """
+    if expand:
+        return _expand_vars(scheme, vars)
+    else:
+        return _INSTALL_SCHEMES[scheme]
+
+
+def get_path(name, scheme=get_default_scheme(), vars=None, expand=True):
+    """Return a path corresponding to the scheme.
+
+    ``scheme`` is the install scheme name.
+    """
+    return get_paths(scheme, vars, expand)[name]
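+
+# Illustrative usage, assuming sys.prefix == '/usr' and Python 3.9 with the
+# 'posix_prefix' scheme defined above (purelib and scripts use lib32 and
+# bin/32):
+#
+#     >>> import sysconfig
+#     >>> sysconfig.get_path('purelib')
+#     '/usr/lib32/python3.9/site-packages'
+#     >>> sysconfig.get_paths()['scripts']
+#     '/usr/bin/32'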
+
+
+def get_config_vars(*args):
+    """With no arguments, return a dictionary of all configuration
+    variables relevant for the current platform.
+
+    On Unix, this means every variable defined in Python's installed Makefile;
+    on Windows it's a much smaller set.
+
+    With arguments, return a list of values that result from looking up
+    each argument in the configuration variable dictionary.
+    """
+    global _CONFIG_VARS
+    if _CONFIG_VARS is None:
+        _CONFIG_VARS = {}
+        # Normalized versions of prefix and exec_prefix are handy to have;
+        # in fact, these are the standard versions used most places in the
+        # Distutils.
+        _CONFIG_VARS['prefix'] = _PREFIX
+        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
+        _CONFIG_VARS['py_version'] = _PY_VERSION
+        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
+        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION_SHORT_NO_DOT
+        _CONFIG_VARS['installed_base'] = _BASE_PREFIX
+        _CONFIG_VARS['base'] = _PREFIX
+        _CONFIG_VARS['installed_platbase'] = _BASE_EXEC_PREFIX
+        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
+        _CONFIG_VARS['projectbase'] = _PROJECT_BASE
+        _CONFIG_VARS['platlibdir'] = sys.platlibdir
+        try:
+            _CONFIG_VARS['abiflags'] = sys.abiflags
+        except AttributeError:
+            # sys.abiflags may not be defined on all platforms.
+            _CONFIG_VARS['abiflags'] = ''
+        try:
+            _CONFIG_VARS['py_version_nodot_plat'] = sys.winver.replace('.', '')
+        except AttributeError:
+            _CONFIG_VARS['py_version_nodot_plat'] = ''
+
+        if os.name == 'nt':
+            _init_non_posix(_CONFIG_VARS)
+        if os.name == 'posix':
+            _init_posix(_CONFIG_VARS)
+        # For backward compatibility, see issue19555
+        SO = _CONFIG_VARS.get('EXT_SUFFIX')
+        if SO is not None:
+            _CONFIG_VARS['SO'] = SO
+        if _HAS_USER_BASE:
+            # Setting 'userbase' is done below the call to the
+            # init function to enable using 'get_config_var' in
+            # the init-function.
+            _CONFIG_VARS['userbase'] = _getuserbase()
+
+        # Always convert srcdir to an absolute path
+        srcdir = _CONFIG_VARS.get('srcdir', _PROJECT_BASE)
+        if os.name == 'posix':
+            if _PYTHON_BUILD:
+                # If srcdir is a relative path (typically '.' or '..')
+                # then it should be interpreted relative to the directory
+                # containing Makefile.
+                base = os.path.dirname(get_makefile_filename())
+                srcdir = os.path.join(base, srcdir)
+            else:
+                # srcdir is not meaningful since the installation is
+                # spread about the filesystem.  We choose the
+                # directory containing the Makefile since we know it
+                # exists.
+                srcdir = os.path.dirname(get_makefile_filename())
+        _CONFIG_VARS['srcdir'] = _safe_realpath(srcdir)
+
+        # OS X platforms require special customization to handle
+        # multi-architecture, multi-os-version installers
+        if sys.platform == 'darwin':
+            import _osx_support
+            _osx_support.customize_config_vars(_CONFIG_VARS)
+
+    if args:
+        vals = []
+        for name in args:
+            vals.append(_CONFIG_VARS.get(name))
+        return vals
+    else:
+        return _CONFIG_VARS
+
+
+def get_config_var(name):
+    """Return the value of a single variable using the dictionary returned by
+    'get_config_vars()'.
+
+    Equivalent to get_config_vars().get(name)
+    """
+    if name == 'SO':
+        import warnings
+        warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2)
+    return get_config_vars().get(name)
+
+
+def get_platform():
+    """Return a string that identifies the current platform.
+
+    This is used mainly to distinguish platform-specific build directories and
+    platform-specific built distributions.  Typically includes the OS name and
+    version and the architecture (as supplied by 'os.uname()'), although the
+    exact information included depends on the OS; on Linux, the kernel version
+    isn't particularly important.
+
+    Examples of returned values:
+       linux-i586
+       linux-alpha (?)
+       solaris-2.6-sun4u
+
+    Windows will return one of:
+       win-amd64 (64-bit Windows on AMD64, aka x86_64, Intel64, EM64T, etc.)
+       win32 (all others - specifically, sys.platform is returned)
+
+    For other non-POSIX platforms, currently just returns 'sys.platform'.
+
+    """
+    if os.name == 'nt':
+        if 'amd64' in sys.version.lower():
+            return 'win-amd64'
+        if '(arm)' in sys.version.lower():
+            return 'win-arm32'
+        if '(arm64)' in sys.version.lower():
+            return 'win-arm64'
+        return sys.platform
+
+    if os.name != "posix" or not hasattr(os, 'uname'):
+        # XXX what about the architecture? NT is Intel or Alpha
+        return sys.platform
+
+    # Set for cross builds explicitly
+    if "_PYTHON_HOST_PLATFORM" in os.environ:
+        return os.environ["_PYTHON_HOST_PLATFORM"]
+
+    # Try to distinguish various flavours of Unix
+    osname, host, release, version, machine = os.uname()
+
+    # Convert the OS name to lowercase, remove '/' characters, and translate
+    # spaces (for "Power Macintosh")
+    osname = osname.lower().replace('/', '')
+    machine = machine.replace(' ', '_')
+    machine = machine.replace('/', '-')
+
+    if osname[:5] == "linux":
+        # At least on Linux/Intel, 'machine' is the processor --
+        # i386, etc.
+        # XXX what about Alpha, SPARC, etc?
+        return f"{osname}-{machine}"
+    elif osname[:5] == "sunos":
+        if release[0] >= "5":           # SunOS 5 == Solaris 2
+            osname = "solaris"
+            release = f"{int(release[0]) - 3}.{release[2:]}"
+            # We can't use "platform.architecture()[0]" because of a
+            # bootstrap problem. We use a dict to get an error
+            # if something suspicious happens.
+            bitness = {2147483647:"32bit", 9223372036854775807:"64bit"}
+            machine += f".{bitness[sys.maxsize]}"
+        # fall through to standard osname-release-machine representation
+    elif osname[:3] == "aix":
+        from _aix_support import aix_platform
+        return aix_platform()
+    elif osname[:6] == "cygwin":
+        osname = "cygwin"
+        import re
+        rel_re = re.compile(r'[\d.]+')
+        m = rel_re.match(release)
+        if m:
+            release = m.group()
+    elif osname[:6] == "darwin":
+        import _osx_support
+        osname, release, machine = _osx_support.get_platform_osx(
+                                            get_config_vars(),
+                                            osname, release, machine)
+
+    return f"{osname}-{release}-{machine}"
+
+
+def get_python_version():
+    return _PY_VERSION_SHORT
+
+
+def expand_makefile_vars(s, vars):
+    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
+    'string' according to 'vars' (a dictionary mapping variable names to
+    values).  Variables not present in 'vars' are silently expanded to the
+    empty string.  The variable values in 'vars' should not contain further
+    variable expansions; if 'vars' is the output of 'parse_makefile()',
+    you're fine.  Returns a variable-expanded version of 's'.
+    """
+    import re
+
+    # This algorithm does multiple expansion, so if vars['foo'] contains
+    # "${bar}", it will expand ${foo} to ${bar}, and then expand
+    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
+    # 'parse_makefile()', which takes care of such expansions eagerly,
+    # according to make's variable expansion semantics.
+
+    while True:
+        m = re.search(_findvar1_rx, s) or re.search(_findvar2_rx, s)
+        if m:
+            (beg, end) = m.span()
+            s = s[0:beg] + vars.get(m.group(1)) + s[end:]
+        else:
+            break
+    return s
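+
+# Illustrative sketch:
+#
+#     expand_makefile_vars('$(BINDIR)/python${EXE}',
+#                          {'BINDIR': '/usr/bin', 'EXE': ''})
+#     -> '/usr/bin/python'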
+
+
+def _print_dict(title, data):
+    for index, (key, value) in enumerate(sorted(data.items())):
+        if index == 0:
+            print(f'{title}: ')
+        print(f'\t{key} = "{value}"')
+
+
+def _main():
+    """Display all information sysconfig detains."""
+    if '--generate-posix-vars' in sys.argv:
+        _generate_posix_vars()
+        return
+    print(f'Platform: "{get_platform()}"')
+    print(f'Python version: "{get_python_version()}"')
+    print(f'Current installation scheme: "{get_default_scheme()}"')
+    print()
+    _print_dict('Paths', get_paths())
+    print()
+    _print_dict('Variables', get_config_vars())
+
+
+if __name__ == '__main__':
+    _main()
Index: Lib
===================================================================
--- Lib	(nonexistent)
+++ Lib	(revision 5)

Property changes on: Lib
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,73 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~
Index: setup.py
===================================================================
--- setup.py	(nonexistent)
+++ setup.py	(revision 5)
@@ -0,0 +1,2758 @@
+# Autodetecting setup.py script for building the Python extensions
+
+import argparse
+import importlib._bootstrap
+import importlib.machinery
+import importlib.util
+import logging
+import os
+import re
+import sys
+import sysconfig
+import warnings
+from glob import glob, escape
+import _osx_support
+
+
+try:
+    import subprocess
+    del subprocess
+    SUBPROCESS_BOOTSTRAP = False
+except ImportError:
+    # Bootstrap Python: distutils.spawn uses subprocess to build C extensions,
+    # subprocess requires C extensions built by setup.py like _posixsubprocess.
+    #
+    # Use _bootsubprocess which only uses the os module.
+    #
+    # It is dropped from sys.modules as soon as all C extension modules
+    # are built.
+    import _bootsubprocess
+    sys.modules['subprocess'] = _bootsubprocess
+    del _bootsubprocess
+    SUBPROCESS_BOOTSTRAP = True
+
+
+with warnings.catch_warnings():
+    # bpo-41282 (PEP 632) deprecated distutils but setup.py still uses it
+    warnings.filterwarnings(
+        "ignore",
+        "The distutils package is deprecated",
+        DeprecationWarning
+    )
+    warnings.filterwarnings(
+        "ignore",
+        "The distutils.sysconfig module is deprecated, use sysconfig instead",
+        DeprecationWarning
+    )
+
+    from distutils.command.build_ext import build_ext
+    from distutils.command.build_scripts import build_scripts
+    from distutils.command.install import install
+    from distutils.command.install_lib import install_lib
+    from distutils.core import Extension, setup
+    from distutils.errors import CCompilerError, DistutilsError
+    from distutils.spawn import find_executable
+
+
+# Compile extensions used to test Python?
+TEST_EXTENSIONS = (sysconfig.get_config_var('TEST_MODULES') == 'yes')
+
+# This global variable is used to hold the list of modules to be disabled.
+DISABLED_MODULE_LIST = []
+
+# --list-module-names option used by Tools/scripts/generate_module_names.py
+LIST_MODULE_NAMES = False
+
+
+logging.basicConfig(format='%(message)s', level=logging.INFO)
+log = logging.getLogger('setup')
+
+
+def get_platform():
+    # Cross compiling
+    if "_PYTHON_HOST_PLATFORM" in os.environ:
+        return os.environ["_PYTHON_HOST_PLATFORM"]
+
+    # Get value of sys.platform
+    if sys.platform.startswith('osf1'):
+        return 'osf1'
+    return sys.platform
+
+
+CROSS_COMPILING = ("_PYTHON_HOST_PLATFORM" in os.environ)
+HOST_PLATFORM = get_platform()
+MS_WINDOWS = (HOST_PLATFORM == 'win32')
+CYGWIN = (HOST_PLATFORM == 'cygwin')
+MACOS = (HOST_PLATFORM == 'darwin')
+AIX = (HOST_PLATFORM.startswith('aix'))
+VXWORKS = ('vxworks' in HOST_PLATFORM)
+CC = os.environ.get("CC")
+if not CC:
+    CC = sysconfig.get_config_var("CC")
+
+
+SUMMARY = """
+Python is an interpreted, interactive, object-oriented programming
+language. It is often compared to Tcl, Perl, Scheme or Java.
+
+Python combines remarkable power with very clear syntax. It has
+modules, classes, exceptions, very high level dynamic data types, and
+dynamic typing. There are interfaces to many system calls and
+libraries, as well as to various windowing systems (X11, Motif, Tk,
+Mac, MFC). New built-in modules are easily written in C or C++. Python
+is also usable as an extension language for applications that need a
+programmable interface.
+
+The Python implementation is portable: it runs on many brands of UNIX,
+on Windows, DOS, Mac, Amiga... If your favorite system isn't
+listed here, it may still be supported, if there's a C compiler for
+it. Ask around on comp.lang.python -- or just try compiling Python
+yourself.
+"""
+
+CLASSIFIERS = """
+Development Status :: 6 - Mature
+License :: OSI Approved :: Python Software Foundation License
+Natural Language :: English
+Programming Language :: C
+Programming Language :: Python
+Topic :: Software Development
+"""
+
+
+def run_command(cmd):
+    status = os.system(cmd)
+    return os.waitstatus_to_exitcode(status)
+
+
+# Set common compiler and linker flags derived from the Makefile,
+# reserved for building the interpreter and the stdlib modules.
+# See bpo-21121 and bpo-35257
+def set_compiler_flags(compiler_flags, compiler_py_flags_nodist):
+    flags = sysconfig.get_config_var(compiler_flags)
+    py_flags_nodist = sysconfig.get_config_var(compiler_py_flags_nodist)
+    sysconfig.get_config_vars()[compiler_flags] = flags + ' ' + py_flags_nodist
+
+
+def add_dir_to_list(dirlist, dir):
+    """Add the directory 'dir' to the list 'dirlist' (after any relative
+    directories) if:
+
+    1) 'dir' is not already in 'dirlist'
+    2) 'dir' actually exists, and is a directory.
+    """
+    if dir is None or not os.path.isdir(dir) or dir in dirlist:
+        return
+    for i, path in enumerate(dirlist):
+        if not os.path.isabs(path):
+            dirlist.insert(i + 1, dir)
+            return
+    dirlist.insert(0, dir)
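+
+# Illustrative sketch (made-up directories, assuming /opt/include exists):
+#
+#     dirs = ['.', '/usr/include']
+#     add_dir_to_list(dirs, '/opt/include')
+#     # dirs is now ['.', '/opt/include', '/usr/include']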
+
+
+def sysroot_paths(make_vars, subdirs):
+    """Get the paths of sysroot sub-directories.
+
+    * make_vars: a sequence of names of variables of the Makefile where
+      sysroot may be set.
+    * subdirs: a sequence of names of subdirectories used as the location for
+      headers or libraries.
+    """
+
+    dirs = []
+    for var_name in make_vars:
+        var = sysconfig.get_config_var(var_name)
+        if var is not None:
+            m = re.search(r'--sysroot=([^"]\S*|"[^"]+")', var)
+            if m is not None:
+                sysroot = m.group(1).strip('"')
+                for subdir in subdirs:
+                    if os.path.isabs(subdir):
+                        subdir = subdir[1:]
+                    path = os.path.join(sysroot, subdir)
+                    if os.path.isdir(path):
+                        dirs.append(path)
+                break
+    return dirs
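+
+# Illustrative sketch, assuming a cross build whose Makefile has
+# CC="... --sysroot=/opt/sysroot ..." and the sysroot provides those
+# subdirectories:
+#
+#     sysroot_paths(('CC', 'CFLAGS'), ('/usr/include', '/usr/lib'))
+#     -> ['/opt/sysroot/usr/include', '/opt/sysroot/usr/lib']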
+
+
+MACOS_SDK_ROOT = None
+MACOS_SDK_SPECIFIED = None
+
+def macosx_sdk_root():
+    """Return the directory of the current macOS SDK.
+
+    If no SDK was explicitly configured, call the compiler to find which
+    include file paths are being searched by default.  Use '/' if the
+    compiler is searching /usr/include (meaning system header files are
+    installed) or use the root of an SDK if that is being searched.
+    (The SDK may be supplied via Xcode or via the Command Line Tools).
+    The SDK paths used by Apple-supplied tool chains depend on the
+    setting of various variables; see the xcrun man page for more info.
+    Also sets MACOS_SDK_SPECIFIED for use by macosx_sdk_specified().
+    """
+    global MACOS_SDK_ROOT, MACOS_SDK_SPECIFIED
+
+    # If already called, return cached result.
+    if MACOS_SDK_ROOT:
+        return MACOS_SDK_ROOT
+
+    cflags = sysconfig.get_config_var('CFLAGS')
+    m = re.search(r'-isysroot\s*(\S+)', cflags)
+    if m is not None:
+        MACOS_SDK_ROOT = m.group(1)
+        MACOS_SDK_SPECIFIED = MACOS_SDK_ROOT != '/'
+    else:
+        MACOS_SDK_ROOT = _osx_support._default_sysroot(
+            sysconfig.get_config_var('CC'))
+        MACOS_SDK_SPECIFIED = False
+
+    return MACOS_SDK_ROOT
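+
+# Illustrative sketch: if CFLAGS contains
+# '-isysroot /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk', that SDK
+# path is returned and MACOS_SDK_SPECIFIED is set to True; without an
+# -isysroot option the result falls back to
+# _osx_support._default_sysroot(CC) and MACOS_SDK_SPECIFIED is False.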
+
+
+def macosx_sdk_specified():
+    """Returns true if an SDK was explicitly configured.
+
+    True if an SDK was selected at configure time, either by specifying
+    --enable-universalsdk=(something other than no or /) or by adding a
+    -isysroot option to CFLAGS.  In some cases, like when making
+    decisions about macOS Tk framework paths, we need to be able to
+    know whether the user explicitly asked to build with an SDK versus
+    the implicit use of an SDK when header files are no longer
+    installed on a running system by the Command Line Tools.
+    """
+    global MACOS_SDK_SPECIFIED
+
+    # If already called, return cached result.
+    if MACOS_SDK_SPECIFIED:
+        return MACOS_SDK_SPECIFIED
+
+    # Find the sdk root and set MACOS_SDK_SPECIFIED
+    macosx_sdk_root()
+    return MACOS_SDK_SPECIFIED
+
+
+def is_macosx_sdk_path(path):
+    """
+    Returns True if 'path' can be located in a macOS SDK
+    """
+    return ( (path.startswith('/usr/') and not path.startswith('/usr/local'))
+                or path.startswith('/System/Library')
+                or path.startswith('/System/iOSSupport') )
+
+
+def grep_headers_for(function, headers):
+    for header in headers:
+        with open(header, 'r', errors='surrogateescape') as f:
+            if function in f.read():
+                return True
+    return False
+
+
+def find_file(filename, std_dirs, paths):
+    """Searches for the directory where a given file is located,
+    and returns a possibly-empty list of additional directories, or None
+    if the file couldn't be found at all.
+
+    'filename' is the name of a file, such as readline.h or libcrypto.a.
+    'std_dirs' is the list of standard system directories; if the
+        file is found in one of them, no additional directories are needed.
+    'paths' is a list of additional locations to check; if the file is
+        found in one of them, the resulting list will contain the directory.
+    """
+    if MACOS:
+        # Honor the MacOSX SDK setting when one was specified.
+        # An SDK is a directory with the same structure as a real
+        # system, but with only header files and libraries.
+        sysroot = macosx_sdk_root()
+
+    # Check the standard locations
+    for dir_ in std_dirs:
+        f = os.path.join(dir_, filename)
+
+        if MACOS and is_macosx_sdk_path(dir_):
+            f = os.path.join(sysroot, dir_[1:], filename)
+
+        if os.path.exists(f): return []
+
+    # Check the additional directories
+    for dir_ in paths:
+        f = os.path.join(dir_, filename)
+
+        if MACOS and is_macosx_sdk_path(dir_):
+            f = os.path.join(sysroot, dir_[1:], filename)
+
+        if os.path.exists(f):
+            return [dir_]
+
+    # Not found anywhere
+    return None
+
+
+def find_library_file(compiler, libname, std_dirs, paths):
+    result = compiler.find_library_file(std_dirs + paths, libname)
+    if result is None:
+        return None
+
+    if MACOS:
+        sysroot = macosx_sdk_root()
+
+    # Check whether the found file is in one of the standard directories
+    dirname = os.path.dirname(result)
+    for p in std_dirs:
+        # Ensure path doesn't end with path separator
+        p = p.rstrip(os.sep)
+
+        if MACOS and is_macosx_sdk_path(p):
+            # Note that, as of Xcode 7, Apple SDKs may contain textual stub
+            # libraries with .tbd extensions rather than the normal .dylib
+            # shared libraries installed in /.  The Apple compiler tool
+            # chain handles this transparently but it can cause problems
+            # for programs that are being built with an SDK and searching
+            # for specific libraries.  Distutils find_library_file() now
+            # knows to also search for and return .tbd files.  But callers
+            # of find_library_file need to keep in mind that the base filename
+            # of the returned SDK library file might have a different extension
+            # from that of the library file installed on the running system,
+            # for example:
+            #   /Applications/Xcode.app/Contents/Developer/Platforms/
+            #       MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/
+            #       usr/lib/libedit.tbd
+            # vs
+            #   /usr/lib/libedit.dylib
+            if os.path.join(sysroot, p[1:]) == dirname:
+                return [ ]
+
+        if p == dirname:
+            return [ ]
+
+    # Otherwise, it must have been in one of the additional directories,
+    # so we have to figure out which one.
+    for p in paths:
+        # Ensure path doesn't end with path separator
+        p = p.rstrip(os.sep)
+
+        if MACOS and is_macosx_sdk_path(p):
+            if os.path.join(sysroot, p[1:]) == dirname:
+                return [ p ]
+
+        if p == dirname:
+            return [p]
+    else:
+        assert False, "Internal error: Path not found in std_dirs or paths"
+
+
+def validate_tzpath():
+    base_tzpath = sysconfig.get_config_var('TZPATH')
+    if not base_tzpath:
+        return
+
+    tzpaths = base_tzpath.split(os.pathsep)
+    bad_paths = [tzpath for tzpath in tzpaths if not os.path.isabs(tzpath)]
+    if bad_paths:
+        raise ValueError('TZPATH must contain only absolute paths, '
+                         + f'found:\n{tzpaths!r}\nwith invalid paths:\n'
+                         + f'{bad_paths!r}')
+
+
+def find_module_file(module, dirlist):
+    """Find a module in a set of possible folders. If it is not found
+    return the unadorned filename"""
+    dirs = find_file(module, [], dirlist)
+    if not dirs:
+        return module
+    if len(dirs) > 1:
+        log.info(f"WARNING: multiple copies of {module} found")
+    return os.path.join(dirs[0], module)
+
+
+class PyBuildExt(build_ext):
+
+    def __init__(self, dist):
+        build_ext.__init__(self, dist)
+        self.srcdir = None
+        self.lib_dirs = None
+        self.inc_dirs = None
+        self.config_h_vars = None
+        self.failed = []
+        self.failed_on_import = []
+        self.missing = []
+        self.disabled_configure = []
+        if '-j' in os.environ.get('MAKEFLAGS', ''):
+            self.parallel = True
+
+    def add(self, ext):
+        self.extensions.append(ext)
+
+    def set_srcdir(self):
+        self.srcdir = sysconfig.get_config_var('srcdir')
+        if not self.srcdir:
+            # Maybe running on Windows but not using CYGWIN?
+            raise ValueError("No source directory; cannot proceed.")
+        self.srcdir = os.path.abspath(self.srcdir)
+
+    def remove_disabled(self):
+        # Remove modules that are present on the disabled list
+        extensions = [ext for ext in self.extensions
+                      if ext.name not in DISABLED_MODULE_LIST]
+        # move ctypes to the end, it depends on other modules
+        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
+        if "_ctypes" in ext_map:
+            ctypes = extensions.pop(ext_map["_ctypes"])
+            extensions.append(ctypes)
+        self.extensions = extensions
+
+    def update_sources_depends(self):
+        # Fix up the autodetected modules, prefixing all the source files
+        # with Modules/.
+        moddirlist = [os.path.join(self.srcdir, 'Modules')]
+
+        # Fix up the paths for scripts, too
+        self.distribution.scripts = [os.path.join(self.srcdir, filename)
+                                     for filename in self.distribution.scripts]
+
+        # Python header files
+        headers = [sysconfig.get_config_h_filename()]
+        headers += glob(os.path.join(escape(sysconfig.get_path('include')), "*.h"))
+
+        for ext in self.extensions:
+            ext.sources = [ find_module_file(filename, moddirlist)
+                            for filename in ext.sources ]
+            if ext.depends is not None:
+                ext.depends = [find_module_file(filename, moddirlist)
+                               for filename in ext.depends]
+            else:
+                ext.depends = []
+            # re-compile extensions if a header file has been changed
+            ext.depends.extend(headers)
+
+    def remove_configured_extensions(self):
+        # The sysconfig variables built by makesetup that list the already
+        # built modules and the disabled modules as configured by the Setup
+        # files.
+        sysconf_built = sysconfig.get_config_var('MODBUILT_NAMES').split()
+        sysconf_dis = sysconfig.get_config_var('MODDISABLED_NAMES').split()
+
+        mods_built = []
+        mods_disabled = []
+        for ext in self.extensions:
+            # If a module has already been built or has been disabled in the
+            # Setup files, don't build it here.
+            if ext.name in sysconf_built:
+                mods_built.append(ext)
+            if ext.name in sysconf_dis:
+                mods_disabled.append(ext)
+
+        mods_configured = mods_built + mods_disabled
+        if mods_configured:
+            self.extensions = [x for x in self.extensions if x not in
+                               mods_configured]
+            # Remove the shared libraries built by a previous build.
+            for ext in mods_configured:
+                fullpath = self.get_ext_fullpath(ext.name)
+                if os.path.exists(fullpath):
+                    os.unlink(fullpath)
+
+        return (mods_built, mods_disabled)
+
+    def set_compiler_executables(self):
+        # When you run "make CC=altcc" or something similar, you really want
+        # those environment variables passed into the setup.py phase.  Here's
+        # a small set of useful ones.
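+        # For example (illustrative values only), running "make CC=clang"
+        # would make compiler_so below "clang <CCSHARED> <CFLAGS>".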
+        compiler = os.environ.get('CC')
+        args = {}
+        # unfortunately, distutils doesn't let us provide separate C and C++
+        # compilers
+        if compiler is not None:
+            ccshared, cflags = sysconfig.get_config_vars('CCSHARED', 'CFLAGS')
+            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
+        self.compiler.set_executables(**args)
+
+    def build_extensions(self):
+        self.set_srcdir()
+        self.set_compiler_executables()
+        self.configure_compiler()
+        self.init_inc_lib_dirs()
+
+        # Detect which modules should be compiled
+        self.detect_modules()
+
+        if not LIST_MODULE_NAMES:
+            self.remove_disabled()
+
+        self.update_sources_depends()
+        mods_built, mods_disabled = self.remove_configured_extensions()
+
+        if LIST_MODULE_NAMES:
+            for ext in self.extensions:
+                print(ext.name)
+            for name in self.missing:
+                print(name)
+            return
+
+        build_ext.build_extensions(self)
+
+        if SUBPROCESS_BOOTSTRAP:
+            # Drop our custom subprocess module:
+            # use the newly built subprocess module
+            del sys.modules['subprocess']
+
+        for ext in self.extensions:
+            self.check_extension_import(ext)
+
+        self.summary(mods_built, mods_disabled)
+
+    def summary(self, mods_built, mods_disabled):
+        longest = max([len(e.name) for e in self.extensions], default=0)
+        if self.failed or self.failed_on_import:
+            all_failed = self.failed + self.failed_on_import
+            longest = max(longest, max([len(name) for name in all_failed]))
+
+        def print_three_column(lst):
+            lst.sort(key=str.lower)
+            # guarantee zip() doesn't drop anything
+            while len(lst) % 3:
+                lst.append("")
+            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
+                print("%-*s   %-*s   %-*s" % (longest, e, longest, f,
+                                              longest, g))
+
+        if self.missing:
+            print()
+            print("The necessary bits to build these optional modules were not "
+                  "found:")
+            print_three_column(self.missing)
+            print("To find the necessary bits, look in setup.py in"
+                  " detect_modules() for the module's name.")
+            print()
+
+        if mods_built:
+            print()
+            print("The following modules found by detect_modules() in"
+            " setup.py, have been")
+            print("built by the Makefile instead, as configured by the"
+            " Setup files:")
+            print_three_column([ext.name for ext in mods_built])
+            print()
+
+        if mods_disabled:
+            print()
+            print("The following modules found by detect_modules() in"
+            " setup.py have not")
+            print("been built, they are *disabled* in the Setup files:")
+            print_three_column([ext.name for ext in mods_disabled])
+            print()
+
+        if self.disabled_configure:
+            print()
+            print("The following modules found by detect_modules() in"
+            " setup.py have not")
+            print("been built, they are *disabled* by configure:")
+            print_three_column(self.disabled_configure)
+            print()
+
+        if self.failed:
+            failed = self.failed[:]
+            print()
+            print("Failed to build these modules:")
+            print_three_column(failed)
+            print()
+
+        if self.failed_on_import:
+            failed = self.failed_on_import[:]
+            print()
+            print("Following modules built successfully"
+                  " but were removed because they could not be imported:")
+            print_three_column(failed)
+            print()
+
+        if any('_ssl' in l
+               for l in (self.missing, self.failed, self.failed_on_import)):
+            print()
+            print("Could not build the ssl module!")
+            print("Python requires a OpenSSL 1.1.1 or newer")
+            if sysconfig.get_config_var("OPENSSL_LDFLAGS"):
+                print("Custom linker flags may require --with-openssl-rpath=auto")
+            print()
+
+        if os.environ.get("PYTHONSTRICTEXTENSIONBUILD") and (self.failed or self.failed_on_import):
+            raise RuntimeError("Failed to build some stdlib modules")
+
+    def build_extension(self, ext):
+
+        if ext.name == '_ctypes':
+            if not self.configure_ctypes(ext):
+                self.failed.append(ext.name)
+                return
+
+        try:
+            build_ext.build_extension(self, ext)
+        except (CCompilerError, DistutilsError) as why:
+            self.announce('WARNING: building of extension "%s" failed: %s' %
+                          (ext.name, why))
+            self.failed.append(ext.name)
+            return
+
+    def check_extension_import(self, ext):
+        # Don't try to import an extension that has failed to compile
+        if ext.name in self.failed:
+            self.announce(
+                'WARNING: skipping import check for failed build "%s"' %
+                ext.name, level=1)
+            return
+
+        # Workaround for Mac OS X: The Carbon-based modules cannot be
+        # reliably imported into a command-line Python
+        if 'Carbon' in ext.extra_link_args:
+            self.announce(
+                'WARNING: skipping import check for Carbon-based "%s"' %
+                ext.name)
+            return
+
+        if MACOS and (
+                sys.maxsize > 2**32 and '-arch' in ext.extra_link_args):
+            # Don't bother doing an import check when an extension was
+            # built with an explicit '-arch' flag on OSX. That's currently
+            # only used to build 32-bit only extensions in a 4-way
+            # universal build and loading 32-bit code into a 64-bit
+            # process will fail.
+            self.announce(
+                'WARNING: skipping import check for "%s"' %
+                ext.name)
+            return
+
+        # Workaround for Cygwin: Cygwin currently has fork issues when many
+        # modules have been imported
+        if CYGWIN:
+            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
+                % ext.name)
+            return
+        ext_filename = os.path.join(
+            self.build_lib,
+            self.get_ext_filename(self.get_ext_fullname(ext.name)))
+
+        # If the build directory didn't exist when setup.py was
+        # started, sys.path_importer_cache has a negative result
+        # cached.  Clear that cache before trying to import.
+        sys.path_importer_cache.clear()
+
+        # Don't try to load extensions for cross builds
+        if CROSS_COMPILING:
+            return
+
+        loader = importlib.machinery.ExtensionFileLoader(ext.name, ext_filename)
+        spec = importlib.util.spec_from_file_location(ext.name, ext_filename,
+                                                      loader=loader)
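+        # Load the freshly built extension directly from its file in the
+        # build directory (bypassing sys.path), so the import check exercises
+        # exactly the binary that was just compiled.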
+        try:
+            importlib._bootstrap._load(spec)
+        except ImportError as why:
+            self.failed_on_import.append(ext.name)
+            self.announce('*** WARNING: renaming "%s" since importing it'
+                          ' failed: %s' % (ext.name, why), level=3)
+            assert not self.inplace
+            basename, tail = os.path.splitext(ext_filename)
+            newname = basename + "_failed" + tail
+            if os.path.exists(newname):
+                os.remove(newname)
+            os.rename(ext_filename, newname)
+
+        except:
+            exc_type, why, tb = sys.exc_info()
+            self.announce('*** WARNING: importing extension "%s" '
+                          'failed with %s: %s' % (ext.name, exc_type, why),
+                          level=3)
+            self.failed.append(ext.name)
+
+    def add_multiarch_paths(self):
+        # Debian/Ubuntu multiarch support.
+        # https://wiki.ubuntu.com/MultiarchSpec
+        tmpfile = os.path.join(self.build_temp, 'multiarch')
+        if not os.path.exists(self.build_temp):
+            os.makedirs(self.build_temp)
+        ret = run_command(
+            '%s -print-multiarch > %s 2> /dev/null' % (CC, tmpfile))
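+        # On Debian-style toolchains gcc prints the multiarch triplet here,
+        # e.g. "x86_64-linux-gnu" (illustrative value); otherwise the file
+        # stays empty and the dpkg-architecture fallback below is used.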
+        multiarch_path_component = ''
+        try:
+            if ret == 0:
+                with open(tmpfile) as fp:
+                    multiarch_path_component = fp.readline().strip()
+        finally:
+            os.unlink(tmpfile)
+
+        if multiarch_path_component != '':
+            add_dir_to_list(self.compiler.library_dirs,
+                            '/usr/lib32/' + multiarch_path_component)
+            add_dir_to_list(self.compiler.include_dirs,
+                            '/usr/include/' + multiarch_path_component)
+            return
+
+        if not find_executable('dpkg-architecture'):
+            return
+        opt = ''
+        if CROSS_COMPILING:
+            opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE')
+        tmpfile = os.path.join(self.build_temp, 'multiarch')
+        if not os.path.exists(self.build_temp):
+            os.makedirs(self.build_temp)
+        ret = run_command(
+            'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
+            (opt, tmpfile))
+        try:
+            if ret == 0:
+                with open(tmpfile) as fp:
+                    multiarch_path_component = fp.readline().strip()
+                add_dir_to_list(self.compiler.library_dirs,
+                                '/usr/lib32/' + multiarch_path_component)
+                add_dir_to_list(self.compiler.include_dirs,
+                                '/usr/include/' + multiarch_path_component)
+        finally:
+            os.unlink(tmpfile)
+
+    def add_wrcc_search_dirs(self):
+        # add library search path by wr-cc, the compiler wrapper
+
+        def convert_mixed_path(path):
+            # convert path like C:\folder1\folder2/folder3/folder4
+            # to msys style /c/folder1/folder2/folder3/folder4
+            drive = path[0].lower()
+            left = path[2:].replace("\\", "/")
+            return "/" + drive + left
+
+        def add_search_path(line):
+            # On a Windows build machine, VxWorks cross builds run
+            # under an msys2 environment.
+            pathsep = (";" if sys.platform == "msys" else ":")
+            for d in line.strip().split("=")[1].split(pathsep):
+                d = d.strip()
+                if sys.platform == "msys":
+                    # On a Windows build machine, the compiler
+                    # returns mixed-style paths like:
+                    # C:\folder1\folder2/folder3/folder4
+                    d = convert_mixed_path(d)
+                d = os.path.normpath(d)
+                add_dir_to_list(self.compiler.library_dirs, d)
+
+        tmpfile = os.path.join(self.build_temp, 'wrccpaths')
+        os.makedirs(self.build_temp, exist_ok=True)
+        try:
+            ret = run_command('%s --print-search-dirs >%s' % (CC, tmpfile))
+            if ret:
+                return
+            with open(tmpfile) as fp:
+                # Parse paths in libraries line. The line is like:
+                # On Linux, "libraries: = path1:path2:path3"
+                # On Windows, "libraries: = path1;path2;path3"
+                for line in fp:
+                    if not line.startswith("libraries"):
+                        continue
+                    add_search_path(line)
+        finally:
+            try:
+                os.unlink(tmpfile)
+            except OSError:
+                pass
+
+    def add_cross_compiling_paths(self):
+        tmpfile = os.path.join(self.build_temp, 'ccpaths')
+        if not os.path.exists(self.build_temp):
+            os.makedirs(self.build_temp)
+        # bpo-38472: With a German locale, GCC returns "gcc-Version 9.1.0
+        # (GCC)", whereas it returns "gcc version 9.1.0" with the C locale.
+        ret = run_command('LC_ALL=C %s -E -v - </dev/null 2>%s 1>/dev/null' % (CC, tmpfile))
+        is_gcc = False
+        is_clang = False
+        in_incdirs = False
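+        # Scan the compiler's verbose output: include directories are listed
+        # between "#include <...>" and "End of search list", and library
+        # directories come from the LIBRARY_PATH line.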
+        try:
+            if ret == 0:
+                with open(tmpfile) as fp:
+                    for line in fp.readlines():
+                        if line.startswith("gcc version"):
+                            is_gcc = True
+                        elif line.startswith("clang version"):
+                            is_clang = True
+                        elif line.startswith("#include <...>"):
+                            in_incdirs = True
+                        elif line.startswith("End of search list"):
+                            in_incdirs = False
+                        elif (is_gcc or is_clang) and line.startswith("LIBRARY_PATH"):
+                            for d in line.strip().split("=")[1].split(":"):
+                                d = os.path.normpath(d)
+                                if '/gcc/' not in d:
+                                    add_dir_to_list(self.compiler.library_dirs,
+                                                    d)
+                        elif (is_gcc or is_clang) and in_incdirs and '/gcc/' not in line and '/clang/' not in line:
+                            add_dir_to_list(self.compiler.include_dirs,
+                                            line.strip())
+        finally:
+            os.unlink(tmpfile)
+
+        if VXWORKS:
+            self.add_wrcc_search_dirs()
+
+    def add_ldflags_cppflags(self):
+        # Add paths specified in the environment variables LDFLAGS and
+        # CPPFLAGS for header and library files.
+        # We must get the values from the Makefile and not the environment
+        # directly since an inconsistently reproducible issue comes up where
+        # the environment variable is not set even though the values were passed
+        # into configure and stored in the Makefile (issue found on OS X 10.3).
+        for env_var, arg_name, dir_list in (
+                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
+                ('LDFLAGS', '-L', self.compiler.library_dirs),
+                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
+            env_val = sysconfig.get_config_var(env_var)
+            if env_val:
+                parser = argparse.ArgumentParser()
+                parser.add_argument(arg_name, dest="dirs", action="append")
+
+                # To prevent argparse from raising an exception about any
+                # options in env_val that it mistakes for a known option, we
+                # strip out all double dashes and any dashes followed by a
+                # character that is not for the option we are dealing with.
+                #
+                # Note that the order within the regex matters: double dashes
+                # must be stripped first, so that "--Long" is not reduced to
+                # "-Long" and "ong" then mistaken for a library directory.
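+                # For example (illustrative values), with arg_name "-L" the
+                # string "-L/opt/lib --as-needed -pthread" is reduced to
+                # "-L/opt/lib as-needed pthread", and argparse then extracts
+                # only "/opt/lib".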
+                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
+                                 ' ', env_val)
+                options, _ = parser.parse_known_args(env_val.split())
+                if options.dirs:
+                    for directory in reversed(options.dirs):
+                        add_dir_to_list(dir_list, directory)
+
+    def configure_compiler(self):
+        # Ensure that /usr/local is always used, but the local build
+        # directories (i.e. '.' and 'Include') must be first.  See issue
+        # 10520.
+        if not CROSS_COMPILING:
+            add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib32')
+            add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
+        # only change this for cross builds for 3.3, issues on Mageia
+        if CROSS_COMPILING:
+            self.add_cross_compiling_paths()
+        self.add_multiarch_paths()
+        self.add_ldflags_cppflags()
+
+    def init_inc_lib_dirs(self):
+        if (not CROSS_COMPILING and
+                os.path.normpath(sys.base_prefix) != '/usr' and
+                not sysconfig.get_config_var('PYTHONFRAMEWORK')):
+            # OSX note: Don't add LIBDIR and INCLUDEDIR when building a
+            # framework (PYTHONFRAMEWORK is set), to avoid linking problems
+            # when building a framework with different architectures than
+            # the one that is currently installed (issue #7473)
+            add_dir_to_list(self.compiler.library_dirs,
+                            sysconfig.get_config_var("LIBDIR"))
+            add_dir_to_list(self.compiler.include_dirs,
+                            sysconfig.get_config_var("INCLUDEDIR"))
+
+        system_lib_dirs = ['/lib64', '/usr/lib64', '/lib', '/usr/lib']
+        system_include_dirs = ['/usr/include']
+        # lib_dirs and inc_dirs are used to search for files;
+        # if a file is found in one of those directories, it can
+        # be assumed that no additional -I,-L directives are needed.
+        if not CROSS_COMPILING:
+            self.lib_dirs = self.compiler.library_dirs + system_lib_dirs
+            self.inc_dirs = self.compiler.include_dirs + system_include_dirs
+        else:
+            # Add the sysroot paths. 'sysroot' is a compiler option used to
+            # set the logical path of the standard system headers and
+            # libraries.
+            self.lib_dirs = (self.compiler.library_dirs +
+                             sysroot_paths(('LDFLAGS', 'CC'), system_lib_dirs))
+            self.inc_dirs = (self.compiler.include_dirs +
+                             sysroot_paths(('CPPFLAGS', 'CFLAGS', 'CC'),
+                                           system_include_dirs))
+
+        config_h = sysconfig.get_config_h_filename()
+        with open(config_h) as file:
+            self.config_h_vars = sysconfig.parse_config_h(file)
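+        # config_h_vars now maps the #defines from pyconfig.h and is consulted
+        # later for feature checks such as HAVE_GETSPNAM and FLOCK_NEEDS_LIBBSD.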
+
+        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
+        if HOST_PLATFORM in ['osf1', 'unixware7', 'openunix8']:
+            self.lib_dirs += ['/usr/ccs/lib']
+
+        # HP-UX11iv3 keeps files in lib/hpux folders.
+        if HOST_PLATFORM == 'hp-ux11':
+            self.lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']
+
+        if MACOS:
+            # This should work on any unixy platform ;-)
+            # If the user has bothered specifying additional -I and -L flags
+            # in OPT and LDFLAGS we might as well use them here.
+            #
+            # NOTE: using shlex.split would technically be more correct, but
+            # also gives a bootstrap problem. Let's hope nobody uses
+            # directories with whitespace in the name to store libraries.
+            cflags, ldflags = sysconfig.get_config_vars(
+                    'CFLAGS', 'LDFLAGS')
+            for item in cflags.split():
+                if item.startswith('-I'):
+                    self.inc_dirs.append(item[2:])
+
+            for item in ldflags.split():
+                if item.startswith('-L'):
+                    self.lib_dirs.append(item[2:])
+
+    def detect_simple_extensions(self):
+        #
+        # The following modules are all pretty straightforward, and compile
+        # on pretty much any POSIXish platform.
+        #
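+        # Note: the -DPy_BUILD_CORE_MODULE flag used below lets an extension
+        # that is built as a shared module use CPython's internal (pycore_*)
+        # headers.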
+
+        # array objects
+        self.add(Extension('array', ['arraymodule.c'],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+
+        # Context Variables
+        self.add(Extension('_contextvars', ['_contextvarsmodule.c']))
+
+        shared_math = 'Modules/_math.o'
+
+        # math library functions, e.g. sin()
+        self.add(Extension('math',  ['mathmodule.c'],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE'],
+                           extra_objects=[shared_math],
+                           depends=['_math.h', shared_math],
+                           libraries=['m']))
+
+        # complex math library functions
+        self.add(Extension('cmath', ['cmathmodule.c'],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE'],
+                           extra_objects=[shared_math],
+                           depends=['_math.h', shared_math],
+                           libraries=['m']))
+
+        # time libraries: librt may be needed for clock_gettime()
+        time_libs = []
+        lib = sysconfig.get_config_var('TIMEMODULE_LIB')
+        if lib:
+            time_libs.append(lib)
+
+        # time operations and variables
+        self.add(Extension('time', ['timemodule.c'],
+                           libraries=time_libs))
+        # libm is needed by delta_new(), which uses round(), and by accum(),
+        # which uses modf().
+        self.add(Extension('_datetime', ['_datetimemodule.c'],
+                           libraries=['m'],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+        # zoneinfo module
+        self.add(Extension('_zoneinfo', ['_zoneinfo.c'],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+        # random number generator implemented in C
+        self.add(Extension("_random", ["_randommodule.c"],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+        # bisect
+        self.add(Extension("_bisect", ["_bisectmodule.c"]))
+        # heapq
+        self.add(Extension("_heapq", ["_heapqmodule.c"],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+        # C-optimized pickle replacement
+        self.add(Extension("_pickle", ["_pickle.c"],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+        # _json speedups
+        self.add(Extension("_json", ["_json.c"],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+
+        # profiler (_lsprof is for cProfile.py)
+        self.add(Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']))
+        # static Unicode character database
+        self.add(Extension('unicodedata', ['unicodedata.c'],
+                           depends=['unicodedata_db.h', 'unicodename_db.h'],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+        # _opcode module
+        self.add(Extension('_opcode', ['_opcode.c']))
+        # asyncio speedups
+        self.add(Extension("_asyncio", ["_asynciomodule.c"],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+        # _abc speedups
+        self.add(Extension("_abc", ["_abc.c"],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+        # _queue module
+        self.add(Extension("_queue", ["_queuemodule.c"],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+        # _statistics module
+        self.add(Extension("_statistics", ["_statisticsmodule.c"]))
+
+        # Modules with some UNIX dependencies -- on by default:
+        # (If you have a really backward UNIX, select and socket may not be
+        # supported...)
+
+        # fcntl(2) and ioctl(2)
+        libs = []
+        if (self.config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
+            # May be necessary on AIX for flock function
+            libs = ['bsd']
+        self.add(Extension('fcntl', ['fcntlmodule.c'],
+                           libraries=libs))
+        # pwd(3)
+        self.add(Extension('pwd', ['pwdmodule.c']))
+        # grp(3)
+        if not VXWORKS:
+            self.add(Extension('grp', ['grpmodule.c']))
+        # spwd, shadow passwords
+        if (self.config_h_vars.get('HAVE_GETSPNAM', False) or
+                self.config_h_vars.get('HAVE_GETSPENT', False)):
+            self.add(Extension('spwd', ['spwdmodule.c']))
+        # AIX has shadow passwords, but access is not via getspent(), etc.,
+        # so module support is not expected and spwd is not counted as 'missing'.
+        elif not AIX:
+            self.missing.append('spwd')
+
+        # select(2); not on ancient System V
+        self.add(Extension('select', ['selectmodule.c']))
+
+        # Memory-mapped files (also works on Win32).
+        self.add(Extension('mmap', ['mmapmodule.c']))
+
+        # Lance Ellinghaus's syslog module
+        # syslog daemon interface
+        self.add(Extension('syslog', ['syslogmodule.c']))
+
+        # Python interface to subinterpreter C-API.
+        self.add(Extension('_xxsubinterpreters', ['_xxsubinterpretersmodule.c']))
+
+        #
+        # Here ends the simple stuff.  From here on, modules need certain
+        # libraries, are platform-specific, or present other surprises.
+        #
+
+        # Multimedia modules
+        # These don't work for 64-bit platforms!!!
+        # These represent audio samples or images as strings:
+        #
+        # Operations on audio samples
+        # According to #993173, this one should actually work fine on
+        # 64-bit platforms.
+        #
+        # audioop needs libm for floor() in multiple functions.
+        self.add(Extension('audioop', ['audioop.c'],
+                           libraries=['m']))
+
+        # CSV files
+        self.add(Extension('_csv', ['_csv.c']))
+
+        # POSIX subprocess module helper.
+        self.add(Extension('_posixsubprocess', ['_posixsubprocess.c'],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+
+    def detect_test_extensions(self):
+        # Python C API test module
+        self.add(Extension('_testcapi', ['_testcapimodule.c'],
+                           depends=['testcapi_long.h']))
+
+        # Python Internal C API test module
+        self.add(Extension('_testinternalcapi', ['_testinternalcapi.c'],
+                           extra_compile_args=['-DPy_BUILD_CORE_MODULE']))
+
+        # Python PEP-3118 (buffer protocol) test module
+        self.add(Extension('_testbuffer', ['_testbuffer.c']))
+
+        # Test loading multiple modules from one compiled file (https://bugs.python.org/issue16421)
+        self.add(Extension('_testimportmultiple', ['_testimportmultiple.c']))
+
+        # Test multi-phase extension module init (PEP 489)
+        self.add(Extension('_testmultiphase', ['_testmultiphase.c']))
+
+        # Fuzz tests.
+        self.add(Extension('_xxtestfuzz',
+                           ['_xxtestfuzz/_xxtestfuzz.c',
+                            '_xxtestfuzz/fuzzer.c']))
+
+    def detect_readline_curses(self):
+        # readline
+        readline_termcap_library = ""
+        curses_library = ""
+        # Cannot use os.popen here in py3k.
+        tmpfile = os.path.join(self.build_temp, 'readline_termcap_lib')
+        if not os.path.exists(self.build_temp):
+            os.makedirs(self.build_temp)
+        # Determine if readline is already linked against curses or tinfo.
+        if sysconfig.get_config_var('HAVE_LIBREADLINE'):
+            if sysconfig.get_config_var('WITH_EDITLINE'):
+                readline_lib = 'edit'
+            else:
+                readline_lib = 'readline'
+            do_readline = self.compiler.find_library_file(self.lib_dirs,
+                readline_lib)
+            if CROSS_COMPILING:
+                ret = run_command("%s -d %s | grep '(NEEDED)' > %s"
+                                % (sysconfig.get_config_var('READELF'),
+                                   do_readline, tmpfile))
+            elif find_executable('ldd'):
+                ret = run_command("ldd %s > %s" % (do_readline, tmpfile))
+            else:
+                ret = 1
+            if ret == 0:
+                with open(tmpfile) as fp:
+                    for ln in fp:
+                        if 'curses' in ln:
+                            readline_termcap_library = re.sub(
+                                r'.*lib(n?cursesw?)\.so.*', r'\1', ln
+                            ).rstrip()
+                            break
+                        # termcap interface split out from ncurses
+                        if 'tinfo' in ln:
+                            readline_termcap_library = 'tinfo'
+                            break
+            if os.path.exists(tmpfile):
+                os.unlink(tmpfile)
+        else:
+            do_readline = False
+        # Issue 7384: If readline is already linked against curses,
+        # use the same library for the readline and curses modules.
+        if 'curses' in readline_termcap_library:
+            curses_library = readline_termcap_library
+        elif self.compiler.find_library_file(self.lib_dirs, 'ncursesw'):
+            curses_library = 'ncursesw'
+        # Issue 36210: OSS provided ncurses does not link on AIX
+        # Use IBM supplied 'curses' for successful build of _curses
+        elif AIX and self.compiler.find_library_file(self.lib_dirs, 'curses'):
+            curses_library = 'curses'
+        elif self.compiler.find_library_file(self.lib_dirs, 'ncurses'):
+            curses_library = 'ncurses'
+        elif self.compiler.find_library_file(self.lib_dirs, 'curses'):
+            curses_library = 'curses'
+
+        if MACOS:
+            os_release = int(os.uname()[2].split('.')[0])
+            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+            if (dep_target and
+                    (tuple(int(n) for n in dep_target.split('.')[0:2])
+                        < (10, 5) ) ):
+                os_release = 8
+            if os_release < 9:
+                # MacOSX 10.4 has a broken readline. Don't try to build
+                # the readline module unless the user has installed a fixed
+                # readline package
+                if find_file('readline/rlconf.h', self.inc_dirs, []) is None:
+                    do_readline = False
+        if do_readline:
+            if MACOS and os_release < 9:
+                # In every directory on the search path search for a dynamic
+                # library and then a static library, instead of first looking
+                # for dynamic libraries on the entire path.
+                # This way a statically linked custom readline gets picked up
+                # before the (possibly broken) dynamic library in /usr/lib.
+                readline_extra_link_args = ('-Wl,-search_paths_first',)
+            else:
+                readline_extra_link_args = ()
+
+            readline_libs = [readline_lib]
+            if readline_termcap_library:
+                pass # Issue 7384: Already linked against curses or tinfo.
+            elif curses_library:
+                readline_libs.append(curses_library)
+            elif self.compiler.find_library_file(self.lib_dirs +
+                                                     ['/usr/lib32/termcap'],
+                                                     'termcap'):
+                readline_libs.append('termcap')
+            self.add(Extension('readline', ['readline.c'],
+                               library_dirs=['/usr/lib32/termcap'],
+                               extra_link_args=readline_extra_link_args,
+                               libraries=readline_libs))
+        else:
+            self.missing.append('readline')
+
+        # Curses support, requiring the System V version of curses, often
+        # provided by the ncurses library.
+        curses_defines = []
+        curses_includes = []
+        panel_library = 'panel'
+        if curses_library == 'ncursesw':
+            curses_defines.append(('HAVE_NCURSESW', '1'))
+            if not CROSS_COMPILING:
+                curses_includes.append('/usr/include/ncursesw')
+            # Bug 1464056: If _curses.so links with ncursesw,
+            # _curses_panel.so must link with panelw.
+            panel_library = 'panelw'
+            if MACOS:
+                # On OS X, there is no separate /usr/lib/libncursesw nor
+                # libpanelw.  If we are here, we found a locally-supplied
+                # version of libncursesw.  There should also be a
+                # libpanelw.  _XOPEN_SOURCE defines are usually excluded
+                # for OS X but we need _XOPEN_SOURCE_EXTENDED here for
+                # ncurses wide char support
+                curses_defines.append(('_XOPEN_SOURCE_EXTENDED', '1'))
+        elif MACOS and curses_library == 'ncurses':
+            # Building with the system-supplied combined libncurses/libpanel
+            curses_defines.append(('HAVE_NCURSESW', '1'))
+            curses_defines.append(('_XOPEN_SOURCE_EXTENDED', '1'))
+
+        curses_enabled = True
+        if curses_library.startswith('ncurses'):
+            curses_libs = [curses_library]
+            self.add(Extension('_curses', ['_cursesmodule.c'],
+                               extra_compile_args=['-DPy_BUILD_CORE_MODULE'],
+                               include_dirs=curses_includes,
+                               define_macros=curses_defines,
+                               libraries=curses_libs))
+        elif curses_library == 'curses' and not MACOS:
+            # OSX has an old Berkeley curses, not good enough for
+            # the _curses module.
+            if (self.compiler.find_library_file(self.lib_dirs, 'terminfo')):
+                curses_libs = ['curses', 'terminfo']
+            elif (self.compiler.find_library_file(self.lib_dirs, 'termcap')):
+                curses_libs = ['curses', 'termcap']
+            else:
+                curses_libs = ['curses']
+
+            self.add(Extension('_curses', ['_cursesmodule.c'],
+                               extra_compile_args=['-DPy_BUILD_CORE_MODULE'],
+                               define_macros=curses_defines,
+                               libraries=curses_libs))
+        else:
+            curses_enabled = False
+            self.missing.append('_curses')
+
+        # If the curses module is enabled, check for the panel module
+        # _curses_panel needs some form of ncurses
+        skip_curses_panel = bool(AIX)
+        if (curses_enabled and not skip_curses_panel and
+                self.compiler.find_library_file(self.lib_dirs, panel_library)):
+            self.add(Extension('_curses_panel', ['_curses_panel.c'],
+                           include_dirs=curses_includes,
+                           define_macros=curses_defines,
+                           libraries=[panel_library, *curses_libs]))
+        elif not skip_curses_panel:
+            self.missing.append('_curses_panel')
+
+    def detect_crypt(self):
+        # crypt module.
+        if VXWORKS:
+            # bpo-31904: the crypt() function is not provided by VxWorks,
+            # and the DES_crypt() that OpenSSL provides is too weak to
+            # implement the encryption.
+            self.missing.append('_crypt')
+            return
+
+        if self.compiler.find_library_file(self.lib_dirs, 'crypt'):
+            libs = ['crypt']
+        else:
+            libs = []
+
+        self.add(Extension('_crypt', ['_cryptmodule.c'], libraries=libs))
+
+    def detect_socket(self):
+        # socket(2)
+        kwargs = {'depends': ['socketmodule.h']}
+        if MACOS:
+            # Issue #35569: Expose RFC 3542 socket options.
+            kwargs['extra_compile_args'] = ['-D__APPLE_USE_RFC_3542']
+
+        self.add(Extension('_socket', ['socketmodule.c'], **kwargs))
+
+    def detect_dbm_gdbm(self):
+        # Modules that provide persistent dictionary-like semantics.  You will
+        # probably want to arrange for at least one of them to be available on
+        # your machine, though none are defined by default because of library
+        # dependencies.  The Python module dbm/__init__.py provides an
+        # implementation independent wrapper for these; dbm/dumb.py provides
+        # similar functionality (but slower of course) implemented in Python.
+
+        # Sleepycat^WOracle Berkeley DB interface.
+        #  https://www.oracle.com/database/technologies/related/berkeleydb.html
+        #
+        # This requires the Sleepycat^WOracle DB code. The supported versions
+        # are set below.  Visit the URL above to download
+        # a release.  Most open source OSes come with one or more
+        # versions of BerkeleyDB already installed.
+
+        max_db_ver = (5, 3)
+        min_db_ver = (3, 3)
+        db_setup_debug = False   # verbose debug prints from this script?
+
+        def allow_db_ver(db_ver):
+            """Returns a boolean if the given BerkeleyDB version is acceptable.
+
+            Args:
+              db_ver: A tuple of the version to verify.
+            """
+            return min_db_ver <= db_ver <= max_db_ver
+
+        def gen_db_minor_ver_nums(major):
+            if major == 4:
+                for x in range(max_db_ver[1]+1):
+                    if allow_db_ver((4, x)):
+                        yield x
+            elif major == 3:
+                for x in (3,):
+                    if allow_db_ver((3, x)):
+                        yield x
+            else:
+                raise ValueError("unknown major BerkeleyDB version", major)
+
+        # construct a list of paths to look for the header file in on
+        # top of the normal inc_dirs.
+        db_inc_paths = [
+            '/usr/include/db4',
+            '/usr/local/include/db4',
+            '/opt/sfw/include/db4',
+            '/usr/include/db3',
+            '/usr/local/include/db3',
+            '/opt/sfw/include/db3',
+            # Fink defaults (https://www.finkproject.org/)
+            '/sw/include/db4',
+            '/sw/include/db3',
+        ]
+        # 4.x minor number specific paths
+        for x in gen_db_minor_ver_nums(4):
+            db_inc_paths.append('/usr/include/db4%d' % x)
+            db_inc_paths.append('/usr/include/db4.%d' % x)
+            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
+            db_inc_paths.append('/usr/local/include/db4%d' % x)
+            db_inc_paths.append('/pkg/db-4.%d/include' % x)
+            db_inc_paths.append('/opt/db-4.%d/include' % x)
+            # MacPorts default (https://www.macports.org/)
+            db_inc_paths.append('/opt/local/include/db4%d' % x)
+        # 3.x minor number specific paths
+        for x in gen_db_minor_ver_nums(3):
+            db_inc_paths.append('/usr/include/db3%d' % x)
+            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
+            db_inc_paths.append('/usr/local/include/db3%d' % x)
+            db_inc_paths.append('/pkg/db-3.%d/include' % x)
+            db_inc_paths.append('/opt/db-3.%d/include' % x)
+
+        if CROSS_COMPILING:
+            db_inc_paths = []
+
+        # Add some common subdirectories for Sleepycat DB to the list,
+        # based on the standard include directories. This way DB3/4 gets
+        # picked up when it is installed in a non-standard prefix and
+        # the user has added that prefix into inc_dirs.
+        std_variants = []
+        for dn in self.inc_dirs:
+            std_variants.append(os.path.join(dn, 'db3'))
+            std_variants.append(os.path.join(dn, 'db4'))
+            for x in gen_db_minor_ver_nums(4):
+                std_variants.append(os.path.join(dn, "db4%d"%x))
+                std_variants.append(os.path.join(dn, "db4.%d"%x))
+            for x in gen_db_minor_ver_nums(3):
+                std_variants.append(os.path.join(dn, "db3%d"%x))
+                std_variants.append(os.path.join(dn, "db3.%d"%x))
+
+        db_inc_paths = std_variants + db_inc_paths
+        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
+
+        db_ver_inc_map = {}
+
+        if MACOS:
+            sysroot = macosx_sdk_root()
+
+        class db_found(Exception): pass
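+        # db_found is raised purely as a control-flow signal to break out of
+        # the nested header/library search loops once a usable version is found.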
+        try:
+            # See whether there is a Sleepycat header in the standard
+            # search path.
+            for d in self.inc_dirs + db_inc_paths:
+                f = os.path.join(d, "db.h")
+                if MACOS and is_macosx_sdk_path(d):
+                    f = os.path.join(sysroot, d[1:], "db.h")
+
+                if db_setup_debug: print("db: looking for db.h in", f)
+                if os.path.exists(f):
+                    with open(f, 'rb') as file:
+                        f = file.read()
+                    m = re.search(br"#define\WDB_VERSION_MAJOR\W(\d+)", f)
+                    if m:
+                        db_major = int(m.group(1))
+                        m = re.search(br"#define\WDB_VERSION_MINOR\W(\d+)", f)
+                        db_minor = int(m.group(1))
+                        db_ver = (db_major, db_minor)
+
+                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
+                        if db_ver == (4, 6):
+                            m = re.search(br"#define\WDB_VERSION_PATCH\W(\d+)", f)
+                            db_patch = int(m.group(1))
+                            if db_patch < 21:
+                                print("db.h:", db_ver, "patch", db_patch,
+                                      "being ignored (4.6.x must be >= 4.6.21)")
+                                continue
+
+                        if ( (db_ver not in db_ver_inc_map) and
+                            allow_db_ver(db_ver) ):
+                            # save the include directory with the db.h version
+                            # (first occurrence only)
+                            db_ver_inc_map[db_ver] = d
+                            if db_setup_debug:
+                                print("db.h: found", db_ver, "in", d)
+                        else:
+                            # we already found a header for this library version
+                            if db_setup_debug: print("db.h: ignoring", d)
+                    else:
+                        # ignore this header, it didn't contain a version number
+                        if db_setup_debug:
+                            print("db.h: no version number version in", d)
+
+            db_found_vers = list(db_ver_inc_map.keys())
+            db_found_vers.sort()
+
+            while db_found_vers:
+                db_ver = db_found_vers.pop()
+                db_incdir = db_ver_inc_map[db_ver]
+
+                # check lib directories parallel to the location of the header
+                db_dirs_to_check = [
+                    db_incdir.replace("include", 'lib64'),
+                    db_incdir.replace("include", 'lib'),
+                ]
+
+                if not MACOS:
+                    db_dirs_to_check = list(filter(os.path.isdir, db_dirs_to_check))
+
+                else:
+                    # Same as other branch, but takes OSX SDK into account
+                    tmp = []
+                    for dn in db_dirs_to_check:
+                        if is_macosx_sdk_path(dn):
+                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
+                                tmp.append(dn)
+                        else:
+                            if os.path.isdir(dn):
+                                tmp.append(dn)
+                    db_dirs_to_check = tmp
+
+                # Look for a version specific db-X.Y before an ambiguous dbX
+                # XXX should we -ever- look for a dbX name?  Do any
+                # systems really not name their library by version and
+                # symlink to more general names?
+                for dblib in (('db-%d.%d' % db_ver),
+                              ('db%d%d' % db_ver),
+                              ('db%d' % db_ver[0])):
+                    dblib_file = self.compiler.find_library_file(
+                                    db_dirs_to_check + self.lib_dirs, dblib )
+                    if dblib_file:
+                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
+                        raise db_found
+                    else:
+                        if db_setup_debug: print("db lib: ", dblib, "not found")
+
+        except db_found:
+            if db_setup_debug:
+                print("bsddb using BerkeleyDB lib:", db_ver, dblib)
+                print("bsddb lib dir:", dblib_dir, " inc dir:", db_incdir)
+            dblibs = [dblib]
+            # Only add the found library and include directories if they aren't
+            # already being searched. This avoids an explicit runtime library
+            # dependency.
+            if db_incdir in self.inc_dirs:
+                db_incs = None
+            else:
+                db_incs = [db_incdir]
+            if dblib_dir[0] in self.lib_dirs:
+                dblib_dir = None
+        else:
+            if db_setup_debug: print("db: no appropriate library found")
+            db_incs = None
+            dblibs = []
+            dblib_dir = None
+
+        dbm_setup_debug = False   # verbose debug prints from this script?
+        dbm_order = ['gdbm']
+        # The standard Unix dbm module:
+        if not CYGWIN:
+            config_args = [arg.strip("'")
+                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
+            dbm_args = [arg for arg in config_args
+                        if arg.startswith('--with-dbmliborder=')]
+            if dbm_args:
+                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
+            else:
+                dbm_order = "ndbm:gdbm:bdb".split(":")
+            dbmext = None
+            for cand in dbm_order:
+                if cand == "ndbm":
+                    if find_file("ndbm.h", self.inc_dirs, []) is not None:
+                        # Some systems have -lndbm, others have -lgdbm_compat,
+                        # others don't have either
+                        if self.compiler.find_library_file(self.lib_dirs,
+                                                               'ndbm'):
+                            ndbm_libs = ['ndbm']
+                        elif self.compiler.find_library_file(self.lib_dirs,
+                                                             'gdbm_compat'):
+                            ndbm_libs = ['gdbm_compat']
+                        else:
+                            ndbm_libs = []
+                        if dbm_setup_debug: print("building dbm using ndbm")
+                        dbmext = Extension('_dbm', ['_dbmmodule.c'],
+                                           define_macros=[
+                                               ('HAVE_NDBM_H',None),
+                                               ],
+                                           libraries=ndbm_libs)
+                        break
+
+                elif cand == "gdbm":
+                    if self.compiler.find_library_file(self.lib_dirs, 'gdbm'):
+                        gdbm_libs = ['gdbm']
+                        if self.compiler.find_library_file(self.lib_dirs,
+                                                               'gdbm_compat'):
+                            gdbm_libs.append('gdbm_compat')
+                        if find_file("gdbm/ndbm.h", self.inc_dirs, []) is not None:
+                            if dbm_setup_debug: print("building dbm using gdbm")
+                            dbmext = Extension(
+                                '_dbm', ['_dbmmodule.c'],
+                                define_macros=[
+                                    ('HAVE_GDBM_NDBM_H', None),
+                                    ],
+                                libraries = gdbm_libs)
+                            break
+                        if find_file("gdbm-ndbm.h", self.inc_dirs, []) is not None:
+                            if dbm_setup_debug: print("building dbm using gdbm")
+                            dbmext = Extension(
+                                '_dbm', ['_dbmmodule.c'],
+                                define_macros=[
+                                    ('HAVE_GDBM_DASH_NDBM_H', None),
+                                    ],
+                                libraries = gdbm_libs)
+                            break
+                elif cand == "bdb":
+                    if dblibs:
+                        if dbm_setup_debug: print("building dbm using bdb")
+                        dbmext = Extension('_dbm', ['_dbmmodule.c'],
+                                           library_dirs=dblib_dir,
+                                           runtime_library_dirs=dblib_dir,
+                                           include_dirs=db_incs,
+                                           define_macros=[
+                                               ('HAVE_BERKDB_H', None),
+                                               ('DB_DBM_HSEARCH', None),
+                                               ],
+                                           libraries=dblibs)
+                        break
+            if dbmext is not None:
+                self.add(dbmext)
+            else:
+                self.missing.append('_dbm')
+
+        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
+        if ('gdbm' in dbm_order and
+            self.compiler.find_library_file(self.lib_dirs, 'gdbm')):
+            self.add(Extension('_gdbm', ['_gdbmmodule.c'],
+                               libraries=['gdbm']))
+        else:
+            self.missing.append('_gdbm')
+
+    def detect_sqlite(self):
+        # The sqlite interface
+        sqlite_setup_debug = False   # verbose debug prints from this script?
+
+        # We hunt for #define SQLITE_VERSION "n.n.n"
+        sqlite_incdir = sqlite_libdir = None
+        sqlite_inc_paths = [ '/usr/include',
+                             '/usr/include/sqlite',
+                             '/usr/include/sqlite3',
+                             '/usr/local/include',
+                             '/usr/local/include/sqlite',
+                             '/usr/local/include/sqlite3',
+                             ]
+        if CROSS_COMPILING:
+            sqlite_inc_paths = []
+        MIN_SQLITE_VERSION_NUMBER = (3, 7, 15)  # Issue 40810
+        MIN_SQLITE_VERSION = ".".join([str(x)
+                                    for x in MIN_SQLITE_VERSION_NUMBER])
+
+        # Scan the default include directories before the SQLite specific
+        # ones. This allows one to override the copy of sqlite on OSX,
+        # where /usr/include contains an old version of sqlite.
+        if MACOS:
+            sysroot = macosx_sdk_root()
+
+        for d_ in self.inc_dirs + sqlite_inc_paths:
+            d = d_
+            if MACOS and is_macosx_sdk_path(d):
+                d = os.path.join(sysroot, d[1:])
+
+            f = os.path.join(d, "sqlite3.h")
+            if os.path.exists(f):
+                if sqlite_setup_debug: print("sqlite: found %s"%f)
+                with open(f) as file:
+                    incf = file.read()
+                m = re.search(
+                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf)
+                if m:
+                    sqlite_version = m.group(1)
+                    sqlite_version_tuple = tuple([int(x)
+                                        for x in sqlite_version.split(".")])
+                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
+                        # we win!
+                        if sqlite_setup_debug:
+                            print("%s/sqlite3.h: version %s"%(d, sqlite_version))
+                        sqlite_incdir = d
+                        break
+                    else:
+                        if sqlite_setup_debug:
+                            print("%s: version %s is too old, need >= %s"%(d,
+                                        sqlite_version, MIN_SQLITE_VERSION))
+                elif sqlite_setup_debug:
+                    print("sqlite: %s had no SQLITE_VERSION"%(f,))
+
+        if sqlite_incdir:
+            sqlite_dirs_to_check = [
+                os.path.join(sqlite_incdir, '..', 'lib64'),
+                os.path.join(sqlite_incdir, '..', 'lib'),
+                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
+                os.path.join(sqlite_incdir, '..', '..', 'lib'),
+            ]
+            sqlite_libfile = self.compiler.find_library_file(
+                                sqlite_dirs_to_check + self.lib_dirs, 'sqlite3')
+            if sqlite_libfile:
+                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
+
+        if sqlite_incdir and sqlite_libdir:
+            sqlite_srcs = ['_sqlite/cache.c',
+                '_sqlite/connection.c',
+                '_sqlite/cursor.c',
+                '_sqlite/microprotocols.c',
+                '_sqlite/module.c',
+                '_sqlite/prepare_protocol.c',
+                '_sqlite/row.c',
+                '_sqlite/statement.c',
+                '_sqlite/util.c', ]
+            sqlite_defines = []
+
+            # Enable support for loadable extensions in the sqlite3 module
+            # if --enable-loadable-sqlite-extensions configure option is used.
+            if '--enable-loadable-sqlite-extensions' not in sysconfig.get_config_var("CONFIG_ARGS"):
+                sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
+            elif MACOS and sqlite_incdir == os.path.join(MACOS_SDK_ROOT, "usr/include"):
+                raise DistutilsError("System version of SQLite does not support loadable extensions")
+
+            if MACOS:
+                # In every directory on the search path search for a dynamic
+                # library and then a static library, instead of first looking
+                # for dynamic libraries on the entire path.
+                # This way a statically linked custom sqlite gets picked up
+                # before the dynamic library in /usr/lib.
+                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
+            else:
+                sqlite_extra_link_args = ()
+
+            include_dirs = ["Modules/_sqlite"]
+            # Only include the directory where sqlite was found if it is not
+            # already in the set of include directories; otherwise you can
+            # end up with a bad search path order.
+            if sqlite_incdir not in self.compiler.include_dirs:
+                include_dirs.append(sqlite_incdir)
+            # avoid a runtime library path for a system library dir
+            if sqlite_libdir and sqlite_libdir[0] in self.lib_dirs:
+                sqlite_libdir = None
+            self.add(Extension('_sqlite3', sqlite_srcs,
+                               define_macros=sqlite_defines,
+                               include_dirs=include_dirs,
+                               library_dirs=sqlite_libdir,
+                               extra_link_args=sqlite_extra_link_args,
+                               libraries=["sqlite3",]))
+        else:
+            self.missing.append('_sqlite3')
+
+    def detect_platform_specific_exts(self):
+        # Unix-only modules
+        if not MS_WINDOWS:
+            if not VXWORKS:
+                # Steen Lumholt's termios module
+                self.add(Extension('termios', ['termios.c']))
+                # Jeremy Hylton's rlimit interface
+            self.add(Extension('resource', ['resource.c']))
+        else:
+            self.missing.extend(['resource', 'termios'])
+
+        # Platform-specific libraries
+        if HOST_PLATFORM.startswith(('linux', 'freebsd', 'gnukfreebsd')):
+            self.add(Extension('ossaudiodev', ['ossaudiodev.c']))
+        elif HOST_PLATFORM.startswith('netbsd'):
+            self.add(Extension('ossaudiodev', ['ossaudiodev.c'],
+                               libraries=["ossaudio"]))
+        elif not AIX:
+            self.missing.append('ossaudiodev')
+
+        if MACOS:
+            self.add(Extension('_scproxy', ['_scproxy.c'],
+                               extra_link_args=[
+                                   '-framework', 'SystemConfiguration',
+                                   '-framework', 'CoreFoundation']))
+
+    def detect_compress_exts(self):
+        # Andrew Kuchling's zlib module.  Note that some versions of zlib
+        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
+        # http://www.cert.org/advisories/CA-2002-07.html
+        #
+        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
+        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
+        # now, we still accept 1.1.3, because we think it's difficult to
+        # exploit this in Python, and we'd rather make it RedHat's problem
+        # than our problem <wink>.
+        #
+        # You can upgrade zlib to version 1.1.4 yourself by going to
+        # http://www.gzip.org/zlib/
+        zlib_inc = find_file('zlib.h', [], self.inc_dirs)
+        have_zlib = False
+        if zlib_inc is not None:
+            zlib_h = zlib_inc[0] + '/zlib.h'
+            version = '"0.0.0"'
+            version_req = '"1.1.3"'
+            if MACOS and is_macosx_sdk_path(zlib_h):
+                zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:])
+            with open(zlib_h) as fp:
+                for line in fp:
+                    if line.startswith('#define ZLIB_VERSION'):
+                        version = line.split()[2]
+                        break
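+            # Note: 'version' keeps the surrounding double quotes from zlib.h,
+            # so the check below is a plain lexicographic string comparison,
+            # e.g. '"1.2.11"' >= '"1.1.3"'.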
+            if version >= version_req:
+                if (self.compiler.find_library_file(self.lib_dirs, 'z')):
+                    if MACOS:
+                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
+                    else:
+                        zlib_extra_link_args = ()
+                    self.add(Extension('zlib', ['zlibmodule.c'],
+                                       libraries=['z'],
+                                       extra_link_args=zlib_extra_link_args))
+                    have_zlib = True
+                else:
+                    self.missing.append('zlib')
+            else:
+                self.missing.append('zlib')
+        else:
+            self.missing.append('zlib')
+
+        # Helper module for various ascii-encoders.  Uses zlib for an optimized
+        # crc32 if we have it.  Otherwise binascii uses its own.
+        if have_zlib:
+            extra_compile_args = ['-DUSE_ZLIB_CRC32']
+            libraries = ['z']
+            extra_link_args = zlib_extra_link_args
+        else:
+            extra_compile_args = []
+            libraries = []
+            extra_link_args = []
+        self.add(Extension('binascii', ['binascii.c'],
+                           extra_compile_args=extra_compile_args,
+                           libraries=libraries,
+                           extra_link_args=extra_link_args))
+
+        # Gustavo Niemeyer's bz2 module.
+        if (self.compiler.find_library_file(self.lib_dirs, 'bz2')):
+            if MACOS:
+                bz2_extra_link_args = ('-Wl,-search_paths_first',)
+            else:
+                bz2_extra_link_args = ()
+            self.add(Extension('_bz2', ['_bz2module.c'],
+                               libraries=['bz2'],
+                               extra_link_args=bz2_extra_link_args))
+        else:
+            self.missing.append('_bz2')
+
+        # LZMA compression support.
+        if self.compiler.find_library_file(self.lib_dirs, 'lzma'):
+            self.add(Extension('_lzma', ['_lzmamodule.c'],
+                               libraries=['lzma']))
+        else:
+            self.missing.append('_lzma')
+
+    def detect_expat_elementtree(self):
+        # Interface to the Expat XML parser
+        #
+        # Expat was written by James Clark and is now maintained by a group of
+        # developers on SourceForge; see www.libexpat.org for more information.
+        # The pyexpat module was written by Paul Prescod after a prototype by
+        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
+        # of a system shared libexpat.so is possible with --with-system-expat
+        # configure option.
+        #
+        # More information on Expat can be found at www.libexpat.org.
+        #
+        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
+            expat_inc = []
+            define_macros = []
+            extra_compile_args = []
+            expat_lib = ['expat']
+            expat_sources = []
+            expat_depends = []
+        else:
+            expat_inc = [os.path.join(self.srcdir, 'Modules', 'expat')]
+            define_macros = [
+                ('HAVE_EXPAT_CONFIG_H', '1'),
+                # bpo-30947: Python uses best available entropy sources to
+                # call XML_SetHashSalt(), expat entropy sources are not needed
+                ('XML_POOR_ENTROPY', '1'),
+            ]
+            extra_compile_args = []
+            # bpo-44394: libexpat uses isnan() of math.h and needs linkage
+            # against the libm
+            expat_lib = ['m']
+            expat_sources = ['expat/xmlparse.c',
+                             'expat/xmlrole.c',
+                             'expat/xmltok.c']
+            expat_depends = ['expat/ascii.h',
+                             'expat/asciitab.h',
+                             'expat/expat.h',
+                             'expat/expat_config.h',
+                             'expat/expat_external.h',
+                             'expat/internal.h',
+                             'expat/latin1tab.h',
+                             'expat/utf8tab.h',
+                             'expat/xmlrole.h',
+                             'expat/xmltok.h',
+                             'expat/xmltok_impl.h'
+                             ]
+
+            cc = sysconfig.get_config_var('CC').split()[0]
+            ret = run_command(
+                      '"%s" -Werror -Wno-unreachable-code -E -xc /dev/null >/dev/null 2>&1' % cc)
+            if ret == 0:
+                extra_compile_args.append('-Wno-unreachable-code')
+
+        self.add(Extension('pyexpat',
+                           define_macros=define_macros,
+                           extra_compile_args=extra_compile_args,
+                           include_dirs=expat_inc,
+                           libraries=expat_lib,
+                           sources=['pyexpat.c'] + expat_sources,
+                           depends=expat_depends))
+
+        # Fredrik Lundh's cElementTree module.  Note that this also
+        # uses expat (via the CAPI hook in pyexpat).
+
+        if os.path.isfile(os.path.join(self.srcdir, 'Modules', '_elementtree.c')):
+            define_macros.append(('USE_PYEXPAT_CAPI', None))
+            self.add(Extension('_elementtree',
+                               define_macros=define_macros,
+                               include_dirs=expat_inc,
+                               libraries=expat_lib,
+                               sources=['_elementtree.c'],
+                               depends=['pyexpat.c', *expat_sources,
+                                        *expat_depends]))
+        else:
+            self.missing.append('_elementtree')
+
+    def detect_multibytecodecs(self):
+        # Hye-Shik Chang's CJKCodecs modules.
+        self.add(Extension('_multibytecodec',
+                           ['cjkcodecs/multibytecodec.c']))
+        for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
+            self.add(Extension('_codecs_%s' % loc,
+                               ['cjkcodecs/_codecs_%s.c' % loc]))
+
+    def detect_multiprocessing(self):
+        # Richard Oudkerk's multiprocessing module
+        if MS_WINDOWS:
+            multiprocessing_srcs = ['_multiprocessing/multiprocessing.c',
+                                    '_multiprocessing/semaphore.c']
+        else:
+            multiprocessing_srcs = ['_multiprocessing/multiprocessing.c']
+            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
+                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
+                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
+        self.add(Extension('_multiprocessing', multiprocessing_srcs,
+                           include_dirs=["Modules/_multiprocessing"]))
+
+        if (not MS_WINDOWS and
+           sysconfig.get_config_var('HAVE_SHM_OPEN') and
+           sysconfig.get_config_var('HAVE_SHM_UNLINK')):
+            posixshmem_srcs = ['_multiprocessing/posixshmem.c']
+            libs = []
+            if sysconfig.get_config_var('SHM_NEEDS_LIBRT'):
+                # need to link with librt to get shm_open()
+                libs.append('rt')
+            self.add(Extension('_posixshmem', posixshmem_srcs,
+                               define_macros=[],
+                               libraries=libs,
+                               include_dirs=["Modules/_multiprocessing"]))
+        else:
+            self.missing.append('_posixshmem')
+
+    def detect_uuid(self):
+        # Build the _uuid module if possible
+        uuid_h = sysconfig.get_config_var("HAVE_UUID_H")
+        uuid_uuid_h = sysconfig.get_config_var("HAVE_UUID_UUID_H")
+        if uuid_h or uuid_uuid_h:
+            if sysconfig.get_config_var("HAVE_LIBUUID"):
+                uuid_libs = ["uuid"]
+            else:
+                uuid_libs = []
+            self.add(Extension('_uuid', ['_uuidmodule.c'],
+                               libraries=uuid_libs))
+        else:
+            self.missing.append('_uuid')
+
+    def detect_modules(self):
+        self.detect_simple_extensions()
+        if TEST_EXTENSIONS:
+            self.detect_test_extensions()
+        self.detect_readline_curses()
+        self.detect_crypt()
+        self.detect_socket()
+        self.detect_openssl_hashlib()
+        self.detect_hash_builtins()
+        self.detect_dbm_gdbm()
+        self.detect_sqlite()
+        self.detect_platform_specific_exts()
+        self.detect_nis()
+        self.detect_compress_exts()
+        self.detect_expat_elementtree()
+        self.detect_multibytecodecs()
+        self.detect_decimal()
+        self.detect_ctypes()
+        self.detect_multiprocessing()
+        if not self.detect_tkinter():
+            self.missing.append('_tkinter')
+        self.detect_uuid()
+
+##         # Uncomment these lines if you want to play with xxmodule.c
+##         self.add(Extension('xx', ['xxmodule.c']))
+
+        # The limited C API is not compatible with the Py_TRACE_REFS macro.
+        if not sysconfig.get_config_var('Py_TRACE_REFS'):
+            self.add(Extension('xxlimited', ['xxlimited.c']))
+            self.add(Extension('xxlimited_35', ['xxlimited_35.c']))
+
+    def detect_tkinter_fromenv(self):
+        # Build _tkinter using the Tcl/Tk locations specified by
+        # the _TCLTK_INCLUDES and _TCLTK_LIBS environment variables.
+        # This method is meant to be invoked by detect_tkinter().
+        #
+        # The variables can be set in one of the following ways.
+        #
+        # - Automatically, at configuration time, by using pkg-config.
+        #   The tool is called by the configure script.
+        #   Additional pkg-config configuration paths can be set via the
+        #   PKG_CONFIG_PATH environment variable.
+        #
+        #     PKG_CONFIG_PATH=".../lib/pkgconfig" ./configure ...
+        #
+        # - Explicitly, at configuration time by setting both
+        #   --with-tcltk-includes and --with-tcltk-libs.
+        #
+        #     ./configure ... \
+        #     --with-tcltk-includes="-I/path/to/tclincludes \
+        #                            -I/path/to/tkincludes"
+        #     --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \
+        #                        -L/path/to/tklibs -ltkm.n"
+        #
+        #  - Explicitly, at compile time, by passing TCLTK_INCLUDES and
+        #    TCLTK_LIBS to the make target.
+        #    This will override any configuration-time option.
+        #
+        #      make TCLTK_INCLUDES="..." TCLTK_LIBS="..."
+        #
+        # This can be useful for building and testing tkinter with multiple
+        # versions of Tcl/Tk.  Note that a build of Tk depends on a particular
+        # build of Tcl so you need to specify both arguments and use care when
+        # overriding.
+
+        # The _TCLTK variables are created in the Makefile sharedmods target.
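+        # Illustrative values only (paths and versions are assumptions):
+        #   _TCLTK_INCLUDES='-I/opt/tcltk/include'
+        #   _TCLTK_LIBS='-L/opt/tcltk/lib -ltcl8.6 -ltk8.6'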
+        tcltk_includes = os.environ.get('_TCLTK_INCLUDES')
+        tcltk_libs = os.environ.get('_TCLTK_LIBS')
+        if not (tcltk_includes and tcltk_libs):
+            # Resume default configuration search.
+            return False
+
+        extra_compile_args = tcltk_includes.split()
+        extra_link_args = tcltk_libs.split()
+        self.add(Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
+                           define_macros=[('WITH_APPINIT', 1)],
+                           extra_compile_args = extra_compile_args,
+                           extra_link_args = extra_link_args))
+        return True
+
+    def detect_tkinter_darwin(self):
+        # Build default _tkinter on macOS using Tcl and Tk frameworks.
+        # This method is meant to be invoked by detect_tkinter().
+        #
+        # The macOS native Tk (AKA Aqua Tk) and Tcl are most commonly
+        # built and installed as macOS framework bundles.  However,
+        # for several reasons, we cannot take full advantage of the
+        # Apple-supplied compiler chain's -framework options here.
+        # Instead, we need to find and pass to the compiler the
+        # absolute paths of the Tcl and Tk headers files we want to use
+        # and the absolute path to the directory containing the Tcl
+        # and Tk frameworks for linking.
+        #
+        # We want to handle here two common use cases on macOS:
+        # 1. Build and link with system-wide third-party or user-built
+        #    Tcl and Tk frameworks installed in /Library/Frameworks.
+        # 2. Build and link using a user-specified macOS SDK so that the
+        #    built Python can be exported to other systems.  In this case,
+        #    search only the SDK's /Library/Frameworks (normally empty)
+        #    and /System/Library/Frameworks.
+        #
+        # Any other use cases are handled either by detect_tkinter_fromenv(),
+        # or detect_tkinter(). The former handles non-standard locations of
+        # Tcl/Tk, defined via the _TCLTK_INCLUDES and _TCLTK_LIBS environment
+        # variables. The latter handles any Tcl/Tk versions installed in
+        # standard Unix directories.
+        #
+        # It would be desirable to also handle here the case where
+        # you want to build and link with a framework build of Tcl and Tk
+        # that is not in /Library/Frameworks, say, in your private
+        # $HOME/Library/Frameworks directory or elsewhere. It turns
+        # out to be difficult to make that work automatically here
+        # without bringing into play more tools and magic. That case
+        # can be handled using a recipe with the right arguments
+        # to detect_tkinter_fromenv().
+        #
+        # Note also that the fallback case here is to try to use the
+        # Apple-supplied Tcl and Tk frameworks in /System/Library but
+        # be forewarned that they are deprecated by Apple and typically
+        # out-of-date and buggy; their use should be avoided if at
+        # all possible by installing a newer version of Tcl and Tk in
+        # /Library/Frameworks before building Python without
+        # an explicit SDK or by configuring build arguments explicitly.
+
+        from os.path import join, exists
+
+        sysroot = macosx_sdk_root() # path to the SDK or '/'
+
+        if macosx_sdk_specified():
+            # Use case #2: an SDK other than '/' was specified.
+            # Only search there.
+            framework_dirs = [
+                join(sysroot, 'Library', 'Frameworks'),
+                join(sysroot, 'System', 'Library', 'Frameworks'),
+            ]
+        else:
+            # Use case #1: no explicit SDK selected.
+            # Search the local system-wide /Library/Frameworks,
+            # not the one in the default SDK, otherwise fall back to
+            # /System/Library/Frameworks whose header files may be in
+            # the default SDK or, on older systems, actually installed.
+            framework_dirs = [
+                join('/', 'Library', 'Frameworks'),
+                join(sysroot, 'System', 'Library', 'Frameworks'),
+            ]
+
+        # Find the directory that contains the Tcl.framework and
+        # Tk.framework bundles.
+        for F in framework_dirs:
+            # both Tcl.framework and Tk.framework should be present
+            for fw in 'Tcl', 'Tk':
+                if not exists(join(F, fw + '.framework')):
+                    break
+            else:
+                # ok, F is now a directory containing both frameworks;
+                # continue building
+                break
+        else:
+            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
+            # will now resume.
+            return False
+
+        include_dirs = [
+            join(F, fw + '.framework', H)
+            for fw in ('Tcl', 'Tk')
+            for H in ('Headers',)
+        ]
+
+        # Add the base framework directory as well
+        compile_args = ['-F', F]
+
+        # Do not build tkinter for archs that this Tk was not built with.
+        cflags = sysconfig.get_config_vars('CFLAGS')[0]
+        archs = re.findall(r'-arch\s+(\w+)', cflags)
+
+        tmpfile = os.path.join(self.build_temp, 'tk.arch')
+        if not os.path.exists(self.build_temp):
+            os.makedirs(self.build_temp)
+
+        run_command(
+            "file {}/Tk.framework/Tk | grep 'for architecture' > {}".format(F, tmpfile)
+        )
+        with open(tmpfile) as fp:
+            detected_archs = []
+            for ln in fp:
+                a = ln.split()[-1]
+                if a in archs:
+                    detected_archs.append(a)
+        os.unlink(tmpfile)
+
+        arch_args = []
+        for a in detected_archs:
+            arch_args.append('-arch')
+            arch_args.append(a)
+
+        compile_args += arch_args
+        link_args = [','.join(['-Wl', '-F', F, '-framework', 'Tcl', '-framework', 'Tk']), *arch_args]
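+        # For illustration, with F == '/Library/Frameworks' the joined
+        # argument above becomes
+        # '-Wl,-F,/Library/Frameworks,-framework,Tcl,-framework,Tk'.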
+
+        # The X11/xlib.h file bundled in the Tk sources can cause function
+        # prototype warnings from the compiler. Since we cannot easily fix
+        # that, suppress the warnings here instead.
+        if '-Wstrict-prototypes' in cflags.split():
+            compile_args.append('-Wno-strict-prototypes')
+
+        self.add(Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
+                           define_macros=[('WITH_APPINIT', 1)],
+                           include_dirs=include_dirs,
+                           libraries=[],
+                           extra_compile_args=compile_args,
+                           extra_link_args=link_args))
+        return True
+
+    def detect_tkinter(self):
+        # The _tkinter module.
+        #
+        # Detection of Tcl/Tk is attempted in the following order:
+        #   - Through environment variables.
+        #   - Platform specific detection of Tcl/Tk (currently only macOS).
+        #   - Search of various standard Unix header/library paths.
+        #
+        # Detection stops at the first successful method.
+
+        # Check for Tcl and Tk at the locations indicated by _TCLTK_INCLUDES
+        # and _TCLTK_LIBS environment variables.
+        if self.detect_tkinter_fromenv():
+            return True
+
+        # Rather than complicate the code below, detecting and building
+        # AquaTk is a separate method. Only one Tkinter will be built on
+        # Darwin - either AquaTk, if it is found, or X11 based Tk.
+        if (MACOS and self.detect_tkinter_darwin()):
+            return True
+
+        # Assume we haven't found any of the libraries or include files
+        # The versions with dots are used on Unix, and the versions without
+        # dots on Windows, for detection by cygwin.
+        tcllib = tklib = tcl_includes = tk_includes = None
+        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
+                        '8.2', '82', '8.1', '81', '8.0', '80']:
+            tklib = self.compiler.find_library_file(self.lib_dirs,
+                                                        'tk' + version)
+            tcllib = self.compiler.find_library_file(self.lib_dirs,
+                                                         'tcl' + version)
+            if tklib and tcllib:
+                # Exit the loop when we've found the Tcl/Tk libraries
+                break
+
+        # Now check for the header files
+        if tklib and tcllib:
+            # Check for the include files on Debian and {Free,Open}BSD, where
+            # they're put in /usr/include/{tcl,tk}X.Y
+            dotversion = version
+            if '.' not in dotversion and "bsd" in HOST_PLATFORM.lower():
+                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
+                # but the include subdirs are named like .../include/tcl8.3.
+                dotversion = dotversion[:-1] + '.' + dotversion[-1]
+            tcl_include_sub = []
+            tk_include_sub = []
+            for dir in self.inc_dirs:
+                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
+                tk_include_sub += [dir + os.sep + "tk" + dotversion]
+            tk_include_sub += tcl_include_sub
+            tcl_includes = find_file('tcl.h', self.inc_dirs, tcl_include_sub)
+            tk_includes = find_file('tk.h', self.inc_dirs, tk_include_sub)
+
+        if (tcllib is None or tklib is None or
+            tcl_includes is None or tk_includes is None):
+            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
+            return False
+
+        # OK... everything seems to be present for Tcl/Tk.
+
+        include_dirs = []
+        libs = []
+        defs = []
+        added_lib_dirs = []
+        for dir in tcl_includes + tk_includes:
+            if dir not in include_dirs:
+                include_dirs.append(dir)
+
+        # Check for various platform-specific directories
+        if HOST_PLATFORM == 'sunos5':
+            include_dirs.append('/usr/openwin/include')
+            added_lib_dirs.append('/usr/openwin/lib')
+        elif os.path.exists('/usr/X11R6/include'):
+            include_dirs.append('/usr/X11R6/include')
+            added_lib_dirs.append('/usr/X11R6/lib64')
+            added_lib_dirs.append('/usr/X11R6/lib')
+        elif os.path.exists('/usr/X11R5/include'):
+            include_dirs.append('/usr/X11R5/include')
+            added_lib_dirs.append('/usr/X11R5/lib')
+        else:
+            # Assume default location for X11
+            include_dirs.append('/usr/X11/include')
+            added_lib_dirs.append('/usr/X11/lib')
+
+        # If Cygwin, then verify that X is installed before proceeding
+        if CYGWIN:
+            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
+            if x11_inc is None:
+                return False
+
+        # Check for BLT extension
+        if self.compiler.find_library_file(self.lib_dirs + added_lib_dirs,
+                                               'BLT8.0'):
+            defs.append( ('WITH_BLT', 1) )
+            libs.append('BLT8.0')
+        elif self.compiler.find_library_file(self.lib_dirs + added_lib_dirs,
+                                                'BLT'):
+            defs.append( ('WITH_BLT', 1) )
+            libs.append('BLT')
+
+        # Add the Tcl/Tk libraries
+        libs.append('tk'+ version)
+        libs.append('tcl'+ version)
+
+        # Finally, link with the X11 libraries (not appropriate on cygwin)
+        if not CYGWIN:
+            libs.append('X11')
+
+        # XXX handle these, but how to detect?
+        # *** Uncomment and edit for PIL (TkImaging) extension only:
+        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
+        # *** Uncomment and edit for TOGL extension only:
+        #       -DWITH_TOGL togl.c \
+        # *** Uncomment these for TOGL extension only:
+        #       -lGL -lGLU -lXext -lXmu \
+
+        self.add(Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
+                           define_macros=[('WITH_APPINIT', 1)] + defs,
+                           include_dirs=include_dirs,
+                           libraries=libs,
+                           library_dirs=added_lib_dirs))
+        return True
+
+    def configure_ctypes(self, ext):
+        return True
+
+    def detect_ctypes(self):
+        # Thomas Heller's _ctypes module
+
+        if (not sysconfig.get_config_var("LIBFFI_INCLUDEDIR") and MACOS):
+            self.use_system_libffi = True
+        else:
+            self.use_system_libffi = '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS")
+
+        include_dirs = []
+        extra_compile_args = ['-DPy_BUILD_CORE_MODULE']
+        extra_link_args = []
+        sources = ['_ctypes/_ctypes.c',
+                   '_ctypes/callbacks.c',
+                   '_ctypes/callproc.c',
+                   '_ctypes/stgdict.c',
+                   '_ctypes/cfield.c']
+        depends = ['_ctypes/ctypes.h']
+
+        if MACOS:
+            sources.append('_ctypes/malloc_closure.c')
+            extra_compile_args.append('-DUSING_MALLOC_CLOSURE_DOT_C=1')
+            extra_compile_args.append('-DMACOSX')
+            include_dirs.append('_ctypes/darwin')
+
+        elif HOST_PLATFORM == 'sunos5':
+            # XXX This shouldn't be necessary; it appears that some
+            # of the assembler code is non-PIC (i.e. it has relocations
+            # when it shouldn't).  The proper fix would be to rewrite
+            # the assembler code to be PIC.
+            # This only works with GCC; the Sun compiler likely refuses
+            # this option. If you want to compile ctypes with the Sun
+            # compiler, please research a proper solution, instead of
+            # finding some -z option for the Sun compiler.
+            extra_link_args.append('-mimpure-text')
+
+        elif HOST_PLATFORM.startswith('hp-ux'):
+            extra_link_args.append('-fPIC')
+
+        ext = Extension('_ctypes',
+                        include_dirs=include_dirs,
+                        extra_compile_args=extra_compile_args,
+                        extra_link_args=extra_link_args,
+                        libraries=[],
+                        sources=sources,
+                        depends=depends)
+        self.add(ext)
+        if TEST_EXTENSIONS:
+            # function my_sqrt() needs libm for sqrt()
+            self.add(Extension('_ctypes_test',
+                               sources=['_ctypes/_ctypes_test.c'],
+                               libraries=['m']))
+
+        ffi_inc = sysconfig.get_config_var("LIBFFI_INCLUDEDIR")
+        ffi_lib = None
+
+        ffi_inc_dirs = self.inc_dirs.copy()
+        if MACOS:
+            ffi_in_sdk = os.path.join(macosx_sdk_root(), "usr/include/ffi")
+
+            if not ffi_inc:
+                if os.path.exists(ffi_in_sdk):
+                    ext.extra_compile_args.append("-DUSING_APPLE_OS_LIBFFI=1")
+                    ffi_inc = ffi_in_sdk
+                    ffi_lib = 'ffi'
+                else:
+                    # OS X 10.5 comes with libffi.dylib; the include files are
+                    # in /usr/include/ffi
+                    ffi_inc_dirs.append('/usr/include/ffi')
+
+        if not ffi_inc:
+            found = find_file('ffi.h', [], ffi_inc_dirs)
+            if found:
+                ffi_inc = found[0]
+        if ffi_inc:
+            ffi_h = ffi_inc + '/ffi.h'
+            if not os.path.exists(ffi_h):
+                ffi_inc = None
+                print('Header file {} does not exist'.format(ffi_h))
+        if ffi_lib is None and ffi_inc:
+            for lib_name in ('ffi', 'ffi_pic'):
+                if (self.compiler.find_library_file(self.lib_dirs, lib_name)):
+                    ffi_lib = lib_name
+                    break
+
+        if ffi_inc and ffi_lib:
+            ffi_headers = glob(os.path.join(ffi_inc, '*.h'))
+            if grep_headers_for('ffi_prep_cif_var', ffi_headers):
+                ext.extra_compile_args.append("-DHAVE_FFI_PREP_CIF_VAR=1")
+            if grep_headers_for('ffi_prep_closure_loc', ffi_headers):
+                ext.extra_compile_args.append("-DHAVE_FFI_PREP_CLOSURE_LOC=1")
+            if grep_headers_for('ffi_closure_alloc', ffi_headers):
+                ext.extra_compile_args.append("-DHAVE_FFI_CLOSURE_ALLOC=1")
+
+            ext.include_dirs.append(ffi_inc)
+            ext.libraries.append(ffi_lib)
+            self.use_system_libffi = True
+
+        if sysconfig.get_config_var('HAVE_LIBDL'):
+            # for dlopen, see bpo-32647
+            ext.libraries.append('dl')
+
+    def detect_decimal(self):
+        # Stefan Krah's _decimal module
+        extra_compile_args = []
+        undef_macros = []
+        if '--with-system-libmpdec' in sysconfig.get_config_var("CONFIG_ARGS"):
+            include_dirs = []
+            libraries = ['mpdec']
+            sources = ['_decimal/_decimal.c']
+            depends = ['_decimal/docstrings.h']
+        else:
+            include_dirs = [os.path.abspath(os.path.join(self.srcdir,
+                                                         'Modules',
+                                                         '_decimal',
+                                                         'libmpdec'))]
+            libraries = ['m']
+            sources = [
+              '_decimal/_decimal.c',
+              '_decimal/libmpdec/basearith.c',
+              '_decimal/libmpdec/constants.c',
+              '_decimal/libmpdec/context.c',
+              '_decimal/libmpdec/convolute.c',
+              '_decimal/libmpdec/crt.c',
+              '_decimal/libmpdec/difradix2.c',
+              '_decimal/libmpdec/fnt.c',
+              '_decimal/libmpdec/fourstep.c',
+              '_decimal/libmpdec/io.c',
+              '_decimal/libmpdec/mpalloc.c',
+              '_decimal/libmpdec/mpdecimal.c',
+              '_decimal/libmpdec/numbertheory.c',
+              '_decimal/libmpdec/sixstep.c',
+              '_decimal/libmpdec/transpose.c',
+              ]
+            depends = [
+              '_decimal/docstrings.h',
+              '_decimal/libmpdec/basearith.h',
+              '_decimal/libmpdec/bits.h',
+              '_decimal/libmpdec/constants.h',
+              '_decimal/libmpdec/convolute.h',
+              '_decimal/libmpdec/crt.h',
+              '_decimal/libmpdec/difradix2.h',
+              '_decimal/libmpdec/fnt.h',
+              '_decimal/libmpdec/fourstep.h',
+              '_decimal/libmpdec/io.h',
+              '_decimal/libmpdec/mpalloc.h',
+              '_decimal/libmpdec/mpdecimal.h',
+              '_decimal/libmpdec/numbertheory.h',
+              '_decimal/libmpdec/sixstep.h',
+              '_decimal/libmpdec/transpose.h',
+              '_decimal/libmpdec/typearith.h',
+              '_decimal/libmpdec/umodarith.h',
+              ]
+
+        config = {
+          'x64':     [('CONFIG_64','1'), ('ASM','1')],
+          'uint128': [('CONFIG_64','1'), ('ANSI','1'), ('HAVE_UINT128_T','1')],
+          'ansi64':  [('CONFIG_64','1'), ('ANSI','1')],
+          'ppro':    [('CONFIG_32','1'), ('PPRO','1'), ('ASM','1')],
+          'ansi32':  [('CONFIG_32','1'), ('ANSI','1')],
+          'ansi-legacy': [('CONFIG_32','1'), ('ANSI','1'),
+                          ('LEGACY_COMPILER','1')],
+          'universal':   [('UNIVERSAL','1')]
+        }
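+        # Illustrative outcome: on a typical x86-64 gcc build,
+        # SIZEOF_SIZE_T == 8 and HAVE_GCC_ASM_FOR_X64 is defined, so the
+        # selection logic below would pick config['x64'].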
+
+        cc = sysconfig.get_config_var('CC')
+        sizeof_size_t = sysconfig.get_config_var('SIZEOF_SIZE_T')
+        machine = os.environ.get('PYTHON_DECIMAL_WITH_MACHINE')
+
+        if machine:
+            # Override automatic configuration to facilitate testing.
+            define_macros = config[machine]
+        elif MACOS:
+            # Universal here means: build with the same options Python
+            # was built with.
+            define_macros = config['universal']
+        elif sizeof_size_t == 8:
+            if sysconfig.get_config_var('HAVE_GCC_ASM_FOR_X64'):
+                define_macros = config['x64']
+            elif sysconfig.get_config_var('HAVE_GCC_UINT128_T'):
+                define_macros = config['uint128']
+            else:
+                define_macros = config['ansi64']
+        elif sizeof_size_t == 4:
+            ppro = sysconfig.get_config_var('HAVE_GCC_ASM_FOR_X87')
+            if ppro and ('gcc' in cc or 'clang' in cc) and \
+               'sunos' not in HOST_PLATFORM:
+                # solaris: problems with register allocation.
+                # icc >= 11.0 works as well.
+                define_macros = config['ppro']
+                extra_compile_args.append('-Wno-unknown-pragmas')
+            else:
+                define_macros = config['ansi32']
+        else:
+            raise DistutilsError("_decimal: unsupported architecture")
+
+        # Workarounds for toolchain bugs:
+        if sysconfig.get_config_var('HAVE_IPA_PURE_CONST_BUG'):
+            # Some versions of gcc miscompile inline asm:
+            # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=46491
+            # https://gcc.gnu.org/ml/gcc/2010-11/msg00366.html
+            extra_compile_args.append('-fno-ipa-pure-const')
+        if sysconfig.get_config_var('HAVE_GLIBC_MEMMOVE_BUG'):
+            # _FORTIFY_SOURCE wrappers for memmove and bcopy are incorrect:
+            # https://sourceware.org/ml/libc-alpha/2010-12/msg00009.html
+            undef_macros.append('_FORTIFY_SOURCE')
+
+        # Uncomment for extra functionality:
+        #define_macros.append(('EXTRA_FUNCTIONALITY', 1))
+        self.add(Extension('_decimal',
+                           include_dirs=include_dirs,
+                           libraries=libraries,
+                           define_macros=define_macros,
+                           undef_macros=undef_macros,
+                           extra_compile_args=extra_compile_args,
+                           sources=sources,
+                           depends=depends))
+
+    def detect_openssl_hashlib(self):
+        # Detect SSL support for the socket module (via _ssl)
+        config_vars = sysconfig.get_config_vars()
+
+        def split_var(name, sep):
+            # poor man's shlex, the re module is not available yet.
+            value = config_vars.get(name)
+            if not value:
+                return ()
+            # This trick works because ax_check_openssl uses --libs-only-L,
+            # --libs-only-l, and --cflags-only-I.
+            value = ' ' + value
+            sep = ' ' + sep
+            return [v.strip() for v in value.split(sep) if v.strip()]
+
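+        # For illustration (assumed configure output): with
+        # OPENSSL_LIBS='-lssl -lcrypto', split_var('OPENSSL_LIBS', '-l')
+        # returns ['ssl', 'crypto'].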
+        openssl_includes = split_var('OPENSSL_INCLUDES', '-I')
+        openssl_libdirs = split_var('OPENSSL_LDFLAGS', '-L')
+        openssl_libs = split_var('OPENSSL_LIBS', '-l')
+        openssl_rpath = config_vars.get('OPENSSL_RPATH')
+        if not openssl_libs:
+            # libssl and libcrypto not found
+            self.missing.extend(['_ssl', '_hashlib'])
+            return None, None
+
+        # Find OpenSSL includes
+        ssl_incs = find_file(
+            'openssl/ssl.h', self.inc_dirs, openssl_includes
+        )
+        if ssl_incs is None:
+            self.missing.extend(['_ssl', '_hashlib'])
+            return None, None
+
+        if openssl_rpath == 'auto':
+            runtime_library_dirs = openssl_libdirs[:]
+        elif not openssl_rpath:
+            runtime_library_dirs = []
+        else:
+            runtime_library_dirs = [openssl_rpath]
+
+        openssl_extension_kwargs = dict(
+            include_dirs=openssl_includes,
+            library_dirs=openssl_libdirs,
+            libraries=openssl_libs,
+            runtime_library_dirs=runtime_library_dirs,
+        )
+
+        # This static linking is NOT OFFICIALLY SUPPORTED.
+        # Requires static OpenSSL build with position-independent code. Some
+        # features like DSO engines or external OSSL providers don't work.
+        # Only tested on GCC and clang on X86_64.
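+        # Illustrative (unsupported) usage: export the variable in the build
+        # environment, e.g. PY_UNSUPPORTED_OPENSSL_BUILD=static make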
+        if os.environ.get("PY_UNSUPPORTED_OPENSSL_BUILD") == "static":
+            extra_linker_args = []
+            for lib in openssl_extension_kwargs["libraries"]:
+                # link statically
+                extra_linker_args.append(f"-l:lib{lib}.a")
+                # don't export symbols
+                extra_linker_args.append(f"-Wl,--exclude-libs,lib{lib}.a")
+            openssl_extension_kwargs["extra_link_args"] = extra_linker_args
+            # don't link OpenSSL shared libraries.
+            # include libz for OpenSSL build flavors with compression support
+            openssl_extension_kwargs["libraries"] = ["z"]
+
+        self.add(
+            Extension(
+                '_ssl',
+                ['_ssl.c'],
+                depends=[
+                    'socketmodule.h',
+                    '_ssl.h',
+                    '_ssl/debughelpers.c',
+                    '_ssl/misc.c',
+                    '_ssl/cert.c',
+                ],
+                **openssl_extension_kwargs
+            )
+        )
+        self.add(
+            Extension(
+                '_hashlib',
+                ['_hashopenssl.c'],
+                depends=['hashlib.h'],
+                **openssl_extension_kwargs,
+            )
+        )
+
+    def detect_hash_builtins(self):
+        # By default we always compile these even when OpenSSL is available
+        # (issue #14693). It's harmless and the object code is tiny
+        # (40-50 KiB per module, only loaded when actually used).  Modules can
+        # be disabled via the --with-builtin-hashlib-hashes configure flag.
+        supported = {"md5", "sha1", "sha256", "sha512", "sha3", "blake2"}
+
+        configured = sysconfig.get_config_var("PY_BUILTIN_HASHLIB_HASHES")
+        configured = configured.strip('"').lower()
+        configured = {
+            m.strip() for m in configured.split(",")
+        }
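+        # For illustration: if PY_BUILTIN_HASHLIB_HASHES were
+        # '"md5,sha1,sha256,sha512,sha3,blake2"', 'configured' would equal
+        # the full 'supported' set and nothing would be disabled below.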
+
+        self.disabled_configure.extend(
+            sorted(supported.difference(configured))
+        )
+
+        if "sha256" in configured:
+            self.add(Extension(
+                '_sha256', ['sha256module.c'],
+                extra_compile_args=['-DPy_BUILD_CORE_MODULE'],
+                depends=['hashlib.h']
+            ))
+
+        if "sha512" in configured:
+            self.add(Extension(
+                '_sha512', ['sha512module.c'],
+                extra_compile_args=['-DPy_BUILD_CORE_MODULE'],
+                depends=['hashlib.h']
+            ))
+
+        if "md5" in configured:
+            self.add(Extension(
+                '_md5', ['md5module.c'],
+                depends=['hashlib.h']
+            ))
+
+        if "sha1" in configured:
+            self.add(Extension(
+                '_sha1', ['sha1module.c'],
+                depends=['hashlib.h']
+            ))
+
+        if "blake2" in configured:
+            blake2_deps = glob(
+                os.path.join(escape(self.srcdir), 'Modules/_blake2/impl/*')
+            )
+            blake2_deps.append('hashlib.h')
+            self.add(Extension(
+                '_blake2',
+                [
+                    '_blake2/blake2module.c',
+                    '_blake2/blake2b_impl.c',
+                    '_blake2/blake2s_impl.c'
+                ],
+                depends=blake2_deps
+            ))
+
+        if "sha3" in configured:
+            sha3_deps = glob(
+                os.path.join(escape(self.srcdir), 'Modules/_sha3/kcp/*')
+            )
+            sha3_deps.append('hashlib.h')
+            self.add(Extension(
+                '_sha3',
+                ['_sha3/sha3module.c'],
+                depends=sha3_deps
+            ))
+
+    def detect_nis(self):
+        if MS_WINDOWS or CYGWIN or HOST_PLATFORM == 'qnx6':
+            self.missing.append('nis')
+            return
+
+        libs = []
+        library_dirs = []
+        includes_dirs = []
+
+        # bpo-32521: glibc has deprecated Sun RPC for some time. Fedora 28
+        # moved headers and libraries to libtirpc and libnsl. The headers
+        # are in tirpc and nsl subdirectories.
+        rpcsvc_inc = find_file(
+            'rpcsvc/yp_prot.h', self.inc_dirs,
+            [os.path.join(inc_dir, 'nsl') for inc_dir in self.inc_dirs]
+        )
+        rpc_inc = find_file(
+            'rpc/rpc.h', self.inc_dirs,
+            [os.path.join(inc_dir, 'tirpc') for inc_dir in self.inc_dirs]
+        )
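+        # Illustrative layout on such systems (paths are assumptions):
+        # rpc/rpc.h under /usr/include/tirpc/ and rpcsvc/yp_prot.h under
+        # /usr/include/nsl/, hence the subdirectory searches above.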
+        if rpcsvc_inc is None or rpc_inc is None:
+            # not found
+            self.missing.append('nis')
+            return
+        includes_dirs.extend(rpcsvc_inc)
+        includes_dirs.extend(rpc_inc)
+
+        if self.compiler.find_library_file(self.lib_dirs, 'nsl'):
+            libs.append('nsl')
+        else:
+            # libnsl-devel: check for libnsl in nsl/ subdirectory
+            nsl_dirs = [os.path.join(lib_dir, 'nsl') for lib_dir in self.lib_dirs]
+            libnsl = self.compiler.find_library_file(nsl_dirs, 'nsl')
+            if libnsl is not None:
+                library_dirs.append(os.path.dirname(libnsl))
+                libs.append('nsl')
+
+        if self.compiler.find_library_file(self.lib_dirs, 'tirpc'):
+            libs.append('tirpc')
+
+        self.add(Extension('nis', ['nismodule.c'],
+                           libraries=libs,
+                           library_dirs=library_dirs,
+                           include_dirs=includes_dirs))
+
+
+class PyBuildInstall(install):
+    # Suppress the warning about installation into the lib_dynload
+    # directory, which is not in sys.path when running Python during
+    # installation:
+    def initialize_options(self):
+        install.initialize_options(self)
+        self.warn_dir=0
+
+    # Customize subcommands to not install an egg-info file for Python
+    sub_commands = [('install_lib', install.has_lib),
+                    ('install_headers', install.has_headers),
+                    ('install_scripts', install.has_scripts),
+                    ('install_data', install.has_data)]
+
+
+class PyBuildInstallLib(install_lib):
+    # Do exactly what install_lib does but make sure correct access modes get
+    # set on installed directories and files. All installed files will get
+    # mode 644 unless they are a shared library, in which case they will
+    # get mode 755. All installed directories will get mode 755.
+
+    # this also works for EXT_SUFFIX, which ends with SHLIB_SUFFIX
+    shlib_suffix = sysconfig.get_config_var("SHLIB_SUFFIX")
+
+    def install(self):
+        outfiles = install_lib.install(self)
+        self.set_file_modes(outfiles, 0o644, 0o755)
+        self.set_dir_modes(self.install_dir, 0o755)
+        return outfiles
+
+    def set_file_modes(self, files, defaultMode, sharedLibMode):
+        if not files: return
+
+        for filename in files:
+            if os.path.islink(filename): continue
+            mode = defaultMode
+            if filename.endswith(self.shlib_suffix): mode = sharedLibMode
+            log.info("changing mode of %s to %o", filename, mode)
+            if not self.dry_run: os.chmod(filename, mode)
+
+    def set_dir_modes(self, dirname, mode):
+        for dirpath, dirnames, fnames in os.walk(dirname):
+            if os.path.islink(dirpath):
+                continue
+            log.info("changing mode of %s to %o", dirpath, mode)
+            if not self.dry_run: os.chmod(dirpath, mode)
+
+
+class PyBuildScripts(build_scripts):
+    def copy_scripts(self):
+        outfiles, updated_files = build_scripts.copy_scripts(self)
+        fullversion = '-{0[0]}.{0[1]}'.format(sys.version_info)
+        minoronly = '.{0[1]}'.format(sys.version_info)
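+        # For example, on Python 3.9 this gives fullversion == '-3.9' and
+        # minoronly == '.9', so 'idle3' becomes 'idle3.9' while '2to3'
+        # becomes '2to3-3.9' below.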
+        newoutfiles = []
+        newupdated_files = []
+        for filename in outfiles:
+            if filename.endswith('2to3'):
+                newfilename = filename + fullversion
+            else:
+                newfilename = filename + minoronly
+            log.info(f'renaming {filename} to {newfilename}')
+            os.rename(filename, newfilename)
+            newoutfiles.append(newfilename)
+            if filename in updated_files:
+                newupdated_files.append(newfilename)
+        return newoutfiles, newupdated_files
+
+
+def main():
+    global LIST_MODULE_NAMES
+
+    if "--list-module-names" in sys.argv:
+        LIST_MODULE_NAMES = True
+        sys.argv.remove("--list-module-names")
+
+    set_compiler_flags('CFLAGS', 'PY_CFLAGS_NODIST')
+    set_compiler_flags('LDFLAGS', 'PY_LDFLAGS_NODIST')
+
+    class DummyProcess:
+        """Hack for parallel build"""
+        ProcessPoolExecutor = None
+
+    sys.modules['concurrent.futures.process'] = DummyProcess
+    validate_tzpath()
+
+    # turn off warnings when deprecated modules are imported
+    import warnings
+    warnings.filterwarnings("ignore",category=DeprecationWarning)
+    setup(# PyPI Metadata (PEP 301)
+          name = "Python",
+          version = sys.version.split()[0],
+          url = "https://www.python.org/%d.%d" % sys.version_info[:2],
+          maintainer = "Guido van Rossum and the Python community",
+          maintainer_email = "python-dev@python.org",
+          description = "A high-level object-oriented programming language",
+          long_description = SUMMARY.strip(),
+          license = "PSF license",
+          classifiers = [x for x in CLASSIFIERS.split("\n") if x],
+          platforms = ["Many"],
+
+          # Build info
+          cmdclass = {'build_ext': PyBuildExt,
+                      'build_scripts': PyBuildScripts,
+                      'install': PyBuildInstall,
+                      'install_lib': PyBuildInstallLib},
+          # The struct module is defined here, because build_ext won't be
+          # called unless there's at least one extension module defined.
+          ext_modules=[Extension('_struct', ['_struct.c'],
+                                 extra_compile_args=['-DPy_BUILD_CORE_MODULE'])],
+
+          # If you change the scripts installed here, you also need to
+          # check the PyBuildScripts command above, and change the links
+          # created by the bininstall target in Makefile.pre.in
+          scripts = ["Tools/scripts/pydoc3", "Tools/scripts/idle3",
+                     "Tools/scripts/2to3"]
+        )
+
+# --install-platlib
+if __name__ == '__main__':
+    main()
Index: .
===================================================================
--- .	(nonexistent)
+++ .	(revision 5)

Property changes on: .
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,73 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~