diff DEPENDENCIES/mingw32/Python27/Lib/site-packages/numpy/distutils/misc_util.py @ 87:2a2c65a20a8b

Add Python libs and headers
author Chris Cannam
date Wed, 25 Feb 2015 14:05:22 +0000
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DEPENDENCIES/mingw32/Python27/Lib/site-packages/numpy/distutils/misc_util.py	Wed Feb 25 14:05:22 2015 +0000
@@ -0,0 +1,2271 @@
+from __future__ import division, absolute_import, print_function
+
+import os
+import re
+import sys
+import imp
+import copy
+import glob
+import atexit
+import tempfile
+import subprocess
+import shutil
+
+import distutils
+from distutils.errors import DistutilsError
+
+try:
+    set
+except NameError:
+    from sets import Set as set
+
+from numpy.distutils.compat import get_exception
+
+__all__ = ['Configuration', 'get_numpy_include_dirs', 'default_config_dict',
+           'dict_append', 'appendpath', 'generate_config_py',
+           'get_cmd', 'allpath', 'get_mathlibs',
+           'terminal_has_colors', 'red_text', 'green_text', 'yellow_text',
+           'blue_text', 'cyan_text', 'cyg2win32', 'mingw32', 'all_strings',
+           'has_f_sources', 'has_cxx_sources', 'filter_sources',
+           'get_dependencies', 'is_local_src_dir', 'get_ext_source_files',
+           'get_script_files', 'get_lib_source_files', 'get_data_files',
+           'dot_join', 'get_frame', 'minrelpath', 'njoin',
+           'is_sequence', 'is_string', 'as_list', 'gpaths', 'get_language',
+           'quote_args', 'get_build_architecture', 'get_info', 'get_pkg_info']
+
+class InstallableLib(object):
+    """
+    Container to hold information on an installable library.
+
+    Parameters
+    ----------
+    name : str
+        Name of the installed library.
+    build_info : dict
+        Dictionary holding build information.
+    target_dir : str
+        Absolute path specifying where to install the library.
+
+    See Also
+    --------
+    Configuration.add_installed_library
+
+    Notes
+    -----
+    The three parameters are stored as attributes with the same names.
+
+    """
+    def __init__(self, name, build_info, target_dir):
+        self.name = name
+        self.build_info = build_info
+        self.target_dir = target_dir
+
+def quote_args(args):
+    # don't use _nt_quote_args, as it does not check whether
+    # the args items are already quoted.
+    args = list(args)
+    for i in range(len(args)):
+        a = args[i]
+        if ' ' in a and a[0] not in '"\'':
+            args[i] = '"%s"' % (a)
+    return args
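+
+# Example: quote_args(['gcc', '-I/My Dir/include']) returns
+# ['gcc', '"-I/My Dir/include"']; arguments that contain no space, or that
+# already start with a quote character, are returned unchanged.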
+
+def allpath(name):
+    "Convert a /-separated pathname to one using the OS's path separator."
+    splitted = name.split('/')
+    return os.path.join(*splitted)
+
+def rel_path(path, parent_path):
+    """Return path relative to parent_path.
+    """
+    pd = os.path.abspath(parent_path)
+    apath = os.path.abspath(path)
+    if len(apath)<len(pd):
+        return path
+    if apath==pd:
+        return ''
+    if pd == apath[:len(pd)]:
+        assert apath[len(pd)] in [os.sep], repr((path, apath[len(pd)]))
+        path = apath[len(pd)+1:]
+    return path
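+
+# Example (POSIX paths): rel_path('/usr/local/lib', '/usr/local') returns
+# 'lib'; a path that does not live under parent_path is returned unchanged.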
+
+def get_path_from_frame(frame, parent_path=None):
+    """Return path of the module given a frame object from the call stack.
+
+    The returned path is relative to parent_path when given,
+    otherwise it is an absolute path.
+    """
+
+    # First, try to find if the file name is in the frame.
+    try:
+        caller_file = eval('__file__', frame.f_globals, frame.f_locals)
+        d = os.path.dirname(os.path.abspath(caller_file))
+    except NameError:
+        # __file__ is not defined, so let's try __name__. We try this second
+        # because setuptools spoofs __name__ to be '__main__' even though
+        # sys.modules['__main__'] might be something else, like easy_install(1).
+        caller_name = eval('__name__', frame.f_globals, frame.f_locals)
+        __import__(caller_name)
+        mod = sys.modules[caller_name]
+        if hasattr(mod, '__file__'):
+            d = os.path.dirname(os.path.abspath(mod.__file__))
+        else:
+            # we're probably running setup.py as execfile("setup.py")
+            # (likely we're building an egg)
+            d = os.path.abspath('.')
+            # hmm, should we use sys.argv[0] like in __builtin__ case?
+
+    if parent_path is not None:
+        d = rel_path(d, parent_path)
+
+    return d or '.'
+
+def njoin(*path):
+    """Join two or more pathname components +
+    - convert a /-separated pathname to one using the OS's path separator.
+    - resolve `..` and `.` from path.
+
+    Either passing n arguments as in njoin('a','b'), or a sequence
+    of n names as in njoin(['a','b']) is handled, or a mixture of such arguments.
+    """
+    paths = []
+    for p in path:
+        if is_sequence(p):
+            # njoin(['a', 'b'], 'c')
+            paths.append(njoin(*p))
+        else:
+            assert is_string(p)
+            paths.append(p)
+    path = paths
+    if not path:
+        # njoin()
+        joined = ''
+    else:
+        # njoin('a', 'b')
+        joined = os.path.join(*path)
+    if os.path.sep != '/':
+        joined = joined.replace('/', os.path.sep)
+    return minrelpath(joined)
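+
+# Example: njoin('a', 'b', '..', 'c') gives 'a/c' on POSIX ('a\\c' on
+# Windows), and njoin(['a', 'b'], 'c') is equivalent to njoin('a', 'b', 'c').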
+
+def get_mathlibs(path=None):
+    """Return the MATHLIB line from numpyconfig.h
+    """
+    if path is not None:
+        config_file = os.path.join(path, '_numpyconfig.h')
+    else:
+        # Look for the file in each of the numpy include directories.
+        dirs = get_numpy_include_dirs()
+        for path in dirs:
+            fn = os.path.join(path, '_numpyconfig.h')
+            if os.path.exists(fn):
+                config_file = fn
+                break
+        else:
+            raise DistutilsError('_numpyconfig.h not found in numpy include '
+                'dirs %r' % (dirs,))
+
+    fid = open(config_file)
+    mathlibs = []
+    s = '#define MATHLIB'
+    for line in fid:
+        if line.startswith(s):
+            value = line[len(s):].strip()
+            if value:
+                mathlibs.extend(value.split(','))
+    fid.close()
+    return mathlibs
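+
+# Example: a '#define MATHLIB m' line in _numpyconfig.h makes get_mathlibs()
+# return ['m'], and '#define MATHLIB m,mvec' returns ['m', 'mvec']; a bare
+# '#define MATHLIB' (empty value) contributes nothing.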
+
+def minrelpath(path):
+    """Resolve `..` and '.' from path.
+    """
+    if not is_string(path):
+        return path
+    if '.' not in path:
+        return path
+    l = path.split(os.sep)
+    while l:
+        try:
+            i = l.index('.', 1)
+        except ValueError:
+            break
+        del l[i]
+    j = 1
+    while l:
+        try:
+            i = l.index('..', j)
+        except ValueError:
+            break
+        if l[i-1]=='..':
+            j += 1
+        else:
+            del l[i], l[i-1]
+            j = 1
+    if not l:
+        return ''
+    return os.sep.join(l)
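+
+# Example (POSIX): minrelpath('a/./b/../c') returns 'a/c'; leading '..'
+# components that cannot be resolved, as in '../a', are left in place.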
+
+def _fix_paths(paths, local_path, include_non_existing):
+    assert is_sequence(paths), repr(type(paths))
+    new_paths = []
+    assert not is_string(paths), repr(paths)
+    for n in paths:
+        if is_string(n):
+            if '*' in n or '?' in n:
+                p = glob.glob(n)
+                p2 = glob.glob(njoin(local_path, n))
+                if p2:
+                    new_paths.extend(p2)
+                elif p:
+                    new_paths.extend(p)
+                else:
+                    if include_non_existing:
+                        new_paths.append(n)
+                    print('could not resolve pattern in %r: %r' %
+                            (local_path, n))
+            else:
+                n2 = njoin(local_path, n)
+                if os.path.exists(n2):
+                    new_paths.append(n2)
+                else:
+                    if os.path.exists(n):
+                        new_paths.append(n)
+                    elif include_non_existing:
+                        new_paths.append(n)
+                    if not os.path.exists(n):
+                        print('non-existing path in %r: %r' %
+                                (local_path, n))
+
+        elif is_sequence(n):
+            new_paths.extend(_fix_paths(n, local_path, include_non_existing))
+        else:
+            new_paths.append(n)
+    return [minrelpath(p) for p in new_paths]
+
+def gpaths(paths, local_path='', include_non_existing=True):
+    """Apply glob to paths and prepend local_path if needed.
+    """
+    if is_string(paths):
+        paths = (paths,)
+    return _fix_paths(paths, local_path, include_non_existing)
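+
+# Example: gpaths('*.c', local_path='src') expands the pattern relative to
+# 'src' first, falling back to the current directory; names that do not
+# resolve are kept as given because include_non_existing defaults to True.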
+
+
+_temporary_directory = None
+def clean_up_temporary_directory():
+    global _temporary_directory
+    if not _temporary_directory:
+        return
+    try:
+        shutil.rmtree(_temporary_directory)
+    except OSError:
+        pass
+    _temporary_directory = None
+
+def make_temp_file(suffix='', prefix='', text=True):
+    global _temporary_directory
+    if not _temporary_directory:
+        _temporary_directory = tempfile.mkdtemp()
+        atexit.register(clean_up_temporary_directory)
+    fid, name = tempfile.mkstemp(suffix=suffix,
+                                 prefix=prefix,
+                                 dir=_temporary_directory,
+                                 text=text)
+    fo = os.fdopen(fid, 'w')
+    return fo, name
+
+# Hooks for colored terminal output.
+# See also http://www.livinglogic.de/Python/ansistyle
+def terminal_has_colors():
+    if sys.platform=='cygwin' and 'USE_COLOR' not in os.environ:
+        # Avoid importing curses that causes illegal operation
+        # with a message:
+        #  PYTHON2 caused an invalid page fault in
+        #  module CYGNURSES7.DLL as 015f:18bbfc28
+        # Details: Python 2.3.3 [GCC 3.3.1 (cygming special)]
+        #          ssh to Win32 machine from debian
+        #          curses.version is 2.2
+        #          CYGWIN_98-4.10, release 1.5.7(0.109/3/2))
+        return 0
+    if hasattr(sys.stdout, 'isatty') and sys.stdout.isatty():
+        try:
+            import curses
+            curses.setupterm()
+            if (curses.tigetnum("colors") >= 0
+                and curses.tigetnum("pairs") >= 0
+                and ((curses.tigetstr("setf") is not None
+                      and curses.tigetstr("setb") is not None)
+                     or (curses.tigetstr("setaf") is not None
+                         and curses.tigetstr("setab") is not None)
+                     or curses.tigetstr("scp") is not None)):
+                return 1
+        except Exception:
+            pass
+    return 0
+
+if terminal_has_colors():
+    _colour_codes = dict(black=0, red=1, green=2, yellow=3,
+                         blue=4, magenta=5, cyan=6, white=7, default=9)
+    def colour_text(s, fg=None, bg=None, bold=False):
+        seq = []
+        if bold:
+            seq.append('1')
+        if fg:
+            fgcode = 30 + _colour_codes.get(fg.lower(), 0)
+            seq.append(str(fgcode))
+        if bg:
+            bgcode = 40 + _colour_codes.get(bg.lower(), 7)
+            seq.append(str(bgcode))
+        if seq:
+            return '\x1b[%sm%s\x1b[0m' % (';'.join(seq), s)
+        else:
+            return s
+else:
+    def colour_text(s, fg=None, bg=None):
+        return s
+
+def default_text(s):
+    return colour_text(s, 'default')
+def red_text(s):
+    return colour_text(s, 'red')
+def green_text(s):
+    return colour_text(s, 'green')
+def yellow_text(s):
+    return colour_text(s, 'yellow')
+def cyan_text(s):
+    return colour_text(s, 'cyan')
+def blue_text(s):
+    return colour_text(s, 'blue')
+
+#########################
+
+def cyg2win32(path):
+    if sys.platform=='cygwin' and path.startswith('/cygdrive'):
+        path = path[10] + ':' + os.path.normcase(path[11:])
+    return path
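+
+# Example: under Cygwin, cyg2win32('/cygdrive/c/foo/bar') returns
+# 'c:/foo/bar'; on other platforms (or other prefixes) the path is returned
+# unchanged.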
+
+def mingw32():
+    """Return true when using mingw32 environment.
+    """
+    if sys.platform=='win32':
+        if os.environ.get('OSTYPE', '')=='msys':
+            return True
+        if os.environ.get('MSYSTEM', '')=='MINGW32':
+            return True
+    return False
+
+def msvc_runtime_library():
+    "Return name of MSVC runtime library if Python was built with MSVC >= 7"
+    msc_pos = sys.version.find('MSC v.')
+    if msc_pos != -1:
+        msc_ver = sys.version[msc_pos+6:msc_pos+10]
+        lib = {'1300': 'msvcr70',    # MSVC 7.0
+               '1310': 'msvcr71',    # MSVC 7.1
+               '1400': 'msvcr80',    # MSVC 8
+               '1500': 'msvcr90',    # MSVC 9 (VS 2008)
+               '1600': 'msvcr100',   # MSVC 10 (aka 2010)
+              }.get(msc_ver, None)
+    else:
+        lib = None
+    return lib
+
+
+#########################
+
+#XXX need support for .C that is also C++
+cxx_ext_match = re.compile(r'.*[.](cpp|cxx|cc)\Z', re.I).match
+fortran_ext_match = re.compile(r'.*[.](f90|f95|f77|for|ftn|f)\Z', re.I).match
+f90_ext_match = re.compile(r'.*[.](f90|f95)\Z', re.I).match
+f90_module_name_match = re.compile(r'\s*module\s*(?P<name>[\w_]+)', re.I).match
+def _get_f90_modules(source):
+    """Return a list of Fortran f90 module names that
+    given source file defines.
+    """
+    if not f90_ext_match(source):
+        return []
+    modules = []
+    f = open(source, 'r')
+    for line in f:
+        m = f90_module_name_match(line)
+        if m:
+            name = m.group('name')
+            modules.append(name)
+            # break  # XXX can we assume that there is one module per file?
+    f.close()
+    return modules
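+
+# Example: a source line '      module fftpack' in a .f90/.f95 file makes
+# _get_f90_modules() report 'fftpack'; files with any other extension always
+# yield an empty list.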
+
+def is_string(s):
+    return isinstance(s, str)
+
+def all_strings(lst):
+    """Return True if all items in lst are string objects. """
+    for item in lst:
+        if not is_string(item):
+            return False
+    return True
+
+def is_sequence(seq):
+    if is_string(seq):
+        return False
+    try:
+        len(seq)
+    except Exception:
+        return False
+    return True
+
+def is_glob_pattern(s):
+    return is_string(s) and ('*' in s or '?' in s)
+
+def as_list(seq):
+    if is_sequence(seq):
+        return list(seq)
+    else:
+        return [seq]
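+
+# Examples: is_sequence('abc') is False (strings are excluded) while
+# is_sequence(('a', 'b')) is True; as_list('abc') returns ['abc'] and
+# as_list(('a', 'b')) returns ['a', 'b'].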
+
+def get_language(sources):
+    # not used in numpy/scipy packages, use build_ext.detect_language instead
+    """Determine language value (c,f77,f90) from sources """
+    language = None
+    for source in sources:
+        if isinstance(source, str):
+            if f90_ext_match(source):
+                language = 'f90'
+                break
+            elif fortran_ext_match(source):
+                language = 'f77'
+    return language
+
+def has_f_sources(sources):
+    """Return True if sources contains Fortran files """
+    for source in sources:
+        if fortran_ext_match(source):
+            return True
+    return False
+
+def has_cxx_sources(sources):
+    """Return True if sources contains C++ files """
+    for source in sources:
+        if cxx_ext_match(source):
+            return True
+    return False
+
+def filter_sources(sources):
+    """Return four lists of filenames containing
+    C, C++, Fortran, and Fortran 90 module sources,
+    respectively.
+    """
+    c_sources = []
+    cxx_sources = []
+    f_sources = []
+    fmodule_sources = []
+    for source in sources:
+        if fortran_ext_match(source):
+            modules = _get_f90_modules(source)
+            if modules:
+                fmodule_sources.append(source)
+            else:
+                f_sources.append(source)
+        elif cxx_ext_match(source):
+            cxx_sources.append(source)
+        else:
+            c_sources.append(source)
+    return c_sources, cxx_sources, f_sources, fmodule_sources
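+
+# Example: filter_sources(['a.c', 'b.cpp', 'c.f', 'd.f90']) splits the inputs
+# into C, C++, Fortran, and Fortran 90 module sources; a .f90 file counts as a
+# module source only if it actually defines a module (the file is read to
+# check).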
+
+
+def _get_headers(directory_list):
+    # get *.h files from list of directories
+    headers = []
+    for d in directory_list:
+        head = glob.glob(os.path.join(d, "*.h")) #XXX: *.hpp files??
+        headers.extend(head)
+    return headers
+
+def _get_directories(list_of_sources):
+    # get unique directories from list of sources.
+    direcs = []
+    for f in list_of_sources:
+        d = os.path.split(f)
+        if d[0] != '' and not d[0] in direcs:
+            direcs.append(d[0])
+    return direcs
+
+def get_dependencies(sources):
+    #XXX scan sources for include statements
+    return _get_headers(_get_directories(sources))
+
+def is_local_src_dir(directory):
+    """Return true if directory is local directory.
+    """
+    if not is_string(directory):
+        return False
+    abs_dir = os.path.abspath(directory)
+    c = os.path.commonprefix([os.getcwd(), abs_dir])
+    new_dir = abs_dir[len(c):].split(os.sep)
+    if new_dir and not new_dir[0]:
+        new_dir = new_dir[1:]
+    if new_dir and new_dir[0]=='build':
+        return False
+    new_dir = os.sep.join(new_dir)
+    return os.path.isdir(new_dir)
+
+def general_source_files(top_path):
+    pruned_directories = {'CVS':1, '.svn':1, 'build':1}
+    prune_file_pat = re.compile(r'(?:[~#]|\.py[co]|\.o)$')
+    for dirpath, dirnames, filenames in os.walk(top_path, topdown=True):
+        pruned = [ d for d in dirnames if d not in pruned_directories ]
+        dirnames[:] = pruned
+        for f in filenames:
+            if not prune_file_pat.search(f):
+                yield os.path.join(dirpath, f)
+
+def general_source_directories_files(top_path):
+    """Return a directory name relative to top_path and
+    files contained.
+    """
+    pruned_directories = ['CVS', '.svn', 'build']
+    prune_file_pat = re.compile(r'(?:[~#]|\.py[co]|\.o)$')
+    for dirpath, dirnames, filenames in os.walk(top_path, topdown=True):
+        pruned = [ d for d in dirnames if d not in pruned_directories ]
+        dirnames[:] = pruned
+        for d in dirnames:
+            dpath = os.path.join(dirpath, d)
+            rpath = rel_path(dpath, top_path)
+            files = []
+            for f in os.listdir(dpath):
+                fn = os.path.join(dpath, f)
+                if os.path.isfile(fn) and not prune_file_pat.search(fn):
+                    files.append(fn)
+            yield rpath, files
+    dpath = top_path
+    rpath = rel_path(dpath, top_path)
+    filenames = [os.path.join(dpath, f) for f in os.listdir(dpath) \
+                 if not prune_file_pat.search(f)]
+    files = [f for f in filenames if os.path.isfile(f)]
+    yield rpath, files
+
+
+def get_ext_source_files(ext):
+    # Get sources and any include files in the same directory.
+    filenames = []
+    sources = [_m for _m in ext.sources if is_string(_m)]
+    filenames.extend(sources)
+    filenames.extend(get_dependencies(sources))
+    for d in ext.depends:
+        if is_local_src_dir(d):
+            filenames.extend(list(general_source_files(d)))
+        elif os.path.isfile(d):
+            filenames.append(d)
+    return filenames
+
+def get_script_files(scripts):
+    scripts = [_m for _m in scripts if is_string(_m)]
+    return scripts
+
+def get_lib_source_files(lib):
+    filenames = []
+    sources = lib[1].get('sources', [])
+    sources = [_m for _m in sources if is_string(_m)]
+    filenames.extend(sources)
+    filenames.extend(get_dependencies(sources))
+    depends = lib[1].get('depends', [])
+    for d in depends:
+        if is_local_src_dir(d):
+            filenames.extend(list(general_source_files(d)))
+        elif os.path.isfile(d):
+            filenames.append(d)
+    return filenames
+
+def get_shared_lib_extension(is_python_ext=False):
+    """Return the correct file extension for shared libraries.
+
+    Parameters
+    ----------
+    is_python_ext : bool, optional
+        Whether the shared library is a Python extension.  Default is False.
+
+    Returns
+    -------
+    so_ext : str
+        The shared library extension.
+
+    Notes
+    -----
+    For Python shared libs, `so_ext` will typically be '.so' on Linux and OS X,
+    and '.pyd' on Windows.  For Python >= 3.2 `so_ext` has a tag prepended on
+    POSIX systems according to PEP 3149.  For Python 3.2 this is implemented on
+    Linux, but not on OS X.
+
+    """
+    confvars = distutils.sysconfig.get_config_vars()
+    # SO is deprecated in 3.3.1, use EXT_SUFFIX instead
+    so_ext = confvars.get('EXT_SUFFIX', None)
+    if so_ext is None:
+        so_ext = confvars.get('SO', '')
+
+    if not is_python_ext:
+        # hardcode known values, config vars (including SHLIB_SUFFIX) are
+        # unreliable (see #3182)
+        # darwin, windows and debug linux are wrong in 3.3.1 and older
+        if (sys.platform.startswith('linux') or
+            sys.platform.startswith('gnukfreebsd')):
+            so_ext = '.so'
+        elif sys.platform.startswith('darwin'):
+            so_ext = '.dylib'
+        elif sys.platform.startswith('win'):
+            so_ext = '.dll'
+        else:
+            # fall back to config vars for unknown platforms
+            # fix long extension for Python >=3.2, see PEP 3149.
+            if 'SOABI' in confvars:
+                # Does nothing unless SOABI config var exists
+                so_ext = so_ext.replace('.' + confvars.get('SOABI'), '', 1)
+
+    return so_ext
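+
+# Example: get_shared_lib_extension() returns '.so' on Linux, '.dylib' on
+# OS X and '.dll' on Windows, while passing is_python_ext=True returns the
+# interpreter's own extension-module suffix (e.g. '.pyd' on Windows).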
+
+def get_data_files(data):
+    if is_string(data):
+        return [data]
+    sources = data[1]
+    filenames = []
+    for s in sources:
+        if hasattr(s, '__call__'):
+            continue
+        if is_local_src_dir(s):
+            filenames.extend(list(general_source_files(s)))
+        elif is_string(s):
+            if os.path.isfile(s):
+                filenames.append(s)
+            else:
+                print('Not existing data file:', s)
+        else:
+            raise TypeError(repr(s))
+    return filenames
+
+def dot_join(*args):
+    return '.'.join([a for a in args if a])
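+
+# Example: dot_join('numpy', '', 'distutils') returns 'numpy.distutils';
+# empty components are skipped.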
+
+def get_frame(level=0):
+    """Return frame object from call stack with given level.
+    """
+    try:
+        return sys._getframe(level+1)
+    except AttributeError:
+        frame = sys.exc_info()[2].tb_frame
+        for _ in range(level+1):
+            frame = frame.f_back
+        return frame
+
+
+######################
+
+class Configuration(object):
+
+    _list_keys = ['packages', 'ext_modules', 'data_files', 'include_dirs',
+                  'libraries', 'headers', 'scripts', 'py_modules',
+                  'installed_libraries', 'define_macros']
+    _dict_keys = ['package_dir', 'installed_pkg_config']
+    _extra_keys = ['name', 'version']
+
+    numpy_include_dirs = []
+
+    def __init__(self,
+                 package_name=None,
+                 parent_name=None,
+                 top_path=None,
+                 package_path=None,
+                 caller_level=1,
+                 setup_name='setup.py',
+                 **attrs):
+        """Construct configuration instance of a package.
+
+        package_name -- name of the package
+                        Ex.: 'distutils'
+        parent_name  -- name of the parent package
+                        Ex.: 'numpy'
+        top_path     -- directory of the toplevel package
+                        Ex.: the directory where the numpy package source sits
+        package_path -- directory of package. Will be computed by magic from the
+                        directory of the caller module if not specified
+                        Ex.: the directory where numpy.distutils is
+        caller_level -- frame level to caller namespace, internal parameter.
+        """
+        self.name = dot_join(parent_name, package_name)
+        self.version = None
+
+        caller_frame = get_frame(caller_level)
+        self.local_path = get_path_from_frame(caller_frame, top_path)
+        # local_path -- directory of a file (usually setup.py) that
+        #               defines a configuration() function.
+        if top_path is None:
+            top_path = self.local_path
+            self.local_path = ''
+        if package_path is None:
+            package_path = self.local_path
+        elif os.path.isdir(njoin(self.local_path, package_path)):
+            package_path = njoin(self.local_path, package_path)
+        if not os.path.isdir(package_path or '.'):
+            raise ValueError("%r is not a directory" % (package_path,))
+        self.top_path = top_path
+        self.package_path = package_path
+        # this is the relative path in the installed package
+        self.path_in_package = os.path.join(*self.name.split('.'))
+
+        self.list_keys = self._list_keys[:]
+        self.dict_keys = self._dict_keys[:]
+
+        for n in self.list_keys:
+            v = copy.copy(attrs.get(n, []))
+            setattr(self, n, as_list(v))
+
+        for n in self.dict_keys:
+            v = copy.copy(attrs.get(n, {}))
+            setattr(self, n, v)
+
+        known_keys = self.list_keys + self.dict_keys
+        self.extra_keys = self._extra_keys[:]
+        for n in attrs.keys():
+            if n in known_keys:
+                continue
+            a = attrs[n]
+            setattr(self, n, a)
+            if isinstance(a, list):
+                self.list_keys.append(n)
+            elif isinstance(a, dict):
+                self.dict_keys.append(n)
+            else:
+                self.extra_keys.append(n)
+
+        if os.path.exists(njoin(package_path, '__init__.py')):
+            self.packages.append(self.name)
+            self.package_dir[self.name] = package_path
+
+        self.options = dict(
+            ignore_setup_xxx_py = False,
+            assume_default_configuration = False,
+            delegate_options_to_subpackages = False,
+            quiet = False,
+            )
+
+        caller_instance = None
+        for i in range(1, 3):
+            try:
+                f = get_frame(i)
+            except ValueError:
+                break
+            try:
+                caller_instance = eval('self', f.f_globals, f.f_locals)
+                break
+            except NameError:
+                pass
+        if isinstance(caller_instance, self.__class__):
+            if caller_instance.options['delegate_options_to_subpackages']:
+                self.set_options(**caller_instance.options)
+
+        self.setup_name = setup_name
+
+    def todict(self):
+        """
+        Return a dictionary compatible with the keyword arguments of distutils
+        setup function.
+
+        Examples
+        --------
+        >>> setup(**config.todict())                           #doctest: +SKIP
+        """
+
+        self._optimize_data_files()
+        d = {}
+        known_keys = self.list_keys + self.dict_keys + self.extra_keys
+        for n in known_keys:
+            a = getattr(self, n)
+            if a:
+                d[n] = a
+        return d
+
+    def info(self, message):
+        if not self.options['quiet']:
+            print(message)
+
+    def warn(self, message):
+        sys.stderr.write('Warning: %s' % (message,))
+
+    def set_options(self, **options):
+        """
+        Configure Configuration instance.
+
+        The following options are available:
+         - ignore_setup_xxx_py
+         - assume_default_configuration
+         - delegate_options_to_subpackages
+         - quiet
+
+        """
+        for key, value in options.items():
+            if key in self.options:
+                self.options[key] = value
+            else:
+                raise ValueError('Unknown option: '+key)
+
+    def get_distribution(self):
+        """Return the distutils distribution object for self."""
+        from numpy.distutils.core import get_distribution
+        return get_distribution()
+
+    def _wildcard_get_subpackage(self, subpackage_name,
+                                 parent_name,
+                                 caller_level = 1):
+        l = subpackage_name.split('.')
+        subpackage_path = njoin([self.local_path]+l)
+        dirs = [_m for _m in glob.glob(subpackage_path) if os.path.isdir(_m)]
+        config_list = []
+        for d in dirs:
+            if not os.path.isfile(njoin(d, '__init__.py')):
+                continue
+            if 'build' in d.split(os.sep):
+                continue
+            n = '.'.join(d.split(os.sep)[-len(l):])
+            c = self.get_subpackage(n,
+                                    parent_name = parent_name,
+                                    caller_level = caller_level+1)
+            config_list.extend(c)
+        return config_list
+
+    def _get_configuration_from_setup_py(self, setup_py,
+                                         subpackage_name,
+                                         subpackage_path,
+                                         parent_name,
+                                         caller_level = 1):
+        # In case setup_py imports local modules:
+        sys.path.insert(0, os.path.dirname(setup_py))
+        try:
+            fo_setup_py = open(setup_py, 'U')
+            setup_name = os.path.splitext(os.path.basename(setup_py))[0]
+            n = dot_join(self.name, subpackage_name, setup_name)
+            setup_module = imp.load_module('_'.join(n.split('.')),
+                                           fo_setup_py,
+                                           setup_py,
+                                           ('.py', 'U', 1))
+            fo_setup_py.close()
+            if not hasattr(setup_module, 'configuration'):
+                if not self.options['assume_default_configuration']:
+                    self.warn('Assuming default configuration '\
+                              '(%s does not define configuration())'\
+                              % (setup_module))
+                config = Configuration(subpackage_name, parent_name,
+                                       self.top_path, subpackage_path,
+                                       caller_level = caller_level + 1)
+            else:
+                pn = dot_join(*([parent_name] + subpackage_name.split('.')[:-1]))
+                args = (pn,)
+                def fix_args_py2(args):
+                    if setup_module.configuration.__code__.co_argcount > 1:
+                        args = args + (self.top_path,)
+                    return args
+                def fix_args_py3(args):
+                    if setup_module.configuration.__code__.co_argcount > 1:
+                        args = args + (self.top_path,)
+                    return args
+                if sys.version_info[0] < 3:
+                    args = fix_args_py2(args)
+                else:
+                    args = fix_args_py3(args)
+                config = setup_module.configuration(*args)
+            if config.name!=dot_join(parent_name, subpackage_name):
+                self.warn('Subpackage %r configuration returned as %r' % \
+                          (dot_join(parent_name, subpackage_name), config.name))
+        finally:
+            del sys.path[0]
+        return config
+
+    def get_subpackage(self,subpackage_name,
+                       subpackage_path=None,
+                       parent_name=None,
+                       caller_level = 1):
+        """Return list of subpackage configurations.
+
+        Parameters
+        ----------
+        subpackage_name : str or None
+            Name of the subpackage to get the configuration. '*' in
+            subpackage_name is handled as a wildcard.
+        subpackage_path : str
+            If None, then the path is assumed to be the local path plus the
+            subpackage_name. If a setup.py file is not found in the
+            subpackage_path, then a default configuration is used.
+        parent_name : str
+            Parent name.
+        """
+        if subpackage_name is None:
+            if subpackage_path is None:
+                raise ValueError(
+                    "either subpackage_name or subpackage_path must be specified")
+            subpackage_name = os.path.basename(subpackage_path)
+
+        # handle wildcards
+        l = subpackage_name.split('.')
+        if subpackage_path is None and '*' in subpackage_name:
+            return self._wildcard_get_subpackage(subpackage_name,
+                                                 parent_name,
+                                                 caller_level = caller_level+1)
+        assert '*' not in subpackage_name, repr((subpackage_name, subpackage_path, parent_name))
+        if subpackage_path is None:
+            subpackage_path = njoin([self.local_path] + l)
+        else:
+            subpackage_path = njoin([subpackage_path] + l[:-1])
+            subpackage_path = self.paths([subpackage_path])[0]
+        setup_py = njoin(subpackage_path, self.setup_name)
+        if not self.options['ignore_setup_xxx_py']:
+            if not os.path.isfile(setup_py):
+                setup_py = njoin(subpackage_path,
+                                 'setup_%s.py' % (subpackage_name))
+        if not os.path.isfile(setup_py):
+            if not self.options['assume_default_configuration']:
+                self.warn('Assuming default configuration '\
+                          '(%s/{setup_%s,setup}.py was not found)' \
+                          % (os.path.dirname(setup_py), subpackage_name))
+            config = Configuration(subpackage_name, parent_name,
+                                   self.top_path, subpackage_path,
+                                   caller_level = caller_level+1)
+        else:
+            config = self._get_configuration_from_setup_py(
+                setup_py,
+                subpackage_name,
+                subpackage_path,
+                parent_name,
+                caller_level = caller_level + 1)
+        if config:
+            return [config]
+        else:
+            return []
+
+    def add_subpackage(self,subpackage_name,
+                       subpackage_path=None,
+                       standalone = False):
+        """Add a sub-package to the current Configuration instance.
+
+        This is useful in a setup.py script for adding sub-packages to a
+        package.
+
+        Parameters
+        ----------
+        subpackage_name : str
+            name of the subpackage
+        subpackage_path : str
+            if given, the subpackage path, such that the subpackage is in
+            subpackage_path / subpackage_name. If None, the subpackage is
+            assumed to be located in local_path / subpackage_name.
+        standalone : bool
+        """
+
+        if standalone:
+            parent_name = None
+        else:
+            parent_name = self.name
+        config_list = self.get_subpackage(subpackage_name, subpackage_path,
+                                          parent_name = parent_name,
+                                          caller_level = 2)
+        if not config_list:
+            self.warn('No configuration returned, assuming unavailable.')
+        for config in config_list:
+            d = config
+            if isinstance(config, Configuration):
+                d = config.todict()
+            assert isinstance(d, dict), repr(type(d))
+
+            self.info('Appending %s configuration to %s' \
+                      % (d.get('name'), self.name))
+            self.dict_append(**d)
+
+        dist = self.get_distribution()
+        if dist is not None:
+            self.warn('distutils distribution has been initialized,'\
+                      ' it may be too late to add a subpackage '+ subpackage_name)
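+
+    # Typical usage from a package's configuration() function, e.g.
+    # config.add_subpackage('linalg'); the subpackage's own setup.py (or
+    # setup_linalg.py) is then located and its configuration is merged in.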
+
+    def add_data_dir(self, data_path):
+        """Recursively add files under data_path to data_files list.
+
+        Recursively add files under data_path to the list of data_files to be
+        installed (and distributed). The data_path can be either a relative
+        path-name, or an absolute path-name, or a 2-tuple where the first
+        argument shows where in the install directory the data directory
+        should be installed to.
+
+        Parameters
+        ----------
+        data_path : seq or str
+            Argument can be either
+
+                * 2-sequence (<datadir suffix>, <path to data directory>)
+                * path to data directory where python datadir suffix defaults
+                  to package dir.
+
+        Notes
+        -----
+        Rules for installation paths:
+          foo/bar -> (foo/bar, foo/bar) -> parent/foo/bar
+          (gun, foo/bar) -> parent/gun
+          foo/* -> (foo/a, foo/a), (foo/b, foo/b) -> parent/foo/a, parent/foo/b
+          (gun, foo/*) -> (gun, foo/a), (gun, foo/b) -> gun
+          (gun/*, foo/*) -> parent/gun/a, parent/gun/b
+          /foo/bar -> (bar, /foo/bar) -> parent/bar
+          (gun, /foo/bar) -> parent/gun
+          (fun/*/gun/*, sun/foo/bar) -> parent/fun/foo/gun/bar
+
+        Examples
+        --------
+        For example suppose the source directory contains fun/foo.dat and
+        fun/bar/car.dat::
+
+            >>> self.add_data_dir('fun')                       #doctest: +SKIP
+            >>> self.add_data_dir(('sun', 'fun'))              #doctest: +SKIP
+            >>> self.add_data_dir(('gun', '/full/path/to/fun'))#doctest: +SKIP
+
+        Will install data-files to the locations::
+
+            <package install directory>/
+              fun/
+                foo.dat
+                bar/
+                  car.dat
+              sun/
+                foo.dat
+                bar/
+                  car.dat
+              gun/
+                foo.dat
+                car.dat
+        """
+        if is_sequence(data_path):
+            d, data_path = data_path
+        else:
+            d = None
+        if is_sequence(data_path):
+            [self.add_data_dir((d, p)) for p in data_path]
+            return
+        if not is_string(data_path):
+            raise TypeError("not a string: %r" % (data_path,))
+        if d is None:
+            if os.path.isabs(data_path):
+                return self.add_data_dir((os.path.basename(data_path), data_path))
+            return self.add_data_dir((data_path, data_path))
+        paths = self.paths(data_path, include_non_existing=False)
+        if is_glob_pattern(data_path):
+            if is_glob_pattern(d):
+                pattern_list = allpath(d).split(os.sep)
+                pattern_list.reverse()
+                # /a/*//b/ -> /a/*/b
+                rl = list(range(len(pattern_list)-1)); rl.reverse()
+                for i in rl:
+                    if not pattern_list[i]:
+                        del pattern_list[i]
+                #
+                for path in paths:
+                    if not os.path.isdir(path):
+                        print('Not a directory, skipping', path)
+                        continue
+                    rpath = rel_path(path, self.local_path)
+                    path_list = rpath.split(os.sep)
+                    path_list.reverse()
+                    target_list = []
+                    i = 0
+                    for s in pattern_list:
+                        if is_glob_pattern(s):
+                            if i>=len(path_list):
+                                raise ValueError('cannot fill pattern %r with %r' \
+                                      % (d, path))
+                            target_list.append(path_list[i])
+                        else:
+                            assert s==path_list[i], repr((s, path_list[i], data_path, d, path, rpath))
+                            target_list.append(s)
+                        i += 1
+                    if path_list[i:]:
+                        self.warn('mismatch of pattern_list=%s and path_list=%s'\
+                                  % (pattern_list, path_list))
+                    target_list.reverse()
+                    self.add_data_dir((os.sep.join(target_list), path))
+            else:
+                for path in paths:
+                    self.add_data_dir((d, path))
+            return
+        assert not is_glob_pattern(d), repr(d)
+
+        dist = self.get_distribution()
+        if dist is not None and dist.data_files is not None:
+            data_files = dist.data_files
+        else:
+            data_files = self.data_files
+
+        for path in paths:
+            for d1, f in list(general_source_directories_files(path)):
+                target_path = os.path.join(self.path_in_package, d, d1)
+                data_files.append((target_path, f))
+
+    def _optimize_data_files(self):
+        data_dict = {}
+        for p, files in self.data_files:
+            if p not in data_dict:
+                data_dict[p] = set()
+            for f in files:
+                data_dict[p].add(f)
+        self.data_files[:] = [(p, list(files)) for p, files in data_dict.items()]
+
+    def add_data_files(self,*files):
+        """Add data files to configuration data_files.
+
+        Parameters
+        ----------
+        files : sequence
+            Argument(s) can be either
+
+                * 2-sequence (<datadir prefix>,<path to data file(s)>)
+                * paths to data files where python datadir prefix defaults
+                  to package dir.
+
+        Notes
+        -----
+        The form of each element of the files sequence is very flexible
+        allowing many combinations of where to get the files from the package
+        and where they should ultimately be installed on the system. The most
+        basic usage is for an element of the files argument sequence to be a
+        simple filename. This will cause that file from the local path to be
+        installed to the installation path of the self.name package (package
+        path). The file argument can also be a relative path in which case the
+        entire relative path will be installed into the package directory.
+        Finally, the file can be an absolute path name in which case the file
+        will be found at the absolute path name but installed to the package
+        path.
+
+        This basic behavior can be augmented by passing a 2-tuple in as the
+        file argument. The first element of the tuple should specify the
+        relative path (under the package install directory) where the
+        remaining sequence of files should be installed to (it has nothing to
+        do with the file-names in the source distribution). The second element
+        of the tuple is the sequence of files that should be installed. The
+        files in this sequence can be filenames, relative paths, or absolute
+        paths. For absolute paths the file will be installed in the top-level
+        package installation directory (regardless of the first argument).
+        Filenames and relative path names will be installed in the package
+        install directory under the path name given as the first element of
+        the tuple.
+
+        Rules for installation paths:
+
+          #. file.txt -> (., file.txt)-> parent/file.txt
+          #. foo/file.txt -> (foo, foo/file.txt) -> parent/foo/file.txt
+          #. /foo/bar/file.txt -> (., /foo/bar/file.txt) -> parent/file.txt
+          #. *.txt -> parent/a.txt, parent/b.txt
+          #. foo/*.txt -> parent/foo/a.txt, parent/foo/b.txt
+          #. */*.txt -> (*, */*.txt) -> parent/c/a.txt, parent/d/b.txt
+          #. (sun, file.txt) -> parent/sun/file.txt
+          #. (sun, bar/file.txt) -> parent/sun/file.txt
+          #. (sun, /foo/bar/file.txt) -> parent/sun/file.txt
+          #. (sun, *.txt) -> parent/sun/a.txt, parent/sun/b.txt
+          #. (sun, bar/*.txt) -> parent/sun/a.txt, parent/sun/b.txt
+          #. (sun/*, */*.txt) -> parent/sun/c/a.txt, parent/d/b.txt
+
+        An additional feature is that the path to a data-file can actually be
+        a function that takes no arguments and returns the actual path(s) to
+        the data-files. This is useful when the data files are generated while
+        building the package.
+
+        Examples
+        --------
+        Add files to the list of data_files to be included with the package.
+
+            >>> self.add_data_files('foo.dat',
+            ...     ('fun', ['gun.dat', 'nun/pun.dat', '/tmp/sun.dat']),
+            ...     'bar/cat.dat',
+            ...     '/full/path/to/can.dat')                   #doctest: +SKIP
+
+        will install these data files to::
+
+            <package install directory>/
+             foo.dat
+             fun/
+               gun.dat
+               nun/
+                 pun.dat
+             sun.dat
+             bar/
+               cat.dat
+             can.dat
+
+        where <package install directory> is the package (or sub-package)
+        directory such as '/usr/lib/python2.4/site-packages/mypackage'
+        ('C:\\Python2.4\\Lib\\site-packages\\mypackage') or
+        '/usr/lib/python2.4/site-packages/mypackage/mysubpackage'
+        ('C:\\Python2.4\\Lib\\site-packages\\mypackage\\mysubpackage').
+        """
+
+        if len(files)>1:
+            for f in files:
+                self.add_data_files(f)
+            return
+        assert len(files)==1
+        if is_sequence(files[0]):
+            d, files = files[0]
+        else:
+            d = None
+        if is_string(files):
+            filepat = files
+        elif is_sequence(files):
+            if len(files)==1:
+                filepat = files[0]
+            else:
+                for f in files:
+                    self.add_data_files((d, f))
+                return
+        else:
+            raise TypeError(repr(type(files)))
+
+        if d is None:
+            if hasattr(filepat, '__call__'):
+                d = ''
+            elif os.path.isabs(filepat):
+                d = ''
+            else:
+                d = os.path.dirname(filepat)
+            self.add_data_files((d, files))
+            return
+
+        paths = self.paths(filepat, include_non_existing=False)
+        if is_glob_pattern(filepat):
+            if is_glob_pattern(d):
+                pattern_list = d.split(os.sep)
+                pattern_list.reverse()
+                for path in paths:
+                    path_list = path.split(os.sep)
+                    path_list.reverse()
+                    path_list.pop() # filename
+                    target_list = []
+                    i = 0
+                    for s in pattern_list:
+                        if is_glob_pattern(s):
+                            target_list.append(path_list[i])
+                            i += 1
+                        else:
+                            target_list.append(s)
+                    target_list.reverse()
+                    self.add_data_files((os.sep.join(target_list), path))
+            else:
+                self.add_data_files((d, paths))
+            return
+        assert not is_glob_pattern(d), repr((d, filepat))
+
+        dist = self.get_distribution()
+        if dist is not None and dist.data_files is not None:
+            data_files = dist.data_files
+        else:
+            data_files = self.data_files
+
+        data_files.append((os.path.join(self.path_in_package, d), paths))
+
+    ### XXX Implement add_py_modules
+
+    def add_define_macros(self, macros):
+        """Add define macros to configuration
+
+        Add the given sequence of macro (name, value) tuples to the beginning
+        of the define_macros list. This list will be visible to all extension
+        modules of the current package.
+        """
+        dist = self.get_distribution()
+        if dist is not None:
+            if not hasattr(dist, 'define_macros'):
+                dist.define_macros = []
+            dist.define_macros.extend(macros)
+        else:
+            self.define_macros.extend(macros)
+
+
+    def add_include_dirs(self,*paths):
+        """Add paths to configuration include directories.
+
+        Add the given sequence of paths to the beginning of the include_dirs
+        list. This list will be visible to all extension modules of the
+        current package.
+        """
+        include_dirs = self.paths(paths)
+        dist = self.get_distribution()
+        if dist is not None:
+            if dist.include_dirs is None:
+                dist.include_dirs = []
+            dist.include_dirs.extend(include_dirs)
+        else:
+            self.include_dirs.extend(include_dirs)
+
+    def add_headers(self,*files):
+        """Add installable headers to configuration.
+
+        Add the given sequence of files to the beginning of the headers list.
+        By default, headers will be installed under the
+        <python-include>/<self.name.replace('.','/')>/ directory. If an item
+        of files is a tuple, then its first argument specifies the actual
+        installation location relative to the <python-include> path.
+
+        Parameters
+        ----------
+        files : str or seq
+            Argument(s) can be either:
+
+                * 2-sequence (<includedir suffix>,<path to header file(s)>)
+                * path(s) to header file(s) where python includedir suffix will
+                  default to package name.
+        """
+        headers = []
+        for path in files:
+            if is_string(path):
+                [headers.append((self.name, p)) for p in self.paths(path)]
+            else:
+                if not isinstance(path, (tuple, list)) or len(path) != 2:
+                    raise TypeError(repr(path))
+                [headers.append((path[0], p)) for p in self.paths(path[1])]
+        dist = self.get_distribution()
+        if dist is not None:
+            if dist.headers is None:
+                dist.headers = []
+            dist.headers.extend(headers)
+        else:
+            self.headers.extend(headers)
+
+    def paths(self,*paths,**kws):
+        """Apply glob to paths and prepend local_path if needed.
+
+        Applies glob.glob(...) to each path in the sequence (if needed) and
+        prepends the local_path if needed. Because this is called on all
+        source lists, this allows wildcard characters to be specified in lists
+        of sources for extension modules, libraries, and scripts, and allows
+        path-names to be relative to the source directory.
+
+        """
+        include_non_existing = kws.get('include_non_existing', True)
+        return gpaths(paths,
+                      local_path = self.local_path,
+                      include_non_existing=include_non_existing)
+
+    def _fix_paths_dict(self, kw):
+        for k in kw.keys():
+            v = kw[k]
+            if k in ['sources', 'depends', 'include_dirs', 'library_dirs',
+                     'module_dirs', 'extra_objects']:
+                new_v = self.paths(v)
+                kw[k] = new_v
+
+    def add_extension(self,name,sources,**kw):
+        """Add extension to configuration.
+
+        Create and add an Extension instance to the ext_modules list. This
+        method also takes the following optional keyword arguments that are
+        passed on to the Extension constructor.
+
+        Parameters
+        ----------
+        name : str
+            name of the extension
+        sources : seq
+            list of the sources. The list of sources may contain functions
+            (called source generators) which must take an extension instance
+            and a build directory as inputs and return a source file or list of
+            source files or None. If None is returned then no sources are
+            generated. If the Extension instance has no sources after
+            processing all source generators, then no extension module is
+            built.
+        include_dirs :
+        define_macros :
+        undef_macros :
+        library_dirs :
+        libraries :
+        runtime_library_dirs :
+        extra_objects :
+        extra_compile_args :
+        extra_link_args :
+        extra_f77_compile_args :
+        extra_f90_compile_args :
+        export_symbols :
+        swig_opts :
+        depends :
+            The depends list contains paths to files or directories that the
+            sources of the extension module depend on. If any path in the
+            depends list is newer than the extension module, then the module
+            will be rebuilt.
+        language :
+        f2py_options :
+        module_dirs :
+        extra_info : dict or list
+            dict or list of dict of keywords to be appended to keywords.
+
+        Notes
+        -----
+        The self.paths(...) method is applied to all lists that may contain
+        paths.
+        """
+        ext_args = copy.copy(kw)
+        ext_args['name'] = dot_join(self.name, name)
+        ext_args['sources'] = sources
+
+        if 'extra_info' in ext_args:
+            extra_info = ext_args['extra_info']
+            del ext_args['extra_info']
+            if isinstance(extra_info, dict):
+                extra_info = [extra_info]
+            for info in extra_info:
+                assert isinstance(info, dict), repr(info)
+                dict_append(ext_args,**info)
+
+        self._fix_paths_dict(ext_args)
+
+        # Resolve out-of-tree dependencies
+        libraries = ext_args.get('libraries', [])
+        libnames = []
+        ext_args['libraries'] = []
+        for libname in libraries:
+            if isinstance(libname, tuple):
+                self._fix_paths_dict(libname[1])
+
+            # Handle library names of the form libname@relative/path/to/library
+            if '@' in libname:
+                lname, lpath = libname.split('@', 1)
+                lpath = os.path.abspath(njoin(self.local_path, lpath))
+                if os.path.isdir(lpath):
+                    c = self.get_subpackage(None, lpath,
+                                            caller_level = 2)
+                    if isinstance(c, Configuration):
+                        c = c.todict()
+                    for l in [l[0] for l in c.get('libraries', [])]:
+                        llname = l.split('__OF__', 1)[0]
+                        if llname == lname:
+                            c.pop('name', None)
+                            dict_append(ext_args,**c)
+                            break
+                    continue
+            libnames.append(libname)
+
+        ext_args['libraries'] = libnames + ext_args['libraries']
+        ext_args['define_macros'] = \
+            self.define_macros + ext_args.get('define_macros', [])
+
+        from numpy.distutils.core import Extension
+        ext = Extension(**ext_args)
+        self.ext_modules.append(ext)
+
+        dist = self.get_distribution()
+        if dist is not None:
+            self.warn('distutils distribution has been initialized,'\
+                      ' it may be too late to add an extension '+name)
+        return ext
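+
+    # Typical usage, e.g.
+    #     ext = config.add_extension('_foo', sources=['_foomodule.c'],
+    #                                libraries=['m'])
+    # The created Extension is also appended to config.ext_modules.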
+
+    def add_library(self,name,sources,**build_info):
+        """
+        Add library to configuration.
+
+        Parameters
+        ----------
+        name : str
+            Name of the extension.
+        sources : sequence
+            List of the sources. The list of sources may contain functions
+            (called source generators) which must take an extension instance
+            and a build directory as inputs and return a source file or list of
+            source files or None. If None is returned then no sources are
+            generated. If the Extension instance has no sources after
+            processing all source generators, then no extension module is
+            built.
+        build_info : dict, optional
+            The following keys are allowed:
+
+                * depends
+                * macros
+                * include_dirs
+                * extra_compiler_args
+                * extra_f77_compiler_args
+                * extra_f90_compiler_args
+                * f2py_options
+                * language
+
+        """
+        self._add_library(name, sources, None, build_info)
+
+        dist = self.get_distribution()
+        if dist is not None:
+            self.warn('distutils distribution has been initialized,'\
+                      ' it may be too late to add a library '+ name)
+
+    def _add_library(self, name, sources, install_dir, build_info):
+        """Common implementation for add_library and add_installed_library. Do
+        not use directly"""
+        build_info = copy.copy(build_info)
+        name = name #+ '__OF__' + self.name
+        build_info['sources'] = sources
+
+        # Sometimes, depends is not set up to an empty list by default, and if
+        # depends is not given to add_library, distutils barfs (#1134)
+        if 'depends' not in build_info:
+            build_info['depends'] = []
+
+        self._fix_paths_dict(build_info)
+
+        # Add to libraries list so that it is built with build_clib
+        self.libraries.append((name, build_info))
+
+    def add_installed_library(self, name, sources, install_dir, build_info=None):
+        """
+        Similar to add_library, but the specified library is installed.
+
+        Most C libraries used with `distutils` are only used to build python
+        extensions, but libraries built through this method will be installed
+        so that they can be reused by third-party packages.
+
+        Parameters
+        ----------
+        name : str
+            Name of the installed library.
+        sources : sequence
+            List of the library's source files. See `add_library` for details.
+        install_dir : str
+            Path to install the library, relative to the current sub-package.
+        build_info : dict, optional
+            The following keys are allowed:
+
+                * depends
+                * macros
+                * include_dirs
+                * extra_compiler_args
+                * extra_f77_compiler_args
+                * extra_f90_compiler_args
+                * f2py_options
+                * language
+
+        Returns
+        -------
+        None
+
+        See Also
+        --------
+        add_library, add_npy_pkg_config, get_info
+
+        Notes
+        -----
+        The best way to encode the options required to link against the specified
+        C libraries is to use a "libname.ini" file, and use `get_info` to
+        retrieve the required options (see `add_npy_pkg_config` for more
+        information).
+
+        """
+        if not build_info:
+            build_info = {}
+
+        install_dir = os.path.join(self.package_path, install_dir)
+        self._add_library(name, sources, install_dir, build_info)
+        self.installed_libraries.append(InstallableLib(name, build_info, install_dir))
+
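+    # Example use of add_installed_library (a sketch; the names are
+    # illustrative): install a reusable C library under <package>/lib and
+    # describe it with an npy-pkg-config template so other packages can find
+    # it through get_info():
+    #
+    #     config.add_installed_library('mylib',
+    #                                  sources=['src/mylib.c'],
+    #                                  install_dir='lib')
+    #     config.add_npy_pkg_config('mylib.ini.in', 'lib', {'name': 'mylib'})
+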
+    def add_npy_pkg_config(self, template, install_dir, subst_dict=None):
+        """
+        Generate and install a npy-pkg config file from a template.
+
+        The config file generated from `template` is installed in the
+        given install directory, using `subst_dict` for variable substitution.
+
+        Parameters
+        ----------
+        template : str
+            The path of the template, relative to the current package path.
+        install_dir : str
+            Where to install the npy-pkg config file, relative to the current
+            package path.
+        subst_dict : dict, optional
+            If given, any string of the form ``@key@`` will be replaced by
+            ``subst_dict[key]`` in the template file when installed. The install
+            prefix is always available through the variable ``@prefix@``, since the
+            install prefix is not easy to get reliably from setup.py.
+
+        See also
+        --------
+        add_installed_library, get_info
+
+        Notes
+        -----
+        This works for both standard installs and in-place builds, i.e.
+        ``@prefix@`` refers to the source directory for in-place builds.
+
+        Examples
+        --------
+        ::
+
+            config.add_npy_pkg_config('foo.ini.in', 'lib', {'foo': bar})
+
+        Assuming the foo.ini.in file has the following content::
+
+            [meta]
+            Name=@foo@
+            Version=1.0
+            Description=dummy description
+
+            [default]
+            Cflags=-I@prefix@/include
+            Libs=
+
+        The generated file will have the following content::
+
+            [meta]
+            Name=bar
+            Version=1.0
+            Description=dummy description
+
+            [default]
+            Cflags=-Iprefix_dir/include
+            Libs=
+
+        and will be installed as foo.ini in the 'lib' subpath.
+
+        """
+        if subst_dict is None:
+            subst_dict = {}
+        basename = os.path.splitext(template)[0]
+        template = os.path.join(self.package_path, template)
+
+        if self.name in self.installed_pkg_config:
+            self.installed_pkg_config[self.name].append((template, install_dir,
+                subst_dict))
+        else:
+            self.installed_pkg_config[self.name] = [(template, install_dir,
+                subst_dict)]
+
+
+    def add_scripts(self,*files):
+        """Add scripts to configuration.
+
+        Add the sequence of files to the scripts list.
+        Scripts will be installed under the <prefix>/bin/ directory.
+
+        """
+        scripts = self.paths(files)
+        dist = self.get_distribution()
+        if dist is not None:
+            if dist.scripts is None:
+                dist.scripts = []
+            dist.scripts.extend(scripts)
+        else:
+            self.scripts.extend(scripts)
+
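+    # Example use of add_scripts (a sketch; the path is illustrative):
+    #
+    #     config.add_scripts('scripts/mytool.py')
+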
+    def dict_append(self,**dict):
+        for key in self.list_keys:
+            a = getattr(self, key)
+            a.extend(dict.get(key, []))
+        for key in self.dict_keys:
+            a = getattr(self, key)
+            a.update(dict.get(key, {}))
+        known_keys = self.list_keys + self.dict_keys + self.extra_keys
+        for key in dict.keys():
+            if key not in known_keys:
+                a = getattr(self, key, None)
+                if a and a==dict[key]: continue
+                self.warn('Inheriting attribute %r=%r from %r' \
+                          % (key, dict[key], dict.get('name', '?')))
+                setattr(self, key, dict[key])
+                self.extra_keys.append(key)
+            elif key in self.extra_keys:
+                self.info('Ignoring attempt to set %r (from %r to %r)' \
+                          % (key, getattr(self, key), dict[key]))
+            elif key in known_keys:
+                # key is already processed above
+                pass
+            else:
+                raise ValueError("Don't know about key=%r" % (key))
+
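+    # Sketch of how dict_append merges options into this configuration (the
+    # macro name is illustrative): list-valued keys such as 'libraries' and
+    # 'define_macros' are extended, dict-valued keys are updated, and unknown
+    # keys are inherited as extra attributes with a warning.
+    #
+    #     config.dict_append(libraries=['m'],
+    #                        define_macros=[('MY_MACRO', '1')])
+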
+    def __str__(self):
+        from pprint import pformat
+        known_keys = self.list_keys + self.dict_keys + self.extra_keys
+        s = '<'+5*'-' + '\n'
+        s += 'Configuration of '+self.name+':\n'
+        known_keys.sort()
+        for k in known_keys:
+            a = getattr(self, k, None)
+            if a:
+                s += '%s = %s\n' % (k, pformat(a))
+        s += 5*'-' + '>'
+        return s
+
+    def get_config_cmd(self):
+        """
+        Returns the numpy.distutils config command instance.
+        """
+        cmd = get_cmd('config')
+        cmd.ensure_finalized()
+        cmd.dump_source = 0
+        cmd.noisy = 0
+        old_path = os.environ.get('PATH')
+        if old_path:
+            path = os.pathsep.join(['.', old_path])
+            os.environ['PATH'] = path
+        return cmd
+
+    def get_build_temp_dir(self):
+        """
+        Return a path to a temporary directory where temporary files should be
+        placed.
+        """
+        cmd = get_cmd('build')
+        cmd.ensure_finalized()
+        return cmd.build_temp
+
+    def have_f77c(self):
+        """Check for availability of Fortran 77 compiler.
+
+        Use it inside a source-generating function to ensure that the
+        setup distribution instance has been initialized.
+
+        Notes
+        -----
+        True if a Fortran 77 compiler is available (i.e., a simple Fortran 77
+        program could be compiled successfully).
+        """
+        simple_fortran_subroutine = '''
+        subroutine simple
+        end
+        '''
+        config_cmd = self.get_config_cmd()
+        flag = config_cmd.try_compile(simple_fortran_subroutine, lang='f77')
+        return flag
+
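+    # Example use of have_f77c inside a source generator (a sketch; the
+    # function and file names are illustrative):
+    #
+    #     def generate_sources(ext, build_dir):
+    #         if config.have_f77c():
+    #             return ['src/impl_fortran.f']
+    #         return ['src/impl_c.c']
+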
+    def have_f90c(self):
+        """Check for availability of Fortran 90 compiler.
+
+        Use it inside a source-generating function to ensure that the
+        setup distribution instance has been initialized.
+
+        Notes
+        -----
+        True if a Fortran 90 compiler is available (i.e., a simple Fortran 90
+        program could be compiled successfully).
+        """
+        simple_fortran_subroutine = '''
+        subroutine simple
+        end
+        '''
+        config_cmd = self.get_config_cmd()
+        flag = config_cmd.try_compile(simple_fortran_subroutine, lang='f90')
+        return flag
+
+    def append_to(self, extlib):
+        """Append libraries, include_dirs to extension or library item.
+        """
+        if is_sequence(extlib):
+            lib_name, build_info = extlib
+            dict_append(build_info,
+                        libraries=self.libraries,
+                        include_dirs=self.include_dirs)
+        else:
+            from numpy.distutils.core import Extension
+            assert isinstance(extlib, Extension), repr(extlib)
+            extlib.libraries.extend(self.libraries)
+            extlib.include_dirs.extend(self.include_dirs)
+
+    def _get_svn_revision(self, path):
+        """Return path's SVN revision number.
+        """
+        revision = None
+        m = None
+        cwd = os.getcwd()
+        try:
+            os.chdir(path or '.')
+            p = subprocess.Popen(['svnversion'], shell=True,
+                    stdout=subprocess.PIPE, stderr=None,
+                    close_fds=True)
+            sout = p.stdout
+            m = re.match(r'(?P<revision>\d+)', sout.read())
+        except:
+            pass
+        os.chdir(cwd)
+        if m:
+            revision = int(m.group('revision'))
+            return revision
+        if sys.platform=='win32' and os.environ.get('SVN_ASP_DOT_NET_HACK', None):
+            entries = njoin(path, '_svn', 'entries')
+        else:
+            entries = njoin(path, '.svn', 'entries')
+        if os.path.isfile(entries):
+            f = open(entries)
+            fstr = f.read()
+            f.close()
+            if fstr[:5] == '<?xml':  # pre 1.4
+                m = re.search(r'revision="(?P<revision>\d+)"', fstr)
+                if m:
+                    revision = int(m.group('revision'))
+            else:  # non-xml entries file (svn >= 1.4): revision follows the 'dir' entry
+                m = re.search(r'dir[\n\r]+(?P<revision>\d+)', fstr)
+                if m:
+                    revision = int(m.group('revision'))
+        return revision
+
+    def _get_hg_revision(self, path):
+        """Return path's Mercurial revision number.
+        """
+        revision = None
+        m = None
+        cwd = os.getcwd()
+        try:
+            os.chdir(path or '.')
+            p = subprocess.Popen('hg identify --num', shell=True,
+                    stdout=subprocess.PIPE, stderr=None,
+                    close_fds=True)
+            sout = p.stdout
+            m = re.match(r'(?P<revision>\d+)', sout.read())
+        except:
+            pass
+        os.chdir(cwd)
+        if m:
+            revision = int(m.group('revision'))
+            return revision
+        branch_fn = njoin(path, '.hg', 'branch')
+        branch_cache_fn = njoin(path, '.hg', 'branch.cache')
+
+        if os.path.isfile(branch_fn):
+            branch0 = None
+            f = open(branch_fn)
+            revision0 = f.read().strip()
+            f.close()
+
+            branch_map = {}
+            for line in open(branch_cache_fn, 'r'):
+                branch1, revision1  = line.split()[:2]
+                if revision1==revision0:
+                    branch0 = branch1
+                try:
+                    revision1 = int(revision1)
+                except ValueError:
+                    continue
+                branch_map[branch1] = revision1
+
+            revision = branch_map.get(branch0)
+        return revision
+
+
+    def get_version(self, version_file=None, version_variable=None):
+        """Try to get version string of a package.
+
+        Return a version string of the current package or None if the version
+        information could not be detected.
+
+        Notes
+        -----
+        This method scans files named
+        __version__.py, <packagename>_version.py, version.py,
+        __svn_version__.py, and __hg_version__.py for string variables
+        version, __version\__, and <packagename>_version, until a version
+        number is found.
+        """
+        version = getattr(self, 'version', None)
+        if version is not None:
+            return version
+
+        # Get version from version file.
+        if version_file is None:
+            files = ['__version__.py',
+                     self.name.split('.')[-1]+'_version.py',
+                     'version.py',
+                     '__svn_version__.py',
+                     '__hg_version__.py']
+        else:
+            files = [version_file]
+        if version_variable is None:
+            version_vars = ['version',
+                            '__version__',
+                            self.name.split('.')[-1]+'_version']
+        else:
+            version_vars = [version_variable]
+        for f in files:
+            fn = njoin(self.local_path, f)
+            if os.path.isfile(fn):
+                info = (open(fn), fn, ('.py', 'U', 1))
+                name = os.path.splitext(os.path.basename(fn))[0]
+                n = dot_join(self.name, name)
+                try:
+                    version_module = imp.load_module('_'.join(n.split('.')),*info)
+                except ImportError:
+                    msg = get_exception()
+                    self.warn(str(msg))
+                    version_module = None
+                if version_module is None:
+                    continue
+
+                for a in version_vars:
+                    version = getattr(version_module, a, None)
+                    if version is not None:
+                        break
+                if version is not None:
+                    break
+
+        if version is not None:
+            self.version = version
+            return version
+
+        # Get version as SVN or Mercurial revision number
+        revision = self._get_svn_revision(self.local_path)
+        if revision is None:
+            revision = self._get_hg_revision(self.local_path)
+
+        if revision is not None:
+            version = str(revision)
+            self.version = version
+
+        return version
+
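+    # Sketch of how get_version finds a version (the file name and value are
+    # illustrative): given a file <local_path>/version.py containing
+    #
+    #     version = '1.2.3'
+    #
+    # config.get_version() returns '1.2.3' and stores it on config.version;
+    # if no version file is found, the SVN or Mercurial revision number is
+    # used as a fallback.
+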
+    def make_svn_version_py(self, delete=True):
+        """Appends a data function to the data_files list that will generate
+        __svn_version__.py file to the current package directory.
+
+        Generate package __svn_version__.py file from SVN revision number,
+        it will be removed after python exits but will be available
+        when sdist, etc commands are executed.
+
+        Notes
+        -----
+        If __svn_version__.py existed before, nothing is done.
+
+        This is
+        intended for working with source directories that are in an SVN
+        repository.
+        """
+        target = njoin(self.local_path, '__svn_version__.py')
+        revision = self._get_svn_revision(self.local_path)
+        if os.path.isfile(target) or revision is None:
+            return
+        else:
+            def generate_svn_version_py():
+                if not os.path.isfile(target):
+                    version = str(revision)
+                    self.info('Creating %s (version=%r)' % (target, version))
+                    f = open(target, 'w')
+                    f.write('version = %r\n' % (version))
+                    f.close()
+
+                import atexit
+                def rm_file(f=target,p=self.info):
+                    if delete:
+                        try: os.remove(f); p('removed '+f)
+                        except OSError: pass
+                        try: os.remove(f+'c'); p('removed '+f+'c')
+                        except OSError: pass
+
+                atexit.register(rm_file)
+
+                return target
+
+            self.add_data_files(('', generate_svn_version_py()))
+
+    def make_hg_version_py(self, delete=True):
+        """Appends a data function to the data_files list that will generate
+        __hg_version__.py file to the current package directory.
+
+        Generate package __hg_version__.py file from Mercurial revision,
+        it will be removed after python exits but will be available
+        when sdist, etc commands are executed.
+
+        Notes
+        -----
+        If __hg_version__.py existed before, nothing is done.
+
+        This is intended for working with source directories that are
+        in an Mercurial repository.
+        """
+        target = njoin(self.local_path, '__hg_version__.py')
+        revision = self._get_hg_revision(self.local_path)
+        if os.path.isfile(target) or revision is None:
+            return
+        else:
+            def generate_hg_version_py():
+                if not os.path.isfile(target):
+                    version = str(revision)
+                    self.info('Creating %s (version=%r)' % (target, version))
+                    f = open(target, 'w')
+                    f.write('version = %r\n' % (version))
+                    f.close()
+
+                import atexit
+                def rm_file(f=target,p=self.info):
+                    if delete:
+                        try: os.remove(f); p('removed '+f)
+                        except OSError: pass
+                        try: os.remove(f+'c'); p('removed '+f+'c')
+                        except OSError: pass
+
+                atexit.register(rm_file)
+
+                return target
+
+            self.add_data_files(('', generate_hg_version_py()))
+
+    def make_config_py(self,name='__config__'):
+        """Generate package __config__.py file containing system_info
+        information used during building the package.
+
+        This file is installed to the
+        package installation directory.
+
+        """
+        self.py_modules.append((self.name, name, generate_config_py))
+
+
+    def get_info(self,*names):
+        """Get resources information.
+
+        Return information (from system_info.get_info) for all of the names in
+        the argument list in a single dictionary.
+        """
+        from .system_info import get_info, dict_append
+        info_dict = {}
+        for a in names:
+            dict_append(info_dict,**get_info(a))
+        return info_dict
+
+
+def get_cmd(cmdname, _cache={}):
+    if cmdname not in _cache:
+        import distutils.core
+        dist = distutils.core._setup_distribution
+        if dist is None:
+            from distutils.errors import DistutilsInternalError
+            raise DistutilsInternalError(
+                  'setup distribution instance not initialized')
+        cmd = dist.get_command_obj(cmdname)
+        _cache[cmdname] = cmd
+    return _cache[cmdname]
+
+def get_numpy_include_dirs():
+    # numpy_include_dirs are set by numpy/core/setup.py, otherwise []
+    include_dirs = Configuration.numpy_include_dirs[:]
+    if not include_dirs:
+        import numpy
+        include_dirs = [ numpy.get_include() ]
+    # else running numpy/core/setup.py
+    return include_dirs
+
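+# Typical use of get_numpy_include_dirs (a sketch; 'spam' and its source file
+# are illustrative): pass the result as include_dirs wherever the NumPy C
+# headers are needed, e.g.
+#
+#     config.add_extension('spam', sources=['spammodule.c'],
+#                          include_dirs=get_numpy_include_dirs())
+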
+def get_npy_pkg_dir():
+    """Return the path where to find the npy-pkg-config directory."""
+    # XXX: import here for bootstrapping reasons
+    import numpy
+    d = os.path.join(os.path.dirname(numpy.__file__),
+            'core', 'lib', 'npy-pkg-config')
+    return d
+
+def get_pkg_info(pkgname, dirs=None):
+    """
+    Return library info for the given package.
+
+    Parameters
+    ----------
+    pkgname : str
+        Name of the package (should match the name of the .ini file, without
+        the extension, e.g. foo for the file foo.ini).
+    dirs : sequence, optional
+        If given, should be a sequence of additional directories in which to
+        look for npy-pkg-config files. Those directories are searched prior to
+        the NumPy directory.
+
+    Returns
+    -------
+    pkginfo : class instance
+        The `LibraryInfo` instance containing the build information.
+
+    Raises
+    ------
+    PkgNotFound
+        If the package is not found.
+
+    See Also
+    --------
+    Configuration.add_npy_pkg_config, Configuration.add_installed_library,
+    get_info
+
+    """
+    from numpy.distutils.npy_pkg_config import read_config
+
+    if dirs:
+        dirs.append(get_npy_pkg_dir())
+    else:
+        dirs = [get_npy_pkg_dir()]
+    return read_config(pkgname, dirs)
+
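+# Usage sketch for get_pkg_info: query the LibraryInfo instance of an
+# installed npy-pkg-config package (npymath ships with NumPy):
+#
+#     info = get_pkg_info('npymath')
+#     cflags = info.cflags()
+#     libs = info.libs()
+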
+def get_info(pkgname, dirs=None):
+    """
+    Return an info dict for a given C library.
+
+    The info dict contains the necessary options to use the C library.
+
+    Parameters
+    ----------
+    pkgname : str
+        Name of the package (should match the name of the .ini file, without
+        the extension, e.g. foo for the file foo.ini).
+    dirs : sequence, optional
+        If given, should be a sequence of additional directories in which to
+        look for npy-pkg-config files. Those directories are searched prior to
+        the NumPy directory.
+
+    Returns
+    -------
+    info : dict
+        The dictionary with build information.
+
+    Raises
+    ------
+    PkgNotFound
+        If the package is not found.
+
+    See Also
+    --------
+    Configuration.add_npy_pkg_config, Configuration.add_installed_library,
+    get_pkg_info
+
+    Examples
+    --------
+    To get the necessary information for the npymath library from NumPy:
+
+    >>> npymath_info = np.distutils.misc_util.get_info('npymath')
+    >>> npymath_info                                    #doctest: +SKIP
+    {'define_macros': [], 'libraries': ['npymath'], 'library_dirs':
+    ['.../numpy/core/lib'], 'include_dirs': ['.../numpy/core/include']}
+
+    This info dict can then be used as input to a `Configuration` instance::
+
+      config.add_extension('foo', sources=['foo.c'], extra_info=npymath_info)
+
+    """
+    from numpy.distutils.npy_pkg_config import parse_flags
+    pkg_info = get_pkg_info(pkgname, dirs)
+
+    # Translate LibraryInfo instance into a build_info dict
+    info = parse_flags(pkg_info.cflags())
+    for k, v in parse_flags(pkg_info.libs()).items():
+        info[k].extend(v)
+
+    # add_extension's extra_info argument expects 'define_macros' rather than 'macros'
+    info['define_macros'] = info['macros']
+    del info['macros']
+    del info['ignored']
+
+    return info
+
+def is_bootstrapping():
+    if sys.version_info[0] >= 3:
+        import builtins
+    else:
+        import __builtin__ as builtins
+
+    try:
+        builtins.__NUMPY_SETUP__
+        return True
+    except AttributeError:
+        return False
+
+
+#########################
+
+def default_config_dict(name = None, parent_name = None, local_path=None):
+    """Return a configuration dictionary for usage in
+    configuration() function defined in file setup_<name>.py.
+    """
+    import warnings
+    warnings.warn('Use Configuration(%r,%r,top_path=%r) instead of '\
+                  'deprecated default_config_dict(%r,%r,%r)'
+                  % (name, parent_name, local_path,
+                     name, parent_name, local_path,
+                     ))
+    c = Configuration(name, parent_name, local_path)
+    return c.todict()
+
+
+def dict_append(d, **kws):
+    for k, v in kws.items():
+        if k in d:
+            ov = d[k]
+            if isinstance(ov, str):
+                d[k] = v
+            else:
+                d[k].extend(v)
+        else:
+            d[k] = v
+
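+# Behaviour sketch for dict_append (the macro name is illustrative): existing
+# list values are extended in place, existing string values are replaced, and
+# new keys are simply set.
+#
+#     d = {'libraries': ['m'], 'language': 'c'}
+#     dict_append(d, libraries=['npymath'], language='c99',
+#                 define_macros=[('MY_MACRO', '1')])
+#     # d == {'libraries': ['m', 'npymath'], 'language': 'c99',
+#     #       'define_macros': [('MY_MACRO', '1')]}
+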
+def appendpath(prefix, path):
+    if os.path.sep != '/':
+        prefix = prefix.replace('/', os.path.sep)
+        path = path.replace('/', os.path.sep)
+    drive = ''
+    if os.path.isabs(path):
+        drive = os.path.splitdrive(prefix)[0]
+        absprefix = os.path.splitdrive(os.path.abspath(prefix))[1]
+        pathdrive, path = os.path.splitdrive(path)
+        d = os.path.commonprefix([absprefix, path])
+        if os.path.join(absprefix[:len(d)], absprefix[len(d):]) != absprefix \
+           or os.path.join(path[:len(d)], path[len(d):]) != path:
+            # Handle invalid paths
+            d = os.path.dirname(d)
+        subpath = path[len(d):]
+        if os.path.isabs(subpath):
+            subpath = subpath[1:]
+    else:
+        subpath = path
+    return os.path.normpath(njoin(drive + prefix, subpath))
+
+def generate_config_py(target):
+    """Generate config.py file containing system_info information
+    used during building the package.
+
+    Usage:
+        config['py_modules'].append((packagename, '__config__',generate_config_py))
+    """
+    from numpy.distutils.system_info import system_info
+    from distutils.dir_util import mkpath
+    mkpath(os.path.dirname(target))
+    f = open(target, 'w')
+    f.write('# This file is generated by %s\n' % (os.path.abspath(sys.argv[0])))
+    f.write('# It contains system_info results at the time of building this package.\n')
+    f.write('__all__ = ["get_info","show"]\n\n')
+    for k, i in system_info.saved_results.items():
+        f.write('%s=%r\n' % (k, i))
+    f.write(r'''
+def get_info(name):
+    g = globals()
+    return g.get(name, g.get(name + "_info", {}))
+
+def show():
+    for name,info_dict in globals().items():
+        if name[0] == "_" or type(info_dict) is not type({}): continue
+        print(name + ":")
+        if not info_dict:
+            print("  NOT AVAILABLE")
+        for k,v in info_dict.items():
+            v = str(v)
+            if k == "sources" and len(v) > 200:
+                v = v[:60] + " ...\n... " + v[-60:]
+            print("    %s = %s" % (k,v))
+    ''')
+
+    f.close()
+    return target
+
+def msvc_version(compiler):
+    """Return version major and minor of compiler instance if it is
+    MSVC, raise an exception otherwise."""
+    if not compiler.compiler_type == "msvc":
+        raise ValueError("Compiler instance is not msvc (%s)"\
+                         % compiler.compiler_type)
+    return compiler._MSVCCompiler__version
+
+if sys.version_info[:2] >= (2, 5):
+    def get_build_architecture():
+        from distutils.msvccompiler import get_build_architecture
+        return get_build_architecture()
+else:
+    #copied from python 2.5.1 distutils/msvccompiler.py
+    def get_build_architecture():
+        """Return the processor architecture.
+
+        Possible results are "Intel", "Itanium", or "AMD64".
+        """
+        prefix = " bit ("
+        i = sys.version.find(prefix)
+        if i == -1:
+            return "Intel"
+        j = sys.version.find(")", i)
+        return sys.version[i+len(prefix):j]