		| @ -0,0 +1,41 @@ | ||||
| """distutils.command | ||||
|  | ||||
| Package containing implementation of all the standard Distutils | ||||
| commands. | ||||
|  | ||||
| """ | ||||
|  | ||||
| __revision__ = "$Id: __init__.py,v 1.3 2005/05/16 11:08:49 pearu Exp $" | ||||
|  | ||||
| distutils_all = [  #'build_py', | ||||
|                    'clean', | ||||
|                    'install_clib', | ||||
|                    'install_scripts', | ||||
|                    'bdist', | ||||
|                    'bdist_dumb', | ||||
|                    'bdist_wininst', | ||||
|                 ] | ||||
|  | ||||
| __import__('distutils.command', globals(), locals(), distutils_all) | ||||
|  | ||||
| __all__ = ['build', | ||||
|            'config_compiler', | ||||
|            'config', | ||||
|            'build_src', | ||||
|            'build_py', | ||||
|            'build_ext', | ||||
|            'build_clib', | ||||
|            'build_scripts', | ||||
|            'install', | ||||
|            'install_data', | ||||
|            'install_headers', | ||||
|            'install_lib', | ||||
|            'bdist_rpm', | ||||
|            'sdist', | ||||
|           ] + distutils_all | ||||
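The `__init__.py` above fills out `__all__` by importing a hand-picked subset of the stock distutils commands alongside numpy's own overrides. A minimal sketch of the same dynamic-import pattern, using importlib and illustrative command names rather than the exact list above:

import importlib

def load_subcommands(package, names):
    # Import each listed submodule so it becomes reachable as an attribute of
    # the package, mirroring what __import__(package, ..., fromlist) does above.
    importlib.import_module(package)
    return {name: importlib.import_module(f"{package}.{name}") for name in names}

# e.g. load_subcommands('distutils.command', ['clean', 'sdist'])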
lib/python3.11/site-packages/numpy/distutils/command/autodist.py (new file, 148 lines)
							| @ -0,0 +1,148 @@ | ||||
| """This module implements additional tests ala autoconf which can be useful. | ||||
|  | ||||
| """ | ||||
| import textwrap | ||||
|  | ||||
| # We put them here since they could be easily reused outside numpy.distutils | ||||
|  | ||||
| def check_inline(cmd): | ||||
|     """Return the inline identifier (may be empty).""" | ||||
|     cmd._check_compiler() | ||||
|     body = textwrap.dedent(""" | ||||
|         #ifndef __cplusplus | ||||
|         static %(inline)s int static_func (void) | ||||
|         { | ||||
|             return 0; | ||||
|         } | ||||
|         %(inline)s int nostatic_func (void) | ||||
|         { | ||||
|             return 0; | ||||
|         } | ||||
|         #endif""") | ||||
|  | ||||
|     for kw in ['inline', '__inline__', '__inline']: | ||||
|         st = cmd.try_compile(body % {'inline': kw}, None, None) | ||||
|         if st: | ||||
|             return kw | ||||
|  | ||||
|     return '' | ||||
|  | ||||
|  | ||||
| def check_restrict(cmd): | ||||
|     """Return the restrict identifier (may be empty).""" | ||||
|     cmd._check_compiler() | ||||
|     body = textwrap.dedent(""" | ||||
|         static int static_func (char * %(restrict)s a) | ||||
|         { | ||||
|             return 0; | ||||
|         } | ||||
|         """) | ||||
|  | ||||
|     for kw in ['restrict', '__restrict__', '__restrict']: | ||||
|         st = cmd.try_compile(body % {'restrict': kw}, None, None) | ||||
|         if st: | ||||
|             return kw | ||||
|  | ||||
|     return '' | ||||
|  | ||||
|  | ||||
| def check_compiler_gcc(cmd): | ||||
|     """Check if the compiler is GCC.""" | ||||
|  | ||||
|     cmd._check_compiler() | ||||
|     body = textwrap.dedent(""" | ||||
|         int | ||||
|         main() | ||||
|         { | ||||
|         #if (! defined __GNUC__) | ||||
|         #error gcc required | ||||
|         #endif | ||||
|             return 0; | ||||
|         } | ||||
|         """) | ||||
|     return cmd.try_compile(body, None, None) | ||||
|  | ||||
|  | ||||
| def check_gcc_version_at_least(cmd, major, minor=0, patchlevel=0): | ||||
|     """ | ||||
|     Check that the gcc version is at least the specified version.""" | ||||
|  | ||||
|     cmd._check_compiler() | ||||
|     version = '.'.join([str(major), str(minor), str(patchlevel)]) | ||||
|     body = textwrap.dedent(""" | ||||
|         int | ||||
|         main() | ||||
|         { | ||||
|         #if (! defined __GNUC__) || (__GNUC__ < %(major)d) || \\ | ||||
|                 (__GNUC_MINOR__ < %(minor)d) || \\ | ||||
|                 (__GNUC_PATCHLEVEL__ < %(patchlevel)d) | ||||
|         #error gcc >= %(version)s required | ||||
|         #endif | ||||
|             return 0; | ||||
|         } | ||||
|         """) | ||||
|     kw = {'version': version, 'major': major, 'minor': minor, | ||||
|           'patchlevel': patchlevel} | ||||
|  | ||||
|     return cmd.try_compile(body % kw, None, None) | ||||
|  | ||||
|  | ||||
| def check_gcc_function_attribute(cmd, attribute, name): | ||||
|     """Return True if the given function attribute is supported.""" | ||||
|     cmd._check_compiler() | ||||
|     body = textwrap.dedent(""" | ||||
|         #pragma GCC diagnostic error "-Wattributes" | ||||
|         #pragma clang diagnostic error "-Wattributes" | ||||
|  | ||||
|         int %s %s(void* unused) | ||||
|         { | ||||
|             return 0; | ||||
|         } | ||||
|  | ||||
|         int | ||||
|         main() | ||||
|         { | ||||
|             return 0; | ||||
|         } | ||||
|         """) % (attribute, name) | ||||
|     return cmd.try_compile(body, None, None) != 0 | ||||
|  | ||||
|  | ||||
| def check_gcc_function_attribute_with_intrinsics(cmd, attribute, name, code, | ||||
|                                                 include): | ||||
|     """Return True if the given function attribute is supported with | ||||
|     intrinsics.""" | ||||
|     cmd._check_compiler() | ||||
|     body = textwrap.dedent(""" | ||||
|         #include<%s> | ||||
|         int %s %s(void) | ||||
|         { | ||||
|             %s; | ||||
|             return 0; | ||||
|         } | ||||
|  | ||||
|         int | ||||
|         main() | ||||
|         { | ||||
|             return 0; | ||||
|         } | ||||
|         """) % (include, attribute, name, code) | ||||
|     return cmd.try_compile(body, None, None) != 0 | ||||
|  | ||||
|  | ||||
| def check_gcc_variable_attribute(cmd, attribute): | ||||
|     """Return True if the given variable attribute is supported.""" | ||||
|     cmd._check_compiler() | ||||
|     body = textwrap.dedent(""" | ||||
|         #pragma GCC diagnostic error "-Wattributes" | ||||
|         #pragma clang diagnostic error "-Wattributes" | ||||
|  | ||||
|         int %s foo; | ||||
|  | ||||
|         int | ||||
|         main() | ||||
|         { | ||||
|             return 0; | ||||
|         } | ||||
|         """) % (attribute, ) | ||||
|     return cmd.try_compile(body, None, None) != 0 | ||||
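All of the probes in autodist.py share one pattern: render a tiny C program, feed it to the command's `try_compile()`, and treat a successful compile as feature support. The sketch below shows how a setup script might drive a few of them; the helper function and the idea of bundling results into a dict are illustrative, not part of numpy.distutils — only `check_inline`, `check_restrict` and `check_compiler_gcc` above are real.

def report_compiler_features(cmd):
    # 'cmd' is expected to be a numpy.distutils config-style command that
    # provides _check_compiler() and try_compile(); the probes above do the
    # rest. An empty string / falsy result means "not supported".
    return {
        'inline': check_inline(cmd),
        'restrict': check_restrict(cmd),
        'gcc': bool(check_compiler_gcc(cmd)),
    }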
| @ -0,0 +1,22 @@ | ||||
| import os | ||||
| import sys | ||||
| if 'setuptools' in sys.modules: | ||||
|     from setuptools.command.bdist_rpm import bdist_rpm as old_bdist_rpm | ||||
| else: | ||||
|     from distutils.command.bdist_rpm import bdist_rpm as old_bdist_rpm | ||||
|  | ||||
| class bdist_rpm(old_bdist_rpm): | ||||
|  | ||||
|     def _make_spec_file(self): | ||||
|         spec_file = old_bdist_rpm._make_spec_file(self) | ||||
|  | ||||
|         # Replace hardcoded setup.py script name | ||||
|         # with the real setup script name. | ||||
|         setup_py = os.path.basename(sys.argv[0]) | ||||
|         if setup_py == 'setup.py': | ||||
|             return spec_file | ||||
|         new_spec_file = [] | ||||
|         for line in spec_file: | ||||
|             line = line.replace('setup.py', setup_py) | ||||
|             new_spec_file.append(line) | ||||
|         return new_spec_file | ||||
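The bdist_rpm override only matters when the project's setup script is not literally named `setup.py`: the generated spec file hardcodes that name, so every occurrence is swapped for the real script name taken from `sys.argv[0]`. A small illustration with made-up spec lines and a hypothetical script name:

spec = ["%build", "%{__python} setup.py build", "%{__python} setup.py install"]
setup_py = "setup_alt.py"          # hypothetical real script name
rewritten = [line.replace("setup.py", setup_py) for line in spec]
# -> ['%build', '%{__python} setup_alt.py build', '%{__python} setup_alt.py install']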
| @ -0,0 +1,62 @@ | ||||
| import os | ||||
| import sys | ||||
| from distutils.command.build import build as old_build | ||||
| from distutils.util import get_platform | ||||
| from numpy.distutils.command.config_compiler import show_fortran_compilers | ||||
|  | ||||
| class build(old_build): | ||||
|  | ||||
|     sub_commands = [('config_cc',     lambda *args: True), | ||||
|                     ('config_fc',     lambda *args: True), | ||||
|                     ('build_src',     old_build.has_ext_modules), | ||||
|                     ] + old_build.sub_commands | ||||
|  | ||||
|     user_options = old_build.user_options + [ | ||||
|         ('fcompiler=', None, | ||||
|          "specify the Fortran compiler type"), | ||||
|         ('warn-error', None, | ||||
|          "turn all warnings into errors (-Werror)"), | ||||
|         ('cpu-baseline=', None, | ||||
|          "specify a list of enabled baseline CPU optimizations"), | ||||
|         ('cpu-dispatch=', None, | ||||
|          "specify a list of dispatched CPU optimizations"), | ||||
|         ('disable-optimization', None, | ||||
|          "disable CPU optimized code(dispatch,simd,fast...)"), | ||||
|         ('simd-test=', None, | ||||
|          "specify a list of CPU optimizations to be tested against NumPy SIMD interface"), | ||||
|         ] | ||||
|  | ||||
|     help_options = old_build.help_options + [ | ||||
|         ('help-fcompiler', None, "list available Fortran compilers", | ||||
|          show_fortran_compilers), | ||||
|         ] | ||||
|  | ||||
|     def initialize_options(self): | ||||
|         old_build.initialize_options(self) | ||||
|         self.fcompiler = None | ||||
|         self.warn_error = False | ||||
|         self.cpu_baseline = "min" | ||||
|         self.cpu_dispatch = "max -xop -fma4" # drop AMD legacy features by default | ||||
|         self.disable_optimization = False | ||||
|         """ | ||||
|         The '_simd' module is very large: adding more dispatched features | ||||
|         increases binary size and compile time. By default we minimize the | ||||
|         targeted features to those most commonly used by the NumPy SIMD interface (NPYV). | ||||
|         NOTE: any specified features will be ignored if they're: | ||||
|             - part of the baseline(--cpu-baseline) | ||||
|             - not part of dispatch-able features(--cpu-dispatch) | ||||
|             - not supported by compiler or platform | ||||
|         """ | ||||
|         self.simd_test = "BASELINE SSE2 SSE42 XOP FMA4 (FMA3 AVX2) AVX512F " \ | ||||
|                          "AVX512_SKX VSX VSX2 VSX3 VSX4 NEON ASIMD VX VXE VXE2" | ||||
|  | ||||
|     def finalize_options(self): | ||||
|         build_scripts = self.build_scripts | ||||
|         old_build.finalize_options(self) | ||||
|         plat_specifier = ".{}-{}.{}".format(get_platform(), *sys.version_info[:2]) | ||||
|         if build_scripts is None: | ||||
|             self.build_scripts = os.path.join(self.build_base, | ||||
|                                               'scripts' + plat_specifier) | ||||
|  | ||||
|     def run(self): | ||||
|         old_build.run(self) | ||||
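In `finalize_options`, build.py only supplies a default for `build_scripts` when the user did not set one, appending a platform/Python-version suffix so builds for different interpreters do not collide. A hedged example of the resulting path (the exact platform tag depends on the host):

import os
import sys
from distutils.util import get_platform

plat_specifier = ".{}-{}.{}".format(get_platform(), *sys.version_info[:2])
# On x86-64 Linux with CPython 3.11 this is '.linux-x86_64-3.11', giving a
# default of build/scripts.linux-x86_64-3.11
print(os.path.join("build", "scripts" + plat_specifier))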
| @ -0,0 +1,469 @@ | ||||
| """ Modified version of build_clib that handles fortran source files. | ||||
| """ | ||||
| import os | ||||
| from glob import glob | ||||
| import shutil | ||||
| from distutils.command.build_clib import build_clib as old_build_clib | ||||
| from distutils.errors import DistutilsSetupError, DistutilsError, \ | ||||
|     DistutilsFileError | ||||
|  | ||||
| from numpy.distutils import log | ||||
| from distutils.dep_util import newer_group | ||||
| from numpy.distutils.misc_util import ( | ||||
|     filter_sources, get_lib_source_files, get_numpy_include_dirs, | ||||
|     has_cxx_sources, has_f_sources, is_sequence | ||||
| ) | ||||
| from numpy.distutils.ccompiler_opt import new_ccompiler_opt | ||||
|  | ||||
| # Fix Python distutils bug sf #1718574: | ||||
| _l = old_build_clib.user_options | ||||
| for _i in range(len(_l)): | ||||
|     if _l[_i][0] in ['build-clib', 'build-temp']: | ||||
|         _l[_i] = (_l[_i][0] + '=',) + _l[_i][1:] | ||||
| # | ||||
|  | ||||
|  | ||||
| class build_clib(old_build_clib): | ||||
|  | ||||
|     description = "build C/C++/F libraries used by Python extensions" | ||||
|  | ||||
|     user_options = old_build_clib.user_options + [ | ||||
|         ('fcompiler=', None, | ||||
|          "specify the Fortran compiler type"), | ||||
|         ('inplace', 'i', 'Build in-place'), | ||||
|         ('parallel=', 'j', | ||||
|          "number of parallel jobs"), | ||||
|         ('warn-error', None, | ||||
|          "turn all warnings into errors (-Werror)"), | ||||
|         ('cpu-baseline=', None, | ||||
|          "specify a list of enabled baseline CPU optimizations"), | ||||
|         ('cpu-dispatch=', None, | ||||
|          "specify a list of dispatched CPU optimizations"), | ||||
|         ('disable-optimization', None, | ||||
|          "disable CPU optimized code(dispatch,simd,fast...)"), | ||||
|     ] | ||||
|  | ||||
|     boolean_options = old_build_clib.boolean_options + \ | ||||
|     ['inplace', 'warn-error', 'disable-optimization'] | ||||
|  | ||||
|     def initialize_options(self): | ||||
|         old_build_clib.initialize_options(self) | ||||
|         self.fcompiler = None | ||||
|         self.inplace = 0 | ||||
|         self.parallel = None | ||||
|         self.warn_error = None | ||||
|         self.cpu_baseline = None | ||||
|         self.cpu_dispatch = None | ||||
|         self.disable_optimization = None | ||||
|  | ||||
|  | ||||
|     def finalize_options(self): | ||||
|         if self.parallel: | ||||
|             try: | ||||
|                 self.parallel = int(self.parallel) | ||||
|             except ValueError as e: | ||||
|                 raise ValueError("--parallel/-j argument must be an integer") from e | ||||
|         old_build_clib.finalize_options(self) | ||||
|         self.set_undefined_options('build', | ||||
|                                         ('parallel', 'parallel'), | ||||
|                                         ('warn_error', 'warn_error'), | ||||
|                                         ('cpu_baseline', 'cpu_baseline'), | ||||
|                                         ('cpu_dispatch', 'cpu_dispatch'), | ||||
|                                         ('disable_optimization', 'disable_optimization') | ||||
|                                   ) | ||||
|  | ||||
|     def have_f_sources(self): | ||||
|         for (lib_name, build_info) in self.libraries: | ||||
|             if has_f_sources(build_info.get('sources', [])): | ||||
|                 return True | ||||
|         return False | ||||
|  | ||||
|     def have_cxx_sources(self): | ||||
|         for (lib_name, build_info) in self.libraries: | ||||
|             if has_cxx_sources(build_info.get('sources', [])): | ||||
|                 return True | ||||
|         return False | ||||
|  | ||||
|     def run(self): | ||||
|         if not self.libraries: | ||||
|             return | ||||
|  | ||||
|         # Make sure that library sources are complete. | ||||
|         languages = [] | ||||
|  | ||||
|         # Make sure that extension sources are complete. | ||||
|         self.run_command('build_src') | ||||
|  | ||||
|         for (lib_name, build_info) in self.libraries: | ||||
|             l = build_info.get('language', None) | ||||
|             if l and l not in languages: | ||||
|                 languages.append(l) | ||||
|  | ||||
|         from distutils.ccompiler import new_compiler | ||||
|         self.compiler = new_compiler(compiler=self.compiler, | ||||
|                                      dry_run=self.dry_run, | ||||
|                                      force=self.force) | ||||
|         self.compiler.customize(self.distribution, | ||||
|                                 need_cxx=self.have_cxx_sources()) | ||||
|  | ||||
|         if self.warn_error: | ||||
|             self.compiler.compiler.append('-Werror') | ||||
|             self.compiler.compiler_so.append('-Werror') | ||||
|  | ||||
|         libraries = self.libraries | ||||
|         self.libraries = None | ||||
|         self.compiler.customize_cmd(self) | ||||
|         self.libraries = libraries | ||||
|  | ||||
|         self.compiler.show_customization() | ||||
|  | ||||
|         if not self.disable_optimization: | ||||
|             dispatch_hpath = os.path.join("numpy", "distutils", "include", "npy_cpu_dispatch_config.h") | ||||
|             dispatch_hpath = os.path.join(self.get_finalized_command("build_src").build_src, dispatch_hpath) | ||||
|             opt_cache_path = os.path.abspath( | ||||
|                 os.path.join(self.build_temp, 'ccompiler_opt_cache_clib.py') | ||||
|             ) | ||||
|             if hasattr(self, "compiler_opt"): | ||||
|                 # By default `CCompilerOpt` updates the cache when the | ||||
|                 # process exits, which may lead to duplicate building | ||||
|                 # (see build_extension()/force_rebuild) if run() is called | ||||
|                 # multiple times within the same os process/thread without | ||||
|                 # giving the previous instances of `CCompilerOpt` a chance | ||||
|                 # to update the cache. | ||||
|                 self.compiler_opt.cache_flush() | ||||
|  | ||||
|             self.compiler_opt = new_ccompiler_opt( | ||||
|                 compiler=self.compiler, dispatch_hpath=dispatch_hpath, | ||||
|                 cpu_baseline=self.cpu_baseline, cpu_dispatch=self.cpu_dispatch, | ||||
|                 cache_path=opt_cache_path | ||||
|             ) | ||||
|             def report(copt): | ||||
|                 log.info("\n########### CLIB COMPILER OPTIMIZATION ###########") | ||||
|                 log.info(copt.report(full=True)) | ||||
|  | ||||
|             import atexit | ||||
|             atexit.register(report, self.compiler_opt) | ||||
|  | ||||
|         if self.have_f_sources(): | ||||
|             from numpy.distutils.fcompiler import new_fcompiler | ||||
|             self._f_compiler = new_fcompiler(compiler=self.fcompiler, | ||||
|                                              verbose=self.verbose, | ||||
|                                              dry_run=self.dry_run, | ||||
|                                              force=self.force, | ||||
|                                              requiref90='f90' in languages, | ||||
|                                              c_compiler=self.compiler) | ||||
|             if self._f_compiler is not None: | ||||
|                 self._f_compiler.customize(self.distribution) | ||||
|  | ||||
|                 libraries = self.libraries | ||||
|                 self.libraries = None | ||||
|                 self._f_compiler.customize_cmd(self) | ||||
|                 self.libraries = libraries | ||||
|  | ||||
|                 self._f_compiler.show_customization() | ||||
|         else: | ||||
|             self._f_compiler = None | ||||
|  | ||||
|         self.build_libraries(self.libraries) | ||||
|  | ||||
|         if self.inplace: | ||||
|             for l in self.distribution.installed_libraries: | ||||
|                 libname = self.compiler.library_filename(l.name) | ||||
|                 source = os.path.join(self.build_clib, libname) | ||||
|                 target = os.path.join(l.target_dir, libname) | ||||
|                 self.mkpath(l.target_dir) | ||||
|                 shutil.copy(source, target) | ||||
|  | ||||
|     def get_source_files(self): | ||||
|         self.check_library_list(self.libraries) | ||||
|         filenames = [] | ||||
|         for lib in self.libraries: | ||||
|             filenames.extend(get_lib_source_files(lib)) | ||||
|         return filenames | ||||
|  | ||||
|     def build_libraries(self, libraries): | ||||
|         for (lib_name, build_info) in libraries: | ||||
|             self.build_a_library(build_info, lib_name, libraries) | ||||
|  | ||||
|     def assemble_flags(self, in_flags): | ||||
|         """ Assemble flags from flag list | ||||
|  | ||||
|         Parameters | ||||
|         ---------- | ||||
|         in_flags : None or sequence | ||||
|             None corresponds to empty list.  Sequence elements can be strings | ||||
|             or callables that return lists of strings. Callable takes `self` as | ||||
|             single parameter. | ||||
|  | ||||
|         Returns | ||||
|         ------- | ||||
|         out_flags : list | ||||
|         """ | ||||
|         if in_flags is None: | ||||
|             return [] | ||||
|         out_flags = [] | ||||
|         for in_flag in in_flags: | ||||
|             if callable(in_flag): | ||||
|                 out_flags += in_flag(self) | ||||
|             else: | ||||
|                 out_flags.append(in_flag) | ||||
|         return out_flags | ||||
|  | ||||
|     def build_a_library(self, build_info, lib_name, libraries): | ||||
|         # default compilers | ||||
|         compiler = self.compiler | ||||
|         fcompiler = self._f_compiler | ||||
|  | ||||
|         sources = build_info.get('sources') | ||||
|         if sources is None or not is_sequence(sources): | ||||
|             raise DistutilsSetupError(("in 'libraries' option (library '%s'), " | ||||
|                                        "'sources' must be present and must be " | ||||
|                                        "a list of source filenames") % lib_name) | ||||
|         sources = list(sources) | ||||
|  | ||||
|         c_sources, cxx_sources, f_sources, fmodule_sources \ | ||||
|             = filter_sources(sources) | ||||
|         requiref90 = not not fmodule_sources or \ | ||||
|             build_info.get('language', 'c') == 'f90' | ||||
|  | ||||
|         # save source type information so that build_ext can use it. | ||||
|         source_languages = [] | ||||
|         if c_sources: | ||||
|             source_languages.append('c') | ||||
|         if cxx_sources: | ||||
|             source_languages.append('c++') | ||||
|         if requiref90: | ||||
|             source_languages.append('f90') | ||||
|         elif f_sources: | ||||
|             source_languages.append('f77') | ||||
|         build_info['source_languages'] = source_languages | ||||
|  | ||||
|         lib_file = compiler.library_filename(lib_name, | ||||
|                                              output_dir=self.build_clib) | ||||
|         depends = sources + build_info.get('depends', []) | ||||
|  | ||||
|         force_rebuild = self.force | ||||
|         if not self.disable_optimization and not self.compiler_opt.is_cached(): | ||||
|             log.debug("Detected changes on compiler optimizations") | ||||
|             force_rebuild = True | ||||
|         if not (force_rebuild or newer_group(depends, lib_file, 'newer')): | ||||
|             log.debug("skipping '%s' library (up-to-date)", lib_name) | ||||
|             return | ||||
|         else: | ||||
|             log.info("building '%s' library", lib_name) | ||||
|  | ||||
|         config_fc = build_info.get('config_fc', {}) | ||||
|         if fcompiler is not None and config_fc: | ||||
|             log.info('using additional config_fc from setup script ' | ||||
|                      'for fortran compiler: %s' | ||||
|                      % (config_fc,)) | ||||
|             from numpy.distutils.fcompiler import new_fcompiler | ||||
|             fcompiler = new_fcompiler(compiler=fcompiler.compiler_type, | ||||
|                                       verbose=self.verbose, | ||||
|                                       dry_run=self.dry_run, | ||||
|                                       force=self.force, | ||||
|                                       requiref90=requiref90, | ||||
|                                       c_compiler=self.compiler) | ||||
|             if fcompiler is not None: | ||||
|                 dist = self.distribution | ||||
|                 base_config_fc = dist.get_option_dict('config_fc').copy() | ||||
|                 base_config_fc.update(config_fc) | ||||
|                 fcompiler.customize(base_config_fc) | ||||
|  | ||||
|         # check availability of Fortran compilers | ||||
|         if (f_sources or fmodule_sources) and fcompiler is None: | ||||
|             raise DistutilsError("library %s has Fortran sources" | ||||
|                                  " but no Fortran compiler found" % (lib_name)) | ||||
|  | ||||
|         if fcompiler is not None: | ||||
|             fcompiler.extra_f77_compile_args = build_info.get( | ||||
|                 'extra_f77_compile_args') or [] | ||||
|             fcompiler.extra_f90_compile_args = build_info.get( | ||||
|                 'extra_f90_compile_args') or [] | ||||
|  | ||||
|         macros = build_info.get('macros') | ||||
|         if macros is None: | ||||
|             macros = [] | ||||
|         include_dirs = build_info.get('include_dirs') | ||||
|         if include_dirs is None: | ||||
|             include_dirs = [] | ||||
|         # Flags can be strings, or callables that return a list of strings. | ||||
|         extra_postargs = self.assemble_flags( | ||||
|             build_info.get('extra_compiler_args')) | ||||
|         extra_cflags = self.assemble_flags( | ||||
|             build_info.get('extra_cflags')) | ||||
|         extra_cxxflags = self.assemble_flags( | ||||
|             build_info.get('extra_cxxflags')) | ||||
|  | ||||
|         include_dirs.extend(get_numpy_include_dirs()) | ||||
|         # where compiled F90 module files are: | ||||
|         module_dirs = build_info.get('module_dirs') or [] | ||||
|         module_build_dir = os.path.dirname(lib_file) | ||||
|         if requiref90: | ||||
|             self.mkpath(module_build_dir) | ||||
|  | ||||
|         if compiler.compiler_type == 'msvc': | ||||
|             # this hack works around the msvc compiler attributes | ||||
|             # problem, msvc uses its own convention :( | ||||
|             c_sources += cxx_sources | ||||
|             cxx_sources = [] | ||||
|             extra_cflags += extra_cxxflags | ||||
|  | ||||
|         # filtering C dispatch-table sources when optimization is not disabled, | ||||
|         # otherwise treated as normal sources. | ||||
|         copt_c_sources = [] | ||||
|         copt_cxx_sources = [] | ||||
|         copt_baseline_flags = [] | ||||
|         copt_macros = [] | ||||
|         if not self.disable_optimization: | ||||
|             bsrc_dir = self.get_finalized_command("build_src").build_src | ||||
|             dispatch_hpath = os.path.join("numpy", "distutils", "include") | ||||
|             dispatch_hpath = os.path.join(bsrc_dir, dispatch_hpath) | ||||
|             include_dirs.append(dispatch_hpath) | ||||
|             # copt_build_src = None if self.inplace else bsrc_dir | ||||
|             copt_build_src = bsrc_dir | ||||
|             for _srcs, _dst, _ext in ( | ||||
|                 ((c_sources,), copt_c_sources, ('.dispatch.c',)), | ||||
|                 ((c_sources, cxx_sources), copt_cxx_sources, | ||||
|                     ('.dispatch.cpp', '.dispatch.cxx')) | ||||
|             ): | ||||
|                 for _src in _srcs: | ||||
|                     _dst += [ | ||||
|                         _src.pop(_src.index(s)) | ||||
|                         for s in _src[:] if s.endswith(_ext) | ||||
|                     ] | ||||
|             copt_baseline_flags = self.compiler_opt.cpu_baseline_flags() | ||||
|         else: | ||||
|             copt_macros.append(("NPY_DISABLE_OPTIMIZATION", 1)) | ||||
|  | ||||
|         objects = [] | ||||
|         if copt_cxx_sources: | ||||
|             log.info("compiling C++ dispatch-able sources") | ||||
|             # dispatch-able C++ sources need the C++ flavour of the compiler | ||||
|             cxx_compiler = compiler.cxx_compiler() | ||||
|             objects += self.compiler_opt.try_dispatch( | ||||
|                 copt_cxx_sources, | ||||
|                 output_dir=self.build_temp, | ||||
|                 src_dir=copt_build_src, | ||||
|                 macros=macros + copt_macros, | ||||
|                 include_dirs=include_dirs, | ||||
|                 debug=self.debug, | ||||
|                 extra_postargs=extra_postargs + extra_cxxflags, | ||||
|                 ccompiler=cxx_compiler | ||||
|             ) | ||||
|  | ||||
|         if copt_c_sources: | ||||
|             log.info("compiling C dispatch-able sources") | ||||
|             objects += self.compiler_opt.try_dispatch( | ||||
|                 copt_c_sources, | ||||
|                 output_dir=self.build_temp, | ||||
|                 src_dir=copt_build_src, | ||||
|                 macros=macros + copt_macros, | ||||
|                 include_dirs=include_dirs, | ||||
|                 debug=self.debug, | ||||
|                 extra_postargs=extra_postargs + extra_cflags) | ||||
|  | ||||
|         if c_sources: | ||||
|             log.info("compiling C sources") | ||||
|             objects += compiler.compile( | ||||
|                 c_sources, | ||||
|                 output_dir=self.build_temp, | ||||
|                 macros=macros + copt_macros, | ||||
|                 include_dirs=include_dirs, | ||||
|                 debug=self.debug, | ||||
|                 extra_postargs=(extra_postargs + | ||||
|                                 copt_baseline_flags + | ||||
|                                 extra_cflags)) | ||||
|  | ||||
|         if cxx_sources: | ||||
|             log.info("compiling C++ sources") | ||||
|             cxx_compiler = compiler.cxx_compiler() | ||||
|             cxx_objects = cxx_compiler.compile( | ||||
|                 cxx_sources, | ||||
|                 output_dir=self.build_temp, | ||||
|                 macros=macros + copt_macros, | ||||
|                 include_dirs=include_dirs, | ||||
|                 debug=self.debug, | ||||
|                 extra_postargs=(extra_postargs + | ||||
|                                 copt_baseline_flags + | ||||
|                                 extra_cxxflags)) | ||||
|             objects.extend(cxx_objects) | ||||
|  | ||||
|         if f_sources or fmodule_sources: | ||||
|             extra_postargs = [] | ||||
|             f_objects = [] | ||||
|  | ||||
|             if requiref90: | ||||
|                 if fcompiler.module_dir_switch is None: | ||||
|                     existing_modules = glob('*.mod') | ||||
|                 extra_postargs += fcompiler.module_options( | ||||
|                     module_dirs, module_build_dir) | ||||
|  | ||||
|             if fmodule_sources: | ||||
|                 log.info("compiling Fortran 90 module sources") | ||||
|                 f_objects += fcompiler.compile(fmodule_sources, | ||||
|                                                output_dir=self.build_temp, | ||||
|                                                macros=macros, | ||||
|                                                include_dirs=include_dirs, | ||||
|                                                debug=self.debug, | ||||
|                                                extra_postargs=extra_postargs) | ||||
|  | ||||
|             if requiref90 and self._f_compiler.module_dir_switch is None: | ||||
|                 # move new compiled F90 module files to module_build_dir | ||||
|                 for f in glob('*.mod'): | ||||
|                     if f in existing_modules: | ||||
|                         continue | ||||
|                     t = os.path.join(module_build_dir, f) | ||||
|                     if os.path.abspath(f) == os.path.abspath(t): | ||||
|                         continue | ||||
|                     if os.path.isfile(t): | ||||
|                         os.remove(t) | ||||
|                     try: | ||||
|                         self.move_file(f, module_build_dir) | ||||
|                     except DistutilsFileError: | ||||
|                         log.warn('failed to move %r to %r' | ||||
|                                  % (f, module_build_dir)) | ||||
|  | ||||
|             if f_sources: | ||||
|                 log.info("compiling Fortran sources") | ||||
|                 f_objects += fcompiler.compile(f_sources, | ||||
|                                                output_dir=self.build_temp, | ||||
|                                                macros=macros, | ||||
|                                                include_dirs=include_dirs, | ||||
|                                                debug=self.debug, | ||||
|                                                extra_postargs=extra_postargs) | ||||
|         else: | ||||
|             f_objects = [] | ||||
|  | ||||
|         if f_objects and not fcompiler.can_ccompiler_link(compiler): | ||||
|             # Default linker cannot link Fortran object files, and results | ||||
|             # need to be wrapped later. Instead of creating a real static | ||||
|             # library, just keep track of the object files. | ||||
|             listfn = os.path.join(self.build_clib, | ||||
|                                   lib_name + '.fobjects') | ||||
|             with open(listfn, 'w') as f: | ||||
|                 f.write("\n".join(os.path.abspath(obj) for obj in f_objects)) | ||||
|  | ||||
|             listfn = os.path.join(self.build_clib, | ||||
|                                   lib_name + '.cobjects') | ||||
|             with open(listfn, 'w') as f: | ||||
|                 f.write("\n".join(os.path.abspath(obj) for obj in objects)) | ||||
|  | ||||
|             # create empty "library" file for dependency tracking | ||||
|             lib_fname = os.path.join(self.build_clib, | ||||
|                                      lib_name + compiler.static_lib_extension) | ||||
|             with open(lib_fname, 'wb') as f: | ||||
|                 pass | ||||
|         else: | ||||
|             # assume that default linker is suitable for | ||||
|             # linking Fortran object files | ||||
|             objects.extend(f_objects) | ||||
|             compiler.create_static_lib(objects, lib_name, | ||||
|                                        output_dir=self.build_clib, | ||||
|                                        debug=self.debug) | ||||
|  | ||||
|         # fix library dependencies | ||||
|         clib_libraries = build_info.get('libraries', []) | ||||
|         for lname, binfo in libraries: | ||||
|             if lname in clib_libraries: | ||||
|                 clib_libraries.extend(binfo.get('libraries', [])) | ||||
|         if clib_libraries: | ||||
|             build_info['libraries'] = clib_libraries | ||||
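One detail worth calling out in build_clib above is `assemble_flags`: entries in the extra-flag lists may be plain strings or callables that receive the command instance and return a list of flags, which lets a setup script compute flags at build time. A standalone sketch of that contract (the flag values and the fake command object are made up for illustration):

flags_spec = [
    '-O3',
    lambda cmd: ['-g'] if getattr(cmd, 'debug', False) else [],
]

class _FakeCmd:
    debug = True

out_flags = []
for item in flags_spec:
    # mirror assemble_flags(): call callables with the command, keep strings
    out_flags += item(_FakeCmd()) if callable(item) else [item]
assert out_flags == ['-O3', '-g']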
| @ -0,0 +1,752 @@ | ||||
| """ Modified version of build_ext that handles fortran source files. | ||||
|  | ||||
| """ | ||||
| import os | ||||
| import subprocess | ||||
| from glob import glob | ||||
|  | ||||
| from distutils.dep_util import newer_group | ||||
| from distutils.command.build_ext import build_ext as old_build_ext | ||||
| from distutils.errors import DistutilsFileError, DistutilsSetupError,\ | ||||
|     DistutilsError | ||||
| from distutils.file_util import copy_file | ||||
|  | ||||
| from numpy.distutils import log | ||||
| from numpy.distutils.exec_command import filepath_from_subprocess_output | ||||
| from numpy.distutils.system_info import combine_paths | ||||
| from numpy.distutils.misc_util import ( | ||||
|     filter_sources, get_ext_source_files, get_numpy_include_dirs, | ||||
|     has_cxx_sources, has_f_sources, is_sequence | ||||
| ) | ||||
| from numpy.distutils.command.config_compiler import show_fortran_compilers | ||||
| from numpy.distutils.ccompiler_opt import new_ccompiler_opt, CCompilerOpt | ||||
|  | ||||
| class build_ext (old_build_ext): | ||||
|  | ||||
|     description = "build C/C++/F extensions (compile/link to build directory)" | ||||
|  | ||||
|     user_options = old_build_ext.user_options + [ | ||||
|         ('fcompiler=', None, | ||||
|          "specify the Fortran compiler type"), | ||||
|         ('parallel=', 'j', | ||||
|          "number of parallel jobs"), | ||||
|         ('warn-error', None, | ||||
|          "turn all warnings into errors (-Werror)"), | ||||
|         ('cpu-baseline=', None, | ||||
|          "specify a list of enabled baseline CPU optimizations"), | ||||
|         ('cpu-dispatch=', None, | ||||
|          "specify a list of dispatched CPU optimizations"), | ||||
|         ('disable-optimization', None, | ||||
|          "disable CPU optimized code(dispatch,simd,fast...)"), | ||||
|         ('simd-test=', None, | ||||
|          "specify a list of CPU optimizations to be tested against NumPy SIMD interface"), | ||||
|     ] | ||||
|  | ||||
|     help_options = old_build_ext.help_options + [ | ||||
|         ('help-fcompiler', None, "list available Fortran compilers", | ||||
|          show_fortran_compilers), | ||||
|     ] | ||||
|  | ||||
|     boolean_options = old_build_ext.boolean_options + ['warn-error', 'disable-optimization'] | ||||
|  | ||||
|     def initialize_options(self): | ||||
|         old_build_ext.initialize_options(self) | ||||
|         self.fcompiler = None | ||||
|         self.parallel = None | ||||
|         self.warn_error = None | ||||
|         self.cpu_baseline = None | ||||
|         self.cpu_dispatch = None | ||||
|         self.disable_optimization = None | ||||
|         self.simd_test = None | ||||
|  | ||||
|     def finalize_options(self): | ||||
|         if self.parallel: | ||||
|             try: | ||||
|                 self.parallel = int(self.parallel) | ||||
|             except ValueError as e: | ||||
|                 raise ValueError("--parallel/-j argument must be an integer") from e | ||||
|  | ||||
|         # Ensure that self.include_dirs and self.distribution.include_dirs | ||||
|         # refer to the same list object. finalize_options will modify | ||||
|         # self.include_dirs, but self.distribution.include_dirs is used | ||||
|         # during the actual build. | ||||
|         # self.include_dirs is None unless paths are specified with | ||||
|         # --include-dirs. | ||||
|         # The include paths will be passed to the compiler in the order: | ||||
|         # numpy paths, --include-dirs paths, Python include path. | ||||
|         if isinstance(self.include_dirs, str): | ||||
|             self.include_dirs = self.include_dirs.split(os.pathsep) | ||||
|         incl_dirs = self.include_dirs or [] | ||||
|         if self.distribution.include_dirs is None: | ||||
|             self.distribution.include_dirs = [] | ||||
|         self.include_dirs = self.distribution.include_dirs | ||||
|         self.include_dirs.extend(incl_dirs) | ||||
|  | ||||
|         old_build_ext.finalize_options(self) | ||||
|         self.set_undefined_options('build', | ||||
|                                         ('parallel', 'parallel'), | ||||
|                                         ('warn_error', 'warn_error'), | ||||
|                                         ('cpu_baseline', 'cpu_baseline'), | ||||
|                                         ('cpu_dispatch', 'cpu_dispatch'), | ||||
|                                         ('disable_optimization', 'disable_optimization'), | ||||
|                                         ('simd_test', 'simd_test') | ||||
|                                   ) | ||||
|         CCompilerOpt.conf_target_groups["simd_test"] = self.simd_test | ||||
|  | ||||
|     def run(self): | ||||
|         if not self.extensions: | ||||
|             return | ||||
|  | ||||
|         # Make sure that extension sources are complete. | ||||
|         self.run_command('build_src') | ||||
|  | ||||
|         if self.distribution.has_c_libraries(): | ||||
|             if self.inplace: | ||||
|                 if self.distribution.have_run.get('build_clib'): | ||||
|                     log.warn('build_clib already run, it is too late to ' | ||||
|                              'ensure in-place build of build_clib') | ||||
|                     build_clib = self.distribution.get_command_obj( | ||||
|                         'build_clib') | ||||
|                 else: | ||||
|                     build_clib = self.distribution.get_command_obj( | ||||
|                         'build_clib') | ||||
|                     build_clib.inplace = 1 | ||||
|                     build_clib.ensure_finalized() | ||||
|                     build_clib.run() | ||||
|                     self.distribution.have_run['build_clib'] = 1 | ||||
|  | ||||
|             else: | ||||
|                 self.run_command('build_clib') | ||||
|                 build_clib = self.get_finalized_command('build_clib') | ||||
|             self.library_dirs.append(build_clib.build_clib) | ||||
|         else: | ||||
|             build_clib = None | ||||
|  | ||||
|         # Not including C libraries to the list of | ||||
|         # extension libraries automatically to prevent | ||||
|         # bogus linking commands. Extensions must | ||||
|         # explicitly specify the C libraries that they use. | ||||
|  | ||||
|         from distutils.ccompiler import new_compiler | ||||
|         from numpy.distutils.fcompiler import new_fcompiler | ||||
|  | ||||
|         compiler_type = self.compiler | ||||
|         # Initialize C compiler: | ||||
|         self.compiler = new_compiler(compiler=compiler_type, | ||||
|                                      verbose=self.verbose, | ||||
|                                      dry_run=self.dry_run, | ||||
|                                      force=self.force) | ||||
|         self.compiler.customize(self.distribution) | ||||
|         self.compiler.customize_cmd(self) | ||||
|  | ||||
|         if self.warn_error: | ||||
|             self.compiler.compiler.append('-Werror') | ||||
|             self.compiler.compiler_so.append('-Werror') | ||||
|  | ||||
|         self.compiler.show_customization() | ||||
|  | ||||
|         if not self.disable_optimization: | ||||
|             dispatch_hpath = os.path.join("numpy", "distutils", "include", "npy_cpu_dispatch_config.h") | ||||
|             dispatch_hpath = os.path.join(self.get_finalized_command("build_src").build_src, dispatch_hpath) | ||||
|             opt_cache_path = os.path.abspath( | ||||
|                 os.path.join(self.build_temp, 'ccompiler_opt_cache_ext.py') | ||||
|             ) | ||||
|             if hasattr(self, "compiler_opt"): | ||||
|                 # By default `CCompilerOpt` updates the cache when the | ||||
|                 # process exits, which may lead to duplicate building | ||||
|                 # (see build_extension()/force_rebuild) if run() is called | ||||
|                 # multiple times within the same os process/thread without | ||||
|                 # giving the previous instances of `CCompilerOpt` a chance | ||||
|                 # to update the cache. | ||||
|                 self.compiler_opt.cache_flush() | ||||
|  | ||||
|             self.compiler_opt = new_ccompiler_opt( | ||||
|                 compiler=self.compiler, dispatch_hpath=dispatch_hpath, | ||||
|                 cpu_baseline=self.cpu_baseline, cpu_dispatch=self.cpu_dispatch, | ||||
|                 cache_path=opt_cache_path | ||||
|             ) | ||||
|             def report(copt): | ||||
|                 log.info("\n########### EXT COMPILER OPTIMIZATION ###########") | ||||
|                 log.info(copt.report(full=True)) | ||||
|  | ||||
|             import atexit | ||||
|             atexit.register(report, self.compiler_opt) | ||||
|  | ||||
|         # Setup directory for storing generated extra DLL files on Windows | ||||
|         self.extra_dll_dir = os.path.join(self.build_temp, '.libs') | ||||
|         if not os.path.isdir(self.extra_dll_dir): | ||||
|             os.makedirs(self.extra_dll_dir) | ||||
|  | ||||
|         # Create mapping of libraries built by build_clib: | ||||
|         clibs = {} | ||||
|         if build_clib is not None: | ||||
|             for libname, build_info in build_clib.libraries or []: | ||||
|                 if libname in clibs and clibs[libname] != build_info: | ||||
|                     log.warn('library %r defined more than once,' | ||||
|                              ' overwriting build_info\n%s... \nwith\n%s...' | ||||
|                              % (libname, repr(clibs[libname])[:300], repr(build_info)[:300])) | ||||
|                 clibs[libname] = build_info | ||||
|         # .. and distribution libraries: | ||||
|         for libname, build_info in self.distribution.libraries or []: | ||||
|             if libname in clibs: | ||||
|                 # build_clib libraries have a precedence before distribution ones | ||||
|                 continue | ||||
|             clibs[libname] = build_info | ||||
|  | ||||
|         # Determine if C++/Fortran 77/Fortran 90 compilers are needed. | ||||
|         # Update extension libraries, library_dirs, and macros. | ||||
|         all_languages = set() | ||||
|         for ext in self.extensions: | ||||
|             ext_languages = set() | ||||
|             c_libs = [] | ||||
|             c_lib_dirs = [] | ||||
|             macros = [] | ||||
|             for libname in ext.libraries: | ||||
|                 if libname in clibs: | ||||
|                     binfo = clibs[libname] | ||||
|                     c_libs += binfo.get('libraries', []) | ||||
|                     c_lib_dirs += binfo.get('library_dirs', []) | ||||
|                     for m in binfo.get('macros', []): | ||||
|                         if m not in macros: | ||||
|                             macros.append(m) | ||||
|  | ||||
|                 for l in clibs.get(libname, {}).get('source_languages', []): | ||||
|                     ext_languages.add(l) | ||||
|             if c_libs: | ||||
|                 new_c_libs = ext.libraries + c_libs | ||||
|                 log.info('updating extension %r libraries from %r to %r' | ||||
|                          % (ext.name, ext.libraries, new_c_libs)) | ||||
|                 ext.libraries = new_c_libs | ||||
|                 ext.library_dirs = ext.library_dirs + c_lib_dirs | ||||
|             if macros: | ||||
|                 log.info('extending extension %r defined_macros with %r' | ||||
|                          % (ext.name, macros)) | ||||
|                 ext.define_macros = ext.define_macros + macros | ||||
|  | ||||
|             # determine extension languages | ||||
|             if has_f_sources(ext.sources): | ||||
|                 ext_languages.add('f77') | ||||
|             if has_cxx_sources(ext.sources): | ||||
|                 ext_languages.add('c++') | ||||
|             l = ext.language or self.compiler.detect_language(ext.sources) | ||||
|             if l: | ||||
|                 ext_languages.add(l) | ||||
|  | ||||
|             # reset language attribute for choosing proper linker | ||||
|             # | ||||
|             # When we build extensions with multiple languages, we have to | ||||
|             # choose a linker. The rules here are: | ||||
|             #   1. if there is Fortran code, always prefer the Fortran linker, | ||||
|             #   2. otherwise prefer C++ over C, | ||||
|             #   3. Users can force a particular linker by using | ||||
|             #          `language='c'`  # or 'c++', 'f90', 'f77' | ||||
|             #      in their config.add_extension() calls. | ||||
|             if 'c++' in ext_languages: | ||||
|                 ext_language = 'c++' | ||||
|             else: | ||||
|                 ext_language = 'c'  # default | ||||
|  | ||||
|             has_fortran = False | ||||
|             if 'f90' in ext_languages: | ||||
|                 ext_language = 'f90' | ||||
|                 has_fortran = True | ||||
|             elif 'f77' in ext_languages: | ||||
|                 ext_language = 'f77' | ||||
|                 has_fortran = True | ||||
|  | ||||
|             if not ext.language or has_fortran: | ||||
|                 if l and l != ext_language and ext.language: | ||||
|                     log.warn('resetting extension %r language from %r to %r.' % | ||||
|                              (ext.name, l, ext_language)) | ||||
|  | ||||
|                 ext.language = ext_language | ||||
|  | ||||
|             # global language | ||||
|             all_languages.update(ext_languages) | ||||
|  | ||||
|         need_f90_compiler = 'f90' in all_languages | ||||
|         need_f77_compiler = 'f77' in all_languages | ||||
|         need_cxx_compiler = 'c++' in all_languages | ||||
|  | ||||
|         # Initialize C++ compiler: | ||||
|         if need_cxx_compiler: | ||||
|             self._cxx_compiler = new_compiler(compiler=compiler_type, | ||||
|                                               verbose=self.verbose, | ||||
|                                               dry_run=self.dry_run, | ||||
|                                               force=self.force) | ||||
|             compiler = self._cxx_compiler | ||||
|             compiler.customize(self.distribution, need_cxx=need_cxx_compiler) | ||||
|             compiler.customize_cmd(self) | ||||
|             compiler.show_customization() | ||||
|             self._cxx_compiler = compiler.cxx_compiler() | ||||
|         else: | ||||
|             self._cxx_compiler = None | ||||
|  | ||||
|         # Initialize Fortran 77 compiler: | ||||
|         if need_f77_compiler: | ||||
|             ctype = self.fcompiler | ||||
|             self._f77_compiler = new_fcompiler(compiler=self.fcompiler, | ||||
|                                                verbose=self.verbose, | ||||
|                                                dry_run=self.dry_run, | ||||
|                                                force=self.force, | ||||
|                                                requiref90=False, | ||||
|                                                c_compiler=self.compiler) | ||||
|             fcompiler = self._f77_compiler | ||||
|             if fcompiler: | ||||
|                 ctype = fcompiler.compiler_type | ||||
|                 fcompiler.customize(self.distribution) | ||||
|             if fcompiler and fcompiler.get_version(): | ||||
|                 fcompiler.customize_cmd(self) | ||||
|                 fcompiler.show_customization() | ||||
|             else: | ||||
|                 self.warn('f77_compiler=%s is not available.' % | ||||
|                           (ctype)) | ||||
|                 self._f77_compiler = None | ||||
|         else: | ||||
|             self._f77_compiler = None | ||||
|  | ||||
|         # Initialize Fortran 90 compiler: | ||||
|         if need_f90_compiler: | ||||
|             ctype = self.fcompiler | ||||
|             self._f90_compiler = new_fcompiler(compiler=self.fcompiler, | ||||
|                                                verbose=self.verbose, | ||||
|                                                dry_run=self.dry_run, | ||||
|                                                force=self.force, | ||||
|                                                requiref90=True, | ||||
|                                                c_compiler=self.compiler) | ||||
|             fcompiler = self._f90_compiler | ||||
|             if fcompiler: | ||||
|                 ctype = fcompiler.compiler_type | ||||
|                 fcompiler.customize(self.distribution) | ||||
|             if fcompiler and fcompiler.get_version(): | ||||
|                 fcompiler.customize_cmd(self) | ||||
|                 fcompiler.show_customization() | ||||
|             else: | ||||
|                 self.warn('f90_compiler=%s is not available.' % | ||||
|                           (ctype)) | ||||
|                 self._f90_compiler = None | ||||
|         else: | ||||
|             self._f90_compiler = None | ||||
|  | ||||
|         # Build extensions | ||||
|         self.build_extensions() | ||||
|  | ||||
|         # Copy over any extra DLL files | ||||
|         # FIXME: In the case where there are more than two packages, | ||||
|         # we blindly assume that both packages need all of the libraries, | ||||
|         # resulting in a larger wheel than is required. This should be fixed, | ||||
|         # but it's so rare that I won't bother to handle it. | ||||
|         pkg_roots = { | ||||
|             self.get_ext_fullname(ext.name).split('.')[0] | ||||
|             for ext in self.extensions | ||||
|         } | ||||
|         for pkg_root in pkg_roots: | ||||
|             shared_lib_dir = os.path.join(pkg_root, '.libs') | ||||
|             if not self.inplace: | ||||
|                 shared_lib_dir = os.path.join(self.build_lib, shared_lib_dir) | ||||
|             for fn in os.listdir(self.extra_dll_dir): | ||||
|                 if not os.path.isdir(shared_lib_dir): | ||||
|                     os.makedirs(shared_lib_dir) | ||||
|                 if not fn.lower().endswith('.dll'): | ||||
|                     continue | ||||
|                 runtime_lib = os.path.join(self.extra_dll_dir, fn) | ||||
|                 copy_file(runtime_lib, shared_lib_dir) | ||||
|  | ||||
|     def swig_sources(self, sources, extensions=None): | ||||
|         # Do nothing. Swig sources have been handled in build_src command. | ||||
|         return sources | ||||
|  | ||||
|     def build_extension(self, ext): | ||||
|         sources = ext.sources | ||||
|         if sources is None or not is_sequence(sources): | ||||
|             raise DistutilsSetupError( | ||||
|                 ("in 'ext_modules' option (extension '%s'), " | ||||
|                  "'sources' must be present and must be " | ||||
|                  "a list of source filenames") % ext.name) | ||||
|         sources = list(sources) | ||||
|  | ||||
|         if not sources: | ||||
|             return | ||||
|  | ||||
|         fullname = self.get_ext_fullname(ext.name) | ||||
|         if self.inplace: | ||||
|             modpath = fullname.split('.') | ||||
|             package = '.'.join(modpath[0:-1]) | ||||
|             base = modpath[-1] | ||||
|             build_py = self.get_finalized_command('build_py') | ||||
|             package_dir = build_py.get_package_dir(package) | ||||
|             ext_filename = os.path.join(package_dir, | ||||
|                                         self.get_ext_filename(base)) | ||||
|         else: | ||||
|             ext_filename = os.path.join(self.build_lib, | ||||
|                                         self.get_ext_filename(fullname)) | ||||
|         depends = sources + ext.depends | ||||
|  | ||||
|         force_rebuild = self.force | ||||
|         if not self.disable_optimization and not self.compiler_opt.is_cached(): | ||||
|             log.debug("Detected changes on compiler optimizations") | ||||
|             force_rebuild = True | ||||
|         if not (force_rebuild or newer_group(depends, ext_filename, 'newer')): | ||||
|             log.debug("skipping '%s' extension (up-to-date)", ext.name) | ||||
|             return | ||||
|         else: | ||||
|             log.info("building '%s' extension", ext.name) | ||||
|  | ||||
|         extra_args = ext.extra_compile_args or [] | ||||
|         extra_cflags = getattr(ext, 'extra_c_compile_args', None) or [] | ||||
|         extra_cxxflags = getattr(ext, 'extra_cxx_compile_args', None) or [] | ||||
|  | ||||
|         macros = ext.define_macros[:] | ||||
|         for undef in ext.undef_macros: | ||||
|             macros.append((undef,)) | ||||
|  | ||||
|         c_sources, cxx_sources, f_sources, fmodule_sources = \ | ||||
|             filter_sources(ext.sources) | ||||
|  | ||||
|         if self.compiler.compiler_type == 'msvc': | ||||
|             if cxx_sources: | ||||
|                 # Needed to compile kiva.agg._agg extension. | ||||
|                 extra_args.append('/Zm1000') | ||||
|                 extra_cflags += extra_cxxflags | ||||
|             # this hack works around the msvc compiler attributes | ||||
|             # problem, msvc uses its own convention :( | ||||
|             c_sources += cxx_sources | ||||
|             cxx_sources = [] | ||||
|  | ||||
|         # Set Fortran/C++ compilers for compilation and linking. | ||||
|         if ext.language == 'f90': | ||||
|             fcompiler = self._f90_compiler | ||||
|         elif ext.language == 'f77': | ||||
|             fcompiler = self._f77_compiler | ||||
|         else:  # in case ext.language is c++, for instance | ||||
|             fcompiler = self._f90_compiler or self._f77_compiler | ||||
|         if fcompiler is not None: | ||||
|             fcompiler.extra_f77_compile_args = (ext.extra_f77_compile_args or []) if hasattr( | ||||
|                 ext, 'extra_f77_compile_args') else [] | ||||
|             fcompiler.extra_f90_compile_args = (ext.extra_f90_compile_args or []) if hasattr( | ||||
|                 ext, 'extra_f90_compile_args') else [] | ||||
|         cxx_compiler = self._cxx_compiler | ||||
|  | ||||
|         # check for the availability of required compilers | ||||
|         if cxx_sources and cxx_compiler is None: | ||||
|             raise DistutilsError("extension %r has C++ sources" | ||||
|                                  "but no C++ compiler found" % (ext.name)) | ||||
|         if (f_sources or fmodule_sources) and fcompiler is None: | ||||
|             raise DistutilsError("extension %r has Fortran sources " | ||||
|                                  "but no Fortran compiler found" % (ext.name)) | ||||
|         if ext.language in ['f77', 'f90'] and fcompiler is None: | ||||
|             self.warn("extension %r has Fortran libraries " | ||||
|                       "but no Fortran linker found, using default linker" % (ext.name)) | ||||
|         if ext.language == 'c++' and cxx_compiler is None: | ||||
|             self.warn("extension %r has C++ libraries " | ||||
|                       "but no C++ linker found, using default linker" % (ext.name)) | ||||
|  | ||||
|         kws = {'depends': ext.depends} | ||||
|         output_dir = self.build_temp | ||||
|  | ||||
|         include_dirs = ext.include_dirs + get_numpy_include_dirs() | ||||
|  | ||||
|         # Filter out C dispatch-table sources when optimization is enabled; | ||||
|         # when it is disabled they are treated as normal sources. | ||||
|         copt_c_sources = [] | ||||
|         copt_cxx_sources = [] | ||||
|         copt_baseline_flags = [] | ||||
|         copt_macros = [] | ||||
|         if not self.disable_optimization: | ||||
|             bsrc_dir = self.get_finalized_command("build_src").build_src | ||||
|             dispatch_hpath = os.path.join("numpy", "distutils", "include") | ||||
|             dispatch_hpath = os.path.join(bsrc_dir, dispatch_hpath) | ||||
|             include_dirs.append(dispatch_hpath) | ||||
|  | ||||
|             # copt_build_src = None if self.inplace else bsrc_dir | ||||
|             # Always generate the generated config files and | ||||
|             # dispatch-able sources inside the build directory, | ||||
|             # even if the build option `inplace` is enabled. | ||||
|             # This approach prevents conflicts with Meson-generated | ||||
|             # config headers. Since `spin build --clean` will not remove | ||||
|             # these headers, they might overwrite the generated Meson headers, | ||||
|             # causing compatibility issues. Maintaining separate directories | ||||
|             # ensures compatibility between distutils dispatch config headers | ||||
|             # and Meson headers, avoiding build disruptions. | ||||
|             # See gh-24450 for more details. | ||||
|             copt_build_src = bsrc_dir | ||||
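|             # Split SIMD dispatch sources out of the plain source lists: e.g. | ||||
|             # a hypothetical 'loops.dispatch.c' is moved from c_sources into | ||||
|             # copt_c_sources so that compiler_opt.try_dispatch() below builds | ||||
|             # it once per enabled CPU dispatch target instead of through the | ||||
|             # ordinary compile pass. | ||||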
|             for _srcs, _dst, _ext in ( | ||||
|                 ((c_sources,), copt_c_sources, ('.dispatch.c',)), | ||||
|                 ((c_sources, cxx_sources), copt_cxx_sources, | ||||
|                     ('.dispatch.cpp', '.dispatch.cxx')) | ||||
|             ): | ||||
|                 for _src in _srcs: | ||||
|                     _dst += [ | ||||
|                         _src.pop(_src.index(s)) | ||||
|                         for s in _src[:] if s.endswith(_ext) | ||||
|                     ] | ||||
|             copt_baseline_flags = self.compiler_opt.cpu_baseline_flags() | ||||
|         else: | ||||
|             copt_macros.append(("NPY_DISABLE_OPTIMIZATION", 1)) | ||||
|  | ||||
|         c_objects = [] | ||||
|         if copt_cxx_sources: | ||||
|             log.info("compiling C++ dispatch-able sources") | ||||
|             c_objects += self.compiler_opt.try_dispatch( | ||||
|                 copt_cxx_sources, | ||||
|                 output_dir=output_dir, | ||||
|                 src_dir=copt_build_src, | ||||
|                 macros=macros + copt_macros, | ||||
|                 include_dirs=include_dirs, | ||||
|                 debug=self.debug, | ||||
|                 extra_postargs=extra_args + extra_cxxflags, | ||||
|                 ccompiler=cxx_compiler, | ||||
|                 **kws | ||||
|             ) | ||||
|         if copt_c_sources: | ||||
|             log.info("compiling C dispatch-able sources") | ||||
|             c_objects += self.compiler_opt.try_dispatch( | ||||
|                 copt_c_sources, | ||||
|                 output_dir=output_dir, | ||||
|                 src_dir=copt_build_src, | ||||
|                 macros=macros + copt_macros, | ||||
|                 include_dirs=include_dirs, | ||||
|                 debug=self.debug, | ||||
|                 extra_postargs=extra_args + extra_cflags, | ||||
|                 **kws) | ||||
|         if c_sources: | ||||
|             log.info("compiling C sources") | ||||
|             c_objects += self.compiler.compile( | ||||
|                 c_sources, | ||||
|                 output_dir=output_dir, | ||||
|                 macros=macros + copt_macros, | ||||
|                 include_dirs=include_dirs, | ||||
|                 debug=self.debug, | ||||
|                 extra_postargs=(extra_args + copt_baseline_flags + | ||||
|                                 extra_cflags), | ||||
|                 **kws) | ||||
|         if cxx_sources: | ||||
|             log.info("compiling C++ sources") | ||||
|             c_objects += cxx_compiler.compile( | ||||
|                 cxx_sources, | ||||
|                 output_dir=output_dir, | ||||
|                 macros=macros + copt_macros, | ||||
|                 include_dirs=include_dirs, | ||||
|                 debug=self.debug, | ||||
|                 extra_postargs=(extra_args + copt_baseline_flags + | ||||
|                                 extra_cxxflags), | ||||
|                 **kws) | ||||
|  | ||||
|         extra_postargs = [] | ||||
|         f_objects = [] | ||||
|         if fmodule_sources: | ||||
|             log.info("compiling Fortran 90 module sources") | ||||
|             module_dirs = ext.module_dirs[:] | ||||
|             module_build_dir = os.path.join( | ||||
|                 self.build_temp, os.path.dirname( | ||||
|                     self.get_ext_filename(fullname))) | ||||
|  | ||||
|             self.mkpath(module_build_dir) | ||||
|             if fcompiler.module_dir_switch is None: | ||||
|                 existing_modules = glob('*.mod') | ||||
|             extra_postargs += fcompiler.module_options( | ||||
|                 module_dirs, module_build_dir) | ||||
|             f_objects += fcompiler.compile(fmodule_sources, | ||||
|                                            output_dir=self.build_temp, | ||||
|                                            macros=macros, | ||||
|                                            include_dirs=include_dirs, | ||||
|                                            debug=self.debug, | ||||
|                                            extra_postargs=extra_postargs, | ||||
|                                            depends=ext.depends) | ||||
|  | ||||
|             if fcompiler.module_dir_switch is None: | ||||
|                 for f in glob('*.mod'): | ||||
|                     if f in existing_modules: | ||||
|                         continue | ||||
|                     t = os.path.join(module_build_dir, f) | ||||
|                     if os.path.abspath(f) == os.path.abspath(t): | ||||
|                         continue | ||||
|                     if os.path.isfile(t): | ||||
|                         os.remove(t) | ||||
|                     try: | ||||
|                         self.move_file(f, module_build_dir) | ||||
|                     except DistutilsFileError: | ||||
|                         log.warn('failed to move %r to %r' % | ||||
|                                  (f, module_build_dir)) | ||||
|         if f_sources: | ||||
|             log.info("compiling Fortran sources") | ||||
|             f_objects += fcompiler.compile(f_sources, | ||||
|                                            output_dir=self.build_temp, | ||||
|                                            macros=macros, | ||||
|                                            include_dirs=include_dirs, | ||||
|                                            debug=self.debug, | ||||
|                                            extra_postargs=extra_postargs, | ||||
|                                            depends=ext.depends) | ||||
|  | ||||
|         if f_objects and not fcompiler.can_ccompiler_link(self.compiler): | ||||
|             unlinkable_fobjects = f_objects | ||||
|             objects = c_objects | ||||
|         else: | ||||
|             unlinkable_fobjects = [] | ||||
|             objects = c_objects + f_objects | ||||
|  | ||||
|         if ext.extra_objects: | ||||
|             objects.extend(ext.extra_objects) | ||||
|         extra_args = ext.extra_link_args or [] | ||||
|         libraries = self.get_libraries(ext)[:] | ||||
|         library_dirs = ext.library_dirs[:] | ||||
|  | ||||
|         linker = self.compiler.link_shared_object | ||||
|         # Always use system linker when using MSVC compiler. | ||||
|         if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'): | ||||
|             # expand libraries with fcompiler libraries as we are | ||||
|             # not using fcompiler linker | ||||
|             self._libs_with_msvc_and_fortran( | ||||
|                 fcompiler, libraries, library_dirs) | ||||
|             if ext.runtime_library_dirs: | ||||
|                 # gcc adds RPATH to the link. On windows, copy the dll into | ||||
|                 # self.extra_dll_dir instead. | ||||
|                 for d in ext.runtime_library_dirs: | ||||
|                     for f in glob(d + '/*.dll'): | ||||
|                         copy_file(f, self.extra_dll_dir) | ||||
|                 ext.runtime_library_dirs = [] | ||||
|  | ||||
|         elif ext.language in ['f77', 'f90'] and fcompiler is not None: | ||||
|             linker = fcompiler.link_shared_object | ||||
|         if ext.language == 'c++' and cxx_compiler is not None: | ||||
|             linker = cxx_compiler.link_shared_object | ||||
|  | ||||
|         if fcompiler is not None: | ||||
|             objects, libraries = self._process_unlinkable_fobjects( | ||||
|                     objects, libraries, | ||||
|                     fcompiler, library_dirs, | ||||
|                     unlinkable_fobjects) | ||||
|  | ||||
|         linker(objects, ext_filename, | ||||
|                libraries=libraries, | ||||
|                library_dirs=library_dirs, | ||||
|                runtime_library_dirs=ext.runtime_library_dirs, | ||||
|                extra_postargs=extra_args, | ||||
|                export_symbols=self.get_export_symbols(ext), | ||||
|                debug=self.debug, | ||||
|                build_temp=self.build_temp, | ||||
|                target_lang=ext.language) | ||||
|  | ||||
|     def _add_dummy_mingwex_sym(self, c_sources): | ||||
|         build_src = self.get_finalized_command("build_src").build_src | ||||
|         build_clib = self.get_finalized_command("build_clib").build_clib | ||||
|         objects = self.compiler.compile([os.path.join(build_src, | ||||
|                                                       "gfortran_vs2003_hack.c")], | ||||
|                                         output_dir=self.build_temp) | ||||
|         self.compiler.create_static_lib( | ||||
|             objects, "_gfortran_workaround", output_dir=build_clib, debug=self.debug) | ||||
|  | ||||
|     def _process_unlinkable_fobjects(self, objects, libraries, | ||||
|                                      fcompiler, library_dirs, | ||||
|                                      unlinkable_fobjects): | ||||
|         libraries = list(libraries) | ||||
|         objects = list(objects) | ||||
|         unlinkable_fobjects = list(unlinkable_fobjects) | ||||
|  | ||||
|         # Expand possible fake static libraries to objects; | ||||
|         # make sure to iterate over a copy of the list as | ||||
|         # "fake" libraries will be removed as they are | ||||
|         # encountered | ||||
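|         # (The '<name>.fobjects' / '<name>.cobjects' list files are assumed to | ||||
|         #  be plain text with one object path per line -- e.g. a hypothetical | ||||
|         #  'npymath.fobjects' listing Fortran objects the C compiler cannot | ||||
|         #  link directly -- written earlier by the build_clib command.) | ||||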
|         for lib in libraries[:]: | ||||
|             for libdir in library_dirs: | ||||
|                 fake_lib = os.path.join(libdir, lib + '.fobjects') | ||||
|                 if os.path.isfile(fake_lib): | ||||
|                     # Replace fake static library | ||||
|                     libraries.remove(lib) | ||||
|                     with open(fake_lib) as f: | ||||
|                         unlinkable_fobjects.extend(f.read().splitlines()) | ||||
|  | ||||
|                     # Expand C objects | ||||
|                     c_lib = os.path.join(libdir, lib + '.cobjects') | ||||
|                     with open(c_lib) as f: | ||||
|                         objects.extend(f.read().splitlines()) | ||||
|  | ||||
|         # Wrap unlinkable objects to a linkable one | ||||
|         if unlinkable_fobjects: | ||||
|             fobjects = [os.path.abspath(obj) for obj in unlinkable_fobjects] | ||||
|             wrapped = fcompiler.wrap_unlinkable_objects( | ||||
|                     fobjects, output_dir=self.build_temp, | ||||
|                     extra_dll_dir=self.extra_dll_dir) | ||||
|             objects.extend(wrapped) | ||||
|  | ||||
|         return objects, libraries | ||||
|  | ||||
|     def _libs_with_msvc_and_fortran(self, fcompiler, c_libraries, | ||||
|                                     c_library_dirs): | ||||
|         if fcompiler is None: | ||||
|             return | ||||
|  | ||||
|         for libname in c_libraries: | ||||
|             if libname.startswith('msvc'): | ||||
|                 continue | ||||
|             fileexists = False | ||||
|             for libdir in c_library_dirs or []: | ||||
|                 libfile = os.path.join(libdir, '%s.lib' % (libname)) | ||||
|                 if os.path.isfile(libfile): | ||||
|                     fileexists = True | ||||
|                     break | ||||
|             if fileexists: | ||||
|                 continue | ||||
|             # make g77-compiled static libs available to MSVC | ||||
|             fileexists = False | ||||
|             for libdir in c_library_dirs: | ||||
|                 libfile = os.path.join(libdir, 'lib%s.a' % (libname)) | ||||
|                 if os.path.isfile(libfile): | ||||
|                     # copy libname.a file to name.lib so that MSVC linker | ||||
|                     # can find it | ||||
|                     libfile2 = os.path.join(self.build_temp, libname + '.lib') | ||||
|                     copy_file(libfile, libfile2) | ||||
|                     if self.build_temp not in c_library_dirs: | ||||
|                         c_library_dirs.append(self.build_temp) | ||||
|                     fileexists = True | ||||
|                     break | ||||
|             if fileexists: | ||||
|                 continue | ||||
|             log.warn('could not find library %r in directories %s' | ||||
|                      % (libname, c_library_dirs)) | ||||
|  | ||||
|         # Always use system linker when using MSVC compiler. | ||||
|         f_lib_dirs = [] | ||||
|         for dir in fcompiler.library_dirs: | ||||
|             # correct path when compiling in Cygwin but with normal Win | ||||
|             # Python | ||||
|             if dir.startswith('/usr/lib'): | ||||
|                 try: | ||||
|                     dir = subprocess.check_output(['cygpath', '-w', dir]) | ||||
|                 except (OSError, subprocess.CalledProcessError): | ||||
|                     pass | ||||
|                 else: | ||||
|                     dir = filepath_from_subprocess_output(dir) | ||||
|             f_lib_dirs.append(dir) | ||||
|         c_library_dirs.extend(f_lib_dirs) | ||||
|  | ||||
|         # make g77-compiled static libs available to MSVC | ||||
|         for lib in fcompiler.libraries: | ||||
|             if not lib.startswith('msvc'): | ||||
|                 c_libraries.append(lib) | ||||
|                 p = combine_paths(f_lib_dirs, 'lib' + lib + '.a') | ||||
|                 if p: | ||||
|                     dst_name = os.path.join(self.build_temp, lib + '.lib') | ||||
|                     if not os.path.isfile(dst_name): | ||||
|                         copy_file(p[0], dst_name) | ||||
|                     if self.build_temp not in c_library_dirs: | ||||
|                         c_library_dirs.append(self.build_temp) | ||||
|  | ||||
|     def get_source_files(self): | ||||
|         self.check_extensions_list(self.extensions) | ||||
|         filenames = [] | ||||
|         for ext in self.extensions: | ||||
|             filenames.extend(get_ext_source_files(ext)) | ||||
|         return filenames | ||||
|  | ||||
|     def get_outputs(self): | ||||
|         self.check_extensions_list(self.extensions) | ||||
|  | ||||
|         outputs = [] | ||||
|         for ext in self.extensions: | ||||
|             if not ext.sources: | ||||
|                 continue | ||||
|             fullname = self.get_ext_fullname(ext.name) | ||||
|             outputs.append(os.path.join(self.build_lib, | ||||
|                                         self.get_ext_filename(fullname))) | ||||
|         return outputs | ||||
| @ -0,0 +1,31 @@ | ||||
| from distutils.command.build_py import build_py as old_build_py | ||||
| from numpy.distutils.misc_util import is_string | ||||
|  | ||||
| class build_py(old_build_py): | ||||
|  | ||||
|     def run(self): | ||||
|         build_src = self.get_finalized_command('build_src') | ||||
|         if build_src.py_modules_dict and self.packages is None: | ||||
|             self.packages = list(build_src.py_modules_dict.keys()) | ||||
|         old_build_py.run(self) | ||||
|  | ||||
|     def find_package_modules(self, package, package_dir): | ||||
|         modules = old_build_py.find_package_modules(self, package, package_dir) | ||||
|  | ||||
|         # Find build_src generated *.py files. | ||||
|         build_src = self.get_finalized_command('build_src') | ||||
|         modules += build_src.py_modules_dict.get(package, []) | ||||
|  | ||||
|         return modules | ||||
|  | ||||
|     def find_modules(self): | ||||
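|         # The (package, module, source) 3-tuples that build_src appends to | ||||
|         # py_modules are not understood by the stock distutils find_modules(), | ||||
|         # so they are temporarily filtered out (only plain string entries are | ||||
|         # kept) and the original list is restored afterwards. | ||||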
|         old_py_modules = self.py_modules[:] | ||||
|         new_py_modules = [_m for _m in self.py_modules if is_string(_m)] | ||||
|         self.py_modules[:] = new_py_modules | ||||
|         modules = old_build_py.find_modules(self) | ||||
|         self.py_modules[:] = old_py_modules | ||||
|  | ||||
|         return modules | ||||
|  | ||||
|     # XXX: Fix find_source_files for items in py_modules such that each item | ||||
|     # is a 3-tuple and item[2] is the source file. | ||||
| @ -0,0 +1,49 @@ | ||||
| """ Modified version of build_scripts that handles building scripts from functions. | ||||
|  | ||||
| """ | ||||
| from distutils.command.build_scripts import build_scripts as old_build_scripts | ||||
| from numpy.distutils import log | ||||
| from numpy.distutils.misc_util import is_string | ||||
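|  | ||||
| # A script entry handled below may be either a plain filename or a callable | ||||
| # that receives the build directory and returns the generated script path (or | ||||
| # a list of paths).  Illustrative sketch -- the function and file names are | ||||
| # made up: | ||||
| # | ||||
| #     def make_hello_script(build_dir): | ||||
| #         import os | ||||
| #         target = os.path.join(build_dir, 'hello') | ||||
| #         with open(target, 'w') as f: | ||||
| #             f.write('#!/usr/bin/env python3\nprint("hello")\n') | ||||
| #         return target | ||||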
|  | ||||
| class build_scripts(old_build_scripts): | ||||
|  | ||||
|     def generate_scripts(self, scripts): | ||||
|         new_scripts = [] | ||||
|         func_scripts = [] | ||||
|         for script in scripts: | ||||
|             if is_string(script): | ||||
|                 new_scripts.append(script) | ||||
|             else: | ||||
|                 func_scripts.append(script) | ||||
|         if not func_scripts: | ||||
|             return new_scripts | ||||
|  | ||||
|         build_dir = self.build_dir | ||||
|         self.mkpath(build_dir) | ||||
|         for func in func_scripts: | ||||
|             script = func(build_dir) | ||||
|             if not script: | ||||
|                 continue | ||||
|             if is_string(script): | ||||
|                 log.info("  adding '%s' to scripts" % (script,)) | ||||
|                 new_scripts.append(script) | ||||
|             else: | ||||
|                 [log.info("  adding '%s' to scripts" % (s,)) for s in script] | ||||
|                 new_scripts.extend(list(script)) | ||||
|         return new_scripts | ||||
|  | ||||
|     def run(self): | ||||
|         if not self.scripts: | ||||
|             return | ||||
|  | ||||
|         self.scripts = self.generate_scripts(self.scripts) | ||||
|         # Now make sure that the distribution object has this list of scripts. | ||||
|         # setuptools' develop command requires that this be a list of filenames, | ||||
|         # not functions. | ||||
|         self.distribution.scripts = self.scripts | ||||
|  | ||||
|         return old_build_scripts.run(self) | ||||
|  | ||||
|     def get_source_files(self): | ||||
|         from numpy.distutils.misc_util import get_script_files | ||||
|         return get_script_files(self.scripts) | ||||
| @ -0,0 +1,773 @@ | ||||
| """ Build swig and f2py sources. | ||||
| """ | ||||
| import os | ||||
| import re | ||||
| import sys | ||||
| import shlex | ||||
| import copy | ||||
|  | ||||
| from distutils.command import build_ext | ||||
| from distutils.dep_util import newer_group, newer | ||||
| from distutils.util import get_platform | ||||
| from distutils.errors import DistutilsError, DistutilsSetupError | ||||
|  | ||||
|  | ||||
| # this import can't be done here, as it uses numpy stuff only available | ||||
| # after it's installed | ||||
| #import numpy.f2py | ||||
| from numpy.distutils import log | ||||
| from numpy.distutils.misc_util import ( | ||||
|     fortran_ext_match, appendpath, is_string, is_sequence, get_cmd | ||||
|     ) | ||||
| from numpy.distutils.from_template import process_file as process_f_file | ||||
| from numpy.distutils.conv_template import process_file as process_c_file | ||||
|  | ||||
| def subst_vars(target, source, d): | ||||
|     """Substitute any occurrence of @foo@ by d['foo'] from source file into | ||||
|     target.""" | ||||
|     var = re.compile('@([a-zA-Z_]+)@') | ||||
|     with open(source, 'r') as fs: | ||||
|         with open(target, 'w') as ft: | ||||
|             for l in fs: | ||||
|                 m = var.search(l) | ||||
|                 if m: | ||||
|                     ft.write(l.replace('@%s@' % m.group(1), d[m.group(1)])) | ||||
|                 else: | ||||
|                     ft.write(l) | ||||
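|  | ||||
| # Example (illustrative): with d={'prefix': '/usr/local'}, a template line | ||||
| # ``prefix=@prefix@`` is written out as ``prefix=/usr/local``.  Only the first | ||||
| # placeholder name found on a line is substituted (every occurrence of it); | ||||
| # different @name@ tokens later on the same line are left as-is. | ||||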
|  | ||||
| class build_src(build_ext.build_ext): | ||||
|  | ||||
|     description = "build sources from SWIG, F2PY files or a function" | ||||
|  | ||||
|     user_options = [ | ||||
|         ('build-src=', 'd', "directory to \"build\" sources to"), | ||||
|         ('f2py-opts=', None, "list of f2py command line options"), | ||||
|         ('swig=', None, "path to the SWIG executable"), | ||||
|         ('swig-opts=', None, "list of SWIG command line options"), | ||||
|         ('swig-cpp', None, "make SWIG create C++ files (default is autodetected from sources)"), | ||||
|         ('f2pyflags=', None, "additional flags to f2py (use --f2py-opts= instead)"), # obsolete | ||||
|         ('swigflags=', None, "additional flags to swig (use --swig-opts= instead)"), # obsolete | ||||
|         ('force', 'f', "forcibly build everything (ignore file timestamps)"), | ||||
|         ('inplace', 'i', | ||||
|          "ignore build-lib and put compiled extensions into the source " | ||||
|          "directory alongside your pure Python modules"), | ||||
|         ('verbose-cfg', None, | ||||
|          "change logging level from WARN to INFO which will show all " | ||||
|          "compiler output") | ||||
|         ] | ||||
|  | ||||
|     boolean_options = ['force', 'inplace', 'verbose-cfg'] | ||||
|  | ||||
|     help_options = [] | ||||
|  | ||||
|     def initialize_options(self): | ||||
|         self.extensions = None | ||||
|         self.package = None | ||||
|         self.py_modules = None | ||||
|         self.py_modules_dict = None | ||||
|         self.build_src = None | ||||
|         self.build_lib = None | ||||
|         self.build_base = None | ||||
|         self.force = None | ||||
|         self.inplace = None | ||||
|         self.package_dir = None | ||||
|         self.f2pyflags = None # obsolete | ||||
|         self.f2py_opts = None | ||||
|         self.swigflags = None # obsolete | ||||
|         self.swig_opts = None | ||||
|         self.swig_cpp = None | ||||
|         self.swig = None | ||||
|         self.verbose_cfg = None | ||||
|  | ||||
|     def finalize_options(self): | ||||
|         self.set_undefined_options('build', | ||||
|                                    ('build_base', 'build_base'), | ||||
|                                    ('build_lib', 'build_lib'), | ||||
|                                    ('force', 'force')) | ||||
|         if self.package is None: | ||||
|             self.package = self.distribution.ext_package | ||||
|         self.extensions = self.distribution.ext_modules | ||||
|         self.libraries = self.distribution.libraries or [] | ||||
|         self.py_modules = self.distribution.py_modules or [] | ||||
|         self.data_files = self.distribution.data_files or [] | ||||
|  | ||||
|         if self.build_src is None: | ||||
|             plat_specifier = ".{}-{}.{}".format(get_platform(), *sys.version_info[:2]) | ||||
|             self.build_src = os.path.join(self.build_base, 'src'+plat_specifier) | ||||
|  | ||||
|         # py_modules_dict is used in build_py.find_package_modules | ||||
|         self.py_modules_dict = {} | ||||
|  | ||||
|         if self.f2pyflags: | ||||
|             if self.f2py_opts: | ||||
|                 log.warn('ignoring --f2pyflags as --f2py-opts already used') | ||||
|             else: | ||||
|                 self.f2py_opts = self.f2pyflags | ||||
|             self.f2pyflags = None | ||||
|         if self.f2py_opts is None: | ||||
|             self.f2py_opts = [] | ||||
|         else: | ||||
|             self.f2py_opts = shlex.split(self.f2py_opts) | ||||
|  | ||||
|         if self.swigflags: | ||||
|             if self.swig_opts: | ||||
|                 log.warn('ignoring --swigflags as --swig-opts already used') | ||||
|             else: | ||||
|                 self.swig_opts = self.swigflags | ||||
|             self.swigflags = None | ||||
|  | ||||
|         if self.swig_opts is None: | ||||
|             self.swig_opts = [] | ||||
|         else: | ||||
|             self.swig_opts = shlex.split(self.swig_opts) | ||||
|  | ||||
|         # use options from build_ext command | ||||
|         build_ext = self.get_finalized_command('build_ext') | ||||
|         if self.inplace is None: | ||||
|             self.inplace = build_ext.inplace | ||||
|         if self.swig_cpp is None: | ||||
|             self.swig_cpp = build_ext.swig_cpp | ||||
|         for c in ['swig', 'swig_opt']: | ||||
|             o = '--'+c.replace('_', '-') | ||||
|             v = getattr(build_ext, c, None) | ||||
|             if v: | ||||
|                 if getattr(self, c): | ||||
|                     log.warn('both build_src and build_ext define %s option' % (o)) | ||||
|                 else: | ||||
|                     log.info('using "%s=%s" option from build_ext command' % (o, v)) | ||||
|                     setattr(self, c, v) | ||||
|  | ||||
|     def run(self): | ||||
|         log.info("build_src") | ||||
|         if not (self.extensions or self.libraries): | ||||
|             return | ||||
|         self.build_sources() | ||||
|  | ||||
|     def build_sources(self): | ||||
|  | ||||
|         if self.inplace: | ||||
|             self.get_package_dir = \ | ||||
|                      self.get_finalized_command('build_py').get_package_dir | ||||
|  | ||||
|         self.build_py_modules_sources() | ||||
|  | ||||
|         for libname_info in self.libraries: | ||||
|             self.build_library_sources(*libname_info) | ||||
|  | ||||
|         if self.extensions: | ||||
|             self.check_extensions_list(self.extensions) | ||||
|  | ||||
|             for ext in self.extensions: | ||||
|                 self.build_extension_sources(ext) | ||||
|  | ||||
|         self.build_data_files_sources() | ||||
|         self.build_npy_pkg_config() | ||||
|  | ||||
|     def build_data_files_sources(self): | ||||
|         if not self.data_files: | ||||
|             return | ||||
|         log.info('building data_files sources') | ||||
|         from numpy.distutils.misc_util import get_data_files | ||||
|         new_data_files = [] | ||||
|         for data in self.data_files: | ||||
|             if isinstance(data, str): | ||||
|                 new_data_files.append(data) | ||||
|             elif isinstance(data, tuple): | ||||
|                 d, files = data | ||||
|                 if self.inplace: | ||||
|                     build_dir = self.get_package_dir('.'.join(d.split(os.sep))) | ||||
|                 else: | ||||
|                     build_dir = os.path.join(self.build_src, d) | ||||
|                 funcs = [f for f in files if hasattr(f, '__call__')] | ||||
|                 files = [f for f in files if not hasattr(f, '__call__')] | ||||
|                 for f in funcs: | ||||
|                     if f.__code__.co_argcount==1: | ||||
|                         s = f(build_dir) | ||||
|                     else: | ||||
|                         s = f() | ||||
|                     if s is not None: | ||||
|                         if isinstance(s, list): | ||||
|                             files.extend(s) | ||||
|                         elif isinstance(s, str): | ||||
|                             files.append(s) | ||||
|                         else: | ||||
|                             raise TypeError(repr(s)) | ||||
|                 filenames = get_data_files((d, files)) | ||||
|                 new_data_files.append((d, filenames)) | ||||
|             else: | ||||
|                 raise TypeError(repr(data)) | ||||
|         self.data_files[:] = new_data_files | ||||
|  | ||||
|  | ||||
|     def _build_npy_pkg_config(self, info, gd): | ||||
|         template, install_dir, subst_dict = info | ||||
|         template_dir = os.path.dirname(template) | ||||
|         for k, v in gd.items(): | ||||
|             subst_dict[k] = v | ||||
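|         # e.g. (illustrative) a template 'mypkg/npymath.ini.in' is rendered to | ||||
|         # 'npymath.ini' with its @var@ placeholders filled from subst_dict, | ||||
|         # which has just been updated with the per-package values in gd. | ||||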
|  | ||||
|         if self.inplace == 1: | ||||
|             generated_dir = os.path.join(template_dir, install_dir) | ||||
|         else: | ||||
|             generated_dir = os.path.join(self.build_src, template_dir, | ||||
|                     install_dir) | ||||
|         generated = os.path.basename(os.path.splitext(template)[0]) | ||||
|         generated_path = os.path.join(generated_dir, generated) | ||||
|         if not os.path.exists(generated_dir): | ||||
|             os.makedirs(generated_dir) | ||||
|  | ||||
|         subst_vars(generated_path, template, subst_dict) | ||||
|  | ||||
|         # Where to install, relative to the install prefix | ||||
|         full_install_dir = os.path.join(template_dir, install_dir) | ||||
|         return full_install_dir, generated_path | ||||
|  | ||||
|     def build_npy_pkg_config(self): | ||||
|         log.info('build_src: building npy-pkg config files') | ||||
|  | ||||
|         # XXX: another ugly workaround to circumvent distutils brain damage. We | ||||
|         # need the install prefix here, but finalizing the options of the | ||||
|         # install command when only building sources causes an error. Instead, | ||||
|         # we copy the install command instance and finalize the copy, so that | ||||
|         # it does not disrupt how distutils wants to do things with the | ||||
|         # original install command instance. | ||||
|         install_cmd = copy.copy(get_cmd('install')) | ||||
|         if not install_cmd.finalized == 1: | ||||
|             install_cmd.finalize_options() | ||||
|         build_npkg = False | ||||
|         if self.inplace == 1: | ||||
|             top_prefix = '.' | ||||
|             build_npkg = True | ||||
|         elif hasattr(install_cmd, 'install_libbase'): | ||||
|             top_prefix = install_cmd.install_libbase | ||||
|             build_npkg = True | ||||
|  | ||||
|         if build_npkg: | ||||
|             for pkg, infos in self.distribution.installed_pkg_config.items(): | ||||
|                 pkg_path = self.distribution.package_dir[pkg] | ||||
|                 prefix = os.path.join(os.path.abspath(top_prefix), pkg_path) | ||||
|                 d = {'prefix': prefix} | ||||
|                 for info in infos: | ||||
|                     install_dir, generated = self._build_npy_pkg_config(info, d) | ||||
|                     self.distribution.data_files.append((install_dir, | ||||
|                         [generated])) | ||||
|  | ||||
|     def build_py_modules_sources(self): | ||||
|         if not self.py_modules: | ||||
|             return | ||||
|         log.info('building py_modules sources') | ||||
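|         # Entries may be plain module names or (package, module_base, source) | ||||
|         # triples, where source is either a filename or a callable that gets | ||||
|         # the target path and returns the actual source file (or None to skip), | ||||
|         # e.g. ('mypkg', '__config__', generate_config_py) -- names illustrative. | ||||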
|         new_py_modules = [] | ||||
|         for source in self.py_modules: | ||||
|             if is_sequence(source) and len(source)==3: | ||||
|                 package, module_base, source = source | ||||
|                 if self.inplace: | ||||
|                     build_dir = self.get_package_dir(package) | ||||
|                 else: | ||||
|                     build_dir = os.path.join(self.build_src, | ||||
|                                              os.path.join(*package.split('.'))) | ||||
|                 if hasattr(source, '__call__'): | ||||
|                     target = os.path.join(build_dir, module_base + '.py') | ||||
|                     source = source(target) | ||||
|                 if source is None: | ||||
|                     continue | ||||
|                 modules = [(package, module_base, source)] | ||||
|                 if package not in self.py_modules_dict: | ||||
|                     self.py_modules_dict[package] = [] | ||||
|                 self.py_modules_dict[package] += modules | ||||
|             else: | ||||
|                 new_py_modules.append(source) | ||||
|         self.py_modules[:] = new_py_modules | ||||
|  | ||||
|     def build_library_sources(self, lib_name, build_info): | ||||
|         sources = list(build_info.get('sources', [])) | ||||
|  | ||||
|         if not sources: | ||||
|             return | ||||
|  | ||||
|         log.info('building library "%s" sources' % (lib_name)) | ||||
|  | ||||
|         sources = self.generate_sources(sources, (lib_name, build_info)) | ||||
|  | ||||
|         sources = self.template_sources(sources, (lib_name, build_info)) | ||||
|  | ||||
|         sources, h_files = self.filter_h_files(sources) | ||||
|  | ||||
|         if h_files: | ||||
|             log.info('%s - nothing done with h_files = %s', | ||||
|                      self.package, h_files) | ||||
|  | ||||
|         #for f in h_files: | ||||
|         #    self.distribution.headers.append((lib_name,f)) | ||||
|  | ||||
|         build_info['sources'] = sources | ||||
|         return | ||||
|  | ||||
|     def build_extension_sources(self, ext): | ||||
|  | ||||
|         sources = list(ext.sources) | ||||
|  | ||||
|         log.info('building extension "%s" sources' % (ext.name)) | ||||
|  | ||||
|         fullname = self.get_ext_fullname(ext.name) | ||||
|  | ||||
|         modpath = fullname.split('.') | ||||
|         package = '.'.join(modpath[0:-1]) | ||||
|  | ||||
|         if self.inplace: | ||||
|             self.ext_target_dir = self.get_package_dir(package) | ||||
|  | ||||
|         sources = self.generate_sources(sources, ext) | ||||
|         sources = self.template_sources(sources, ext) | ||||
|         sources = self.swig_sources(sources, ext) | ||||
|         sources = self.f2py_sources(sources, ext) | ||||
|         sources = self.pyrex_sources(sources, ext) | ||||
|  | ||||
|         sources, py_files = self.filter_py_files(sources) | ||||
|  | ||||
|         if package not in self.py_modules_dict: | ||||
|             self.py_modules_dict[package] = [] | ||||
|         modules = [] | ||||
|         for f in py_files: | ||||
|             module = os.path.splitext(os.path.basename(f))[0] | ||||
|             modules.append((package, module, f)) | ||||
|         self.py_modules_dict[package] += modules | ||||
|  | ||||
|         sources, h_files = self.filter_h_files(sources) | ||||
|  | ||||
|         if h_files: | ||||
|             log.info('%s - nothing done with h_files = %s', | ||||
|                      package, h_files) | ||||
|         #for f in h_files: | ||||
|         #    self.distribution.headers.append((package,f)) | ||||
|  | ||||
|         ext.sources = sources | ||||
|  | ||||
|     def generate_sources(self, sources, extension): | ||||
|         new_sources = [] | ||||
|         func_sources = [] | ||||
|         for source in sources: | ||||
|             if is_string(source): | ||||
|                 new_sources.append(source) | ||||
|             else: | ||||
|                 func_sources.append(source) | ||||
|         if not func_sources: | ||||
|             return new_sources | ||||
|         if self.inplace and not is_sequence(extension): | ||||
|             build_dir = self.ext_target_dir | ||||
|         else: | ||||
|             if is_sequence(extension): | ||||
|                 name = extension[0] | ||||
|             #    if 'include_dirs' not in extension[1]: | ||||
|             #        extension[1]['include_dirs'] = [] | ||||
|             #    incl_dirs = extension[1]['include_dirs'] | ||||
|             else: | ||||
|                 name = extension.name | ||||
|             #    incl_dirs = extension.include_dirs | ||||
|             #if self.build_src not in incl_dirs: | ||||
|             #    incl_dirs.append(self.build_src) | ||||
|             build_dir = os.path.join(*([self.build_src] | ||||
|                                        +name.split('.')[:-1])) | ||||
|         self.mkpath(build_dir) | ||||
|  | ||||
|         if self.verbose_cfg: | ||||
|             new_level = log.INFO | ||||
|         else: | ||||
|             new_level = log.WARN | ||||
|         old_level = log.set_threshold(new_level) | ||||
|  | ||||
|         for func in func_sources: | ||||
|             source = func(extension, build_dir) | ||||
|             if not source: | ||||
|                 continue | ||||
|             if is_sequence(source): | ||||
|                 [log.info("  adding '%s' to sources." % (s,)) for s in source] | ||||
|                 new_sources.extend(source) | ||||
|             else: | ||||
|                 log.info("  adding '%s' to sources." % (source,)) | ||||
|                 new_sources.append(source) | ||||
|         log.set_threshold(old_level) | ||||
|         return new_sources | ||||
|  | ||||
|     def filter_py_files(self, sources): | ||||
|         return self.filter_files(sources, ['.py']) | ||||
|  | ||||
|     def filter_h_files(self, sources): | ||||
|         return self.filter_files(sources, ['.h', '.hpp', '.inc']) | ||||
|  | ||||
|     def filter_files(self, sources, exts=[]): | ||||
|         new_sources = [] | ||||
|         files = [] | ||||
|         for source in sources: | ||||
|             (base, ext) = os.path.splitext(source) | ||||
|             if ext in exts: | ||||
|                 files.append(source) | ||||
|             else: | ||||
|                 new_sources.append(source) | ||||
|         return new_sources, files | ||||
|  | ||||
|     def template_sources(self, sources, extension): | ||||
|         new_sources = [] | ||||
|         if is_sequence(extension): | ||||
|             depends = extension[1].get('depends') | ||||
|             include_dirs = extension[1].get('include_dirs') | ||||
|         else: | ||||
|             depends = extension.depends | ||||
|             include_dirs = extension.include_dirs | ||||
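|         # The '.src' suffix is simply stripped to form the target, e.g. a | ||||
|         # hypothetical 'npy_math.c.src' expands to 'npy_math.c' (conv_template) | ||||
|         # and 'fblas.pyf.src' to 'fblas.pyf' (from_template). | ||||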
|         for source in sources: | ||||
|             (base, ext) = os.path.splitext(source) | ||||
|             if ext == '.src':  # Template file | ||||
|                 if self.inplace: | ||||
|                     target_dir = os.path.dirname(base) | ||||
|                 else: | ||||
|                     target_dir = appendpath(self.build_src, os.path.dirname(base)) | ||||
|                 self.mkpath(target_dir) | ||||
|                 target_file = os.path.join(target_dir, os.path.basename(base)) | ||||
|                 if (self.force or newer_group([source] + depends, target_file)): | ||||
|                     if _f_pyf_ext_match(base): | ||||
|                         log.info("from_template:> %s" % (target_file)) | ||||
|                         outstr = process_f_file(source) | ||||
|                     else: | ||||
|                         log.info("conv_template:> %s" % (target_file)) | ||||
|                         outstr = process_c_file(source) | ||||
|                     with open(target_file, 'w') as fid: | ||||
|                         fid.write(outstr) | ||||
|                 if _header_ext_match(target_file): | ||||
|                     d = os.path.dirname(target_file) | ||||
|                     if d not in include_dirs: | ||||
|                         log.info("  adding '%s' to include_dirs." % (d)) | ||||
|                         include_dirs.append(d) | ||||
|                 new_sources.append(target_file) | ||||
|             else: | ||||
|                 new_sources.append(source) | ||||
|         return new_sources | ||||
|  | ||||
|     def pyrex_sources(self, sources, extension): | ||||
|         """Pyrex not supported; this remains for Cython support (see below)""" | ||||
|         new_sources = [] | ||||
|         ext_name = extension.name.split('.')[-1] | ||||
|         for source in sources: | ||||
|             (base, ext) = os.path.splitext(source) | ||||
|             if ext == '.pyx': | ||||
|                 target_file = self.generate_a_pyrex_source(base, ext_name, | ||||
|                                                            source, | ||||
|                                                            extension) | ||||
|                 new_sources.append(target_file) | ||||
|             else: | ||||
|                 new_sources.append(source) | ||||
|         return new_sources | ||||
|  | ||||
|     def generate_a_pyrex_source(self, base, ext_name, source, extension): | ||||
|         """Pyrex is not supported, but some projects monkeypatch this method. | ||||
|  | ||||
|         That allows compiling Cython code, see gh-6955. | ||||
|         This method will remain here for compatibility reasons. | ||||
|         """ | ||||
|         return [] | ||||
|  | ||||
|     def f2py_sources(self, sources, extension): | ||||
|         new_sources = [] | ||||
|         f2py_sources = [] | ||||
|         f_sources = [] | ||||
|         f2py_targets = {} | ||||
|         target_dirs = [] | ||||
|         ext_name = extension.name.split('.')[-1] | ||||
|         skip_f2py = 0 | ||||
|  | ||||
|         for source in sources: | ||||
|             (base, ext) = os.path.splitext(source) | ||||
|             if ext == '.pyf': # F2PY interface file | ||||
|                 if self.inplace: | ||||
|                     target_dir = os.path.dirname(base) | ||||
|                 else: | ||||
|                     target_dir = appendpath(self.build_src, os.path.dirname(base)) | ||||
|                 if os.path.isfile(source): | ||||
|                     name = get_f2py_modulename(source) | ||||
|                     if name != ext_name: | ||||
|                         raise DistutilsSetupError('mismatch of extension names: %s ' | ||||
|                                                   'provides %r but expected %r' % ( | ||||
|                             source, name, ext_name)) | ||||
|                     target_file = os.path.join(target_dir, name+'module.c') | ||||
|                 else: | ||||
|                     log.debug('  source %s does not exist: skipping f2py\'ing.' \ | ||||
|                               % (source)) | ||||
|                     name = ext_name | ||||
|                     skip_f2py = 1 | ||||
|                     target_file = os.path.join(target_dir, name+'module.c') | ||||
|                     if not os.path.isfile(target_file): | ||||
|                         log.warn('  target %s does not exist:\n   '\ | ||||
|                                  'Assuming %smodule.c was generated with '\ | ||||
|                                  '"build_src --inplace" command.' \ | ||||
|                                  % (target_file, name)) | ||||
|                         target_dir = os.path.dirname(base) | ||||
|                         target_file = os.path.join(target_dir, name+'module.c') | ||||
|                         if not os.path.isfile(target_file): | ||||
|                             raise DistutilsSetupError("%r missing" % (target_file,)) | ||||
|                         log.info('   Yes! Using %r as up-to-date target.' \ | ||||
|                                  % (target_file)) | ||||
|                 target_dirs.append(target_dir) | ||||
|                 f2py_sources.append(source) | ||||
|                 f2py_targets[source] = target_file | ||||
|                 new_sources.append(target_file) | ||||
|             elif fortran_ext_match(ext): | ||||
|                 f_sources.append(source) | ||||
|             else: | ||||
|                 new_sources.append(source) | ||||
|  | ||||
|         if not (f2py_sources or f_sources): | ||||
|             return new_sources | ||||
|  | ||||
|         for d in target_dirs: | ||||
|             self.mkpath(d) | ||||
|  | ||||
|         f2py_options = extension.f2py_options + self.f2py_opts | ||||
|  | ||||
|         if self.distribution.libraries: | ||||
|             for name, build_info in self.distribution.libraries: | ||||
|                 if name in extension.libraries: | ||||
|                     f2py_options.extend(build_info.get('f2py_options', [])) | ||||
|  | ||||
|         log.info("f2py options: %s" % (f2py_options)) | ||||
|  | ||||
|         if f2py_sources: | ||||
|             if len(f2py_sources) != 1: | ||||
|                 raise DistutilsSetupError( | ||||
|                     'only one .pyf file is allowed per extension module but got'\ | ||||
|                     ' more: %r' % (f2py_sources,)) | ||||
|             source = f2py_sources[0] | ||||
|             target_file = f2py_targets[source] | ||||
|             target_dir = os.path.dirname(target_file) or '.' | ||||
|             depends = [source] + extension.depends | ||||
|             if (self.force or newer_group(depends, target_file, 'newer')) \ | ||||
|                    and not skip_f2py: | ||||
|                 log.info("f2py: %s" % (source)) | ||||
|                 from numpy.f2py import f2py2e | ||||
|                 f2py2e.run_main(f2py_options | ||||
|                                     + ['--build-dir', target_dir, source]) | ||||
|             else: | ||||
|                 log.debug("  skipping '%s' f2py interface (up-to-date)" % (source)) | ||||
|         else: | ||||
|             #XXX TODO: --inplace support for sdist command | ||||
|             if is_sequence(extension): | ||||
|                 name = extension[0] | ||||
|             else: name = extension.name | ||||
|             target_dir = os.path.join(*([self.build_src] | ||||
|                                         +name.split('.')[:-1])) | ||||
|             target_file = os.path.join(target_dir, ext_name + 'module.c') | ||||
|             new_sources.append(target_file) | ||||
|             depends = f_sources + extension.depends | ||||
|             if (self.force or newer_group(depends, target_file, 'newer')) \ | ||||
|                    and not skip_f2py: | ||||
|                 log.info("f2py:> %s" % (target_file)) | ||||
|                 self.mkpath(target_dir) | ||||
|                 from numpy.f2py import f2py2e | ||||
|                 f2py2e.run_main(f2py_options + ['--lower', | ||||
|                                                 '--build-dir', target_dir]+\ | ||||
|                                 ['-m', ext_name]+f_sources) | ||||
|             else: | ||||
|                 log.debug("  skipping f2py fortran files for '%s' (up-to-date)"\ | ||||
|                           % (target_file)) | ||||
|  | ||||
|         if not os.path.isfile(target_file): | ||||
|             raise DistutilsError("f2py target file %r not generated" % (target_file,)) | ||||
|  | ||||
|         build_dir = os.path.join(self.build_src, target_dir) | ||||
|         target_c = os.path.join(build_dir, 'fortranobject.c') | ||||
|         target_h = os.path.join(build_dir, 'fortranobject.h') | ||||
|         log.info("  adding '%s' to sources." % (target_c)) | ||||
|         new_sources.append(target_c) | ||||
|         if build_dir not in extension.include_dirs: | ||||
|             log.info("  adding '%s' to include_dirs." % (build_dir)) | ||||
|             extension.include_dirs.append(build_dir) | ||||
|  | ||||
|         if not skip_f2py: | ||||
|             import numpy.f2py | ||||
|             d = os.path.dirname(numpy.f2py.__file__) | ||||
|             source_c = os.path.join(d, 'src', 'fortranobject.c') | ||||
|             source_h = os.path.join(d, 'src', 'fortranobject.h') | ||||
|             if newer(source_c, target_c) or newer(source_h, target_h): | ||||
|                 self.mkpath(os.path.dirname(target_c)) | ||||
|                 self.copy_file(source_c, target_c) | ||||
|                 self.copy_file(source_h, target_h) | ||||
|         else: | ||||
|             if not os.path.isfile(target_c): | ||||
|                 raise DistutilsSetupError("f2py target_c file %r not found" % (target_c,)) | ||||
|             if not os.path.isfile(target_h): | ||||
|                 raise DistutilsSetupError("f2py target_h file %r not found" % (target_h,)) | ||||
|  | ||||
|         for name_ext in ['-f2pywrappers.f', '-f2pywrappers2.f90']: | ||||
|             filename = os.path.join(target_dir, ext_name + name_ext) | ||||
|             if os.path.isfile(filename): | ||||
|                 log.info("  adding '%s' to sources." % (filename)) | ||||
|                 f_sources.append(filename) | ||||
|  | ||||
|         return new_sources + f_sources | ||||
|  | ||||
|     def swig_sources(self, sources, extension): | ||||
|         # Assuming SWIG 1.3.14 or later. See compatibility note in | ||||
|         #   http://www.swig.org/Doc1.3/Python.html#Python_nn6 | ||||
|  | ||||
|         new_sources = [] | ||||
|         swig_sources = [] | ||||
|         swig_targets = {} | ||||
|         target_dirs = [] | ||||
|         py_files = []     # swig generated .py files | ||||
|         target_ext = '.c' | ||||
|         if '-c++' in extension.swig_opts: | ||||
|             typ = 'c++' | ||||
|             is_cpp = True | ||||
|             extension.swig_opts.remove('-c++') | ||||
|         elif self.swig_cpp: | ||||
|             typ = 'c++' | ||||
|             is_cpp = True | ||||
|         else: | ||||
|             typ = None | ||||
|             is_cpp = False | ||||
|         skip_swig = 0 | ||||
|         ext_name = extension.name.split('.')[-1] | ||||
|  | ||||
|         for source in sources: | ||||
|             (base, ext) = os.path.splitext(source) | ||||
|             if ext == '.i': # SWIG interface file | ||||
|                 # the code below assumes that the sources list | ||||
|                 # contains not more than one .i SWIG interface file | ||||
|                 if self.inplace: | ||||
|                     target_dir = os.path.dirname(base) | ||||
|                     py_target_dir = self.ext_target_dir | ||||
|                 else: | ||||
|                     target_dir = appendpath(self.build_src, os.path.dirname(base)) | ||||
|                     py_target_dir = target_dir | ||||
|                 if os.path.isfile(source): | ||||
|                     name = get_swig_modulename(source) | ||||
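|                     # SWIG convention: the compiled extension is '_<module>' | ||||
|                     # while the interface declares '%module <module>', hence | ||||
|                     # the comparison against ext_name[1:]. | ||||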
|                     if name != ext_name[1:]: | ||||
|                         raise DistutilsSetupError( | ||||
|                             'mismatch of extension names: %s provides %r' | ||||
|                             ' but expected %r' % (source, name, ext_name[1:])) | ||||
|                     if typ is None: | ||||
|                         typ = get_swig_target(source) | ||||
|                         is_cpp = typ=='c++' | ||||
|                     else: | ||||
|                         typ2 = get_swig_target(source) | ||||
|                         if typ2 is None: | ||||
|                             log.warn('source %r does not define swig target, assuming %s swig target' \ | ||||
|                                      % (source, typ)) | ||||
|                         elif typ!=typ2: | ||||
|                             log.warn('expected %r but source %r defines %r swig target' \ | ||||
|                                      % (typ, source, typ2)) | ||||
|                             if typ2=='c++': | ||||
|                                 log.warn('resetting swig target to c++ (some targets may have .c extension)') | ||||
|                                 is_cpp = True | ||||
|                             else: | ||||
|                                 log.warn('assuming that %r has c++ swig target' % (source)) | ||||
|                     if is_cpp: | ||||
|                         target_ext = '.cpp' | ||||
|                     target_file = os.path.join(target_dir, '%s_wrap%s' \ | ||||
|                                                % (name, target_ext)) | ||||
|                 else: | ||||
|                     log.warn('  source %s does not exist: skipping swig\'ing.' \ | ||||
|                              % (source)) | ||||
|                     name = ext_name[1:] | ||||
|                     skip_swig = 1 | ||||
|                     target_file = _find_swig_target(target_dir, name) | ||||
|                     if not os.path.isfile(target_file): | ||||
|                         log.warn('  target %s does not exist:\n   '\ | ||||
|                                  'Assuming %s_wrap.{c,cpp} was generated with '\ | ||||
|                                  '"build_src --inplace" command.' \ | ||||
|                                  % (target_file, name)) | ||||
|                         target_dir = os.path.dirname(base) | ||||
|                         target_file = _find_swig_target(target_dir, name) | ||||
|                         if not os.path.isfile(target_file): | ||||
|                             raise DistutilsSetupError("%r missing" % (target_file,)) | ||||
|                         log.warn('   Yes! Using %r as up-to-date target.' \ | ||||
|                                  % (target_file)) | ||||
|                 target_dirs.append(target_dir) | ||||
|                 new_sources.append(target_file) | ||||
|                 py_files.append(os.path.join(py_target_dir, name+'.py')) | ||||
|                 swig_sources.append(source) | ||||
|                 swig_targets[source] = new_sources[-1] | ||||
|             else: | ||||
|                 new_sources.append(source) | ||||
|  | ||||
|         if not swig_sources: | ||||
|             return new_sources | ||||
|  | ||||
|         if skip_swig: | ||||
|             return new_sources + py_files | ||||
|  | ||||
|         for d in target_dirs: | ||||
|             self.mkpath(d) | ||||
|  | ||||
|         swig = self.swig or self.find_swig() | ||||
|         swig_cmd = [swig, "-python"] + extension.swig_opts | ||||
|         if is_cpp: | ||||
|             swig_cmd.append('-c++') | ||||
|         for d in extension.include_dirs: | ||||
|             swig_cmd.append('-I'+d) | ||||
|         for source in swig_sources: | ||||
|             target = swig_targets[source] | ||||
|             depends = [source] + extension.depends | ||||
|             if self.force or newer_group(depends, target, 'newer'): | ||||
|                 log.info("%s: %s" % (os.path.basename(swig) \ | ||||
|                                      + (is_cpp and '++' or ''), source)) | ||||
|                 self.spawn(swig_cmd + self.swig_opts \ | ||||
|                            + ["-o", target, '-outdir', py_target_dir, source]) | ||||
|             else: | ||||
|                 log.debug("  skipping '%s' swig interface (up-to-date)" \ | ||||
|                          % (source)) | ||||
|  | ||||
|         return new_sources + py_files | ||||
|  | ||||
| _f_pyf_ext_match = re.compile(r'.*\.(f90|f95|f77|for|ftn|f|pyf)\Z', re.I).match | ||||
| _header_ext_match = re.compile(r'.*\.(inc|h|hpp)\Z', re.I).match | ||||
|  | ||||
| #### SWIG related auxiliary functions #### | ||||
| _swig_module_name_match = re.compile(r'\s*%module\s*(.*\(\s*package\s*=\s*"(?P<package>[\w_]+)".*\)|)\s*(?P<name>[\w_]+)', | ||||
|                                      re.I).match | ||||
| _has_c_header = re.compile(r'-\*-\s*c\s*-\*-', re.I).search | ||||
| _has_cpp_header = re.compile(r'-\*-\s*c\+\+\s*-\*-', re.I).search | ||||
|  | ||||
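| # Illustrative note: get_swig_target() inspects only the first line of the | ||||
| # interface file for an emacs-style mode cookie; e.g. (hypothetical file) a .i | ||||
| # whose first line is | ||||
| #     /* -*- c++ -*- */ | ||||
| # is treated as having a C++ wrapper target, while "-*- c -*-" selects C. | ||||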
| def get_swig_target(source): | ||||
|     with open(source) as f: | ||||
|         result = None | ||||
|         line = f.readline() | ||||
|         if _has_cpp_header(line): | ||||
|             result = 'c++' | ||||
|         if _has_c_header(line): | ||||
|             result = 'c' | ||||
|     return result | ||||
|  | ||||
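| # Illustrative note: get_swig_modulename() matches SWIG module declarations | ||||
| # such as (hypothetical examples) | ||||
| #     %module example | ||||
| #     %module(package="mypkg") example | ||||
| # and returns the bare module name ("example" in both cases). | ||||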
| def get_swig_modulename(source): | ||||
|     with open(source) as f: | ||||
|         name = None | ||||
|         for line in f: | ||||
|             m = _swig_module_name_match(line) | ||||
|             if m: | ||||
|                 name = m.group('name') | ||||
|                 break | ||||
|     return name | ||||
|  | ||||
| def _find_swig_target(target_dir, name): | ||||
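|     # Prefer an existing <name>_wrap.cpp over <name>_wrap.c; if neither file | ||||
|     # exists, the '.c' candidate is returned and callers check os.path.isfile(). | ||||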
|     for ext in ['.cpp', '.c']: | ||||
|         target = os.path.join(target_dir, '%s_wrap%s' % (name, ext)) | ||||
|         if os.path.isfile(target): | ||||
|             break | ||||
|     return target | ||||
|  | ||||
| #### F2PY related auxiliary functions #### | ||||
|  | ||||
| _f2py_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]+)', | ||||
|                                      re.I).match | ||||
| _f2py_user_module_name_match = re.compile(r'\s*python\s*module\s*(?P<name>[\w_]*?' | ||||
|                                           r'__user__[\w_]*)', re.I).match | ||||
|  | ||||
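| # Illustrative note: get_f2py_modulename() picks up declarations such as | ||||
| # "python module example" in a .pyf signature file (the name is illustrative), | ||||
| # while generated "python module example__user__routines" blocks are skipped. | ||||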
| def get_f2py_modulename(source): | ||||
|     name = None | ||||
|     with open(source) as f: | ||||
|         for line in f: | ||||
|             m = _f2py_module_name_match(line) | ||||
|             if m: | ||||
|                 if _f2py_user_module_name_match(line): # skip *__user__* names | ||||
|                     continue | ||||
|                 name = m.group('name') | ||||
|                 break | ||||
|     return name | ||||
|  | ||||
| ########################################## | ||||
							
								
								
									
lib/python3.11/site-packages/numpy/distutils/command/config.py (516 lines, new file)
							| @ -0,0 +1,516 @@ | ||||
| # Added Fortran compiler support to config. Currently useful only for the | ||||
| # try_compile call. try_run works but is untested for most Fortran | ||||
| # compilers (they must define linker_exe first). | ||||
| # Pearu Peterson | ||||
| import os | ||||
| import signal | ||||
| import subprocess | ||||
| import sys | ||||
| import textwrap | ||||
| import warnings | ||||
|  | ||||
| from distutils.command.config import config as old_config | ||||
| from distutils.command.config import LANG_EXT | ||||
| from distutils import log | ||||
| from distutils.file_util import copy_file | ||||
| from distutils.ccompiler import CompileError, LinkError | ||||
| import distutils | ||||
| from numpy.distutils.exec_command import filepath_from_subprocess_output | ||||
| from numpy.distutils.mingw32ccompiler import generate_manifest | ||||
| from numpy.distutils.command.autodist import (check_gcc_function_attribute, | ||||
|                                               check_gcc_function_attribute_with_intrinsics, | ||||
|                                               check_gcc_variable_attribute, | ||||
|                                               check_gcc_version_at_least, | ||||
|                                               check_inline, | ||||
|                                               check_restrict, | ||||
|                                               check_compiler_gcc) | ||||
|  | ||||
| LANG_EXT['f77'] = '.f' | ||||
| LANG_EXT['f90'] = '.f90' | ||||
|  | ||||
| class config(old_config): | ||||
|     old_config.user_options += [ | ||||
|         ('fcompiler=', None, "specify the Fortran compiler type"), | ||||
|         ] | ||||
|  | ||||
|     def initialize_options(self): | ||||
|         self.fcompiler = None | ||||
|         old_config.initialize_options(self) | ||||
|  | ||||
|     def _check_compiler (self): | ||||
|         old_config._check_compiler(self) | ||||
|         from numpy.distutils.fcompiler import FCompiler, new_fcompiler | ||||
|  | ||||
|         if sys.platform == 'win32' and (self.compiler.compiler_type in | ||||
|                                         ('msvc', 'intelw', 'intelemw')): | ||||
|             # XXX: hack to circumvent a python 2.6 bug with msvc9compiler: | ||||
|             # initialize() calls query_vcvarsall(), which throws an OSError and | ||||
|             # causes an error along the way without much information. We try to | ||||
|             # catch it here, hoping it is early enough, and print a helpful | ||||
|             # message instead of Error: None. | ||||
|             if not self.compiler.initialized: | ||||
|                 try: | ||||
|                     self.compiler.initialize() | ||||
|                 except OSError as e: | ||||
|                     msg = textwrap.dedent("""\ | ||||
|                         Could not initialize compiler instance: do you have Visual Studio | ||||
|                         installed?  If you are trying to build with MinGW, please use "python setup.py | ||||
|                         build -c mingw32" instead.  If you have Visual Studio installed, check it is | ||||
|                         correctly installed, and the right version (VS 2015 as of this writing). | ||||
|  | ||||
|                         Original exception was: %s, and the Compiler class was %s | ||||
|                         ============================================================================""") \ | ||||
|                         % (e, self.compiler.__class__.__name__) | ||||
|                     print(textwrap.dedent("""\ | ||||
|                         ============================================================================""")) | ||||
|                     raise distutils.errors.DistutilsPlatformError(msg) from e | ||||
|  | ||||
|             # After MSVC is initialized, add an explicit /MANIFEST to linker | ||||
|             # flags.  See issues gh-4245 and gh-4101 for details.  Also | ||||
|             # relevant are issues 4431 and 16296 on the Python bug tracker. | ||||
|             from distutils import msvc9compiler | ||||
|             if msvc9compiler.get_build_version() >= 10: | ||||
|                 for ldflags in [self.compiler.ldflags_shared, | ||||
|                                 self.compiler.ldflags_shared_debug]: | ||||
|                     if '/MANIFEST' not in ldflags: | ||||
|                         ldflags.append('/MANIFEST') | ||||
|  | ||||
|         if not isinstance(self.fcompiler, FCompiler): | ||||
|             self.fcompiler = new_fcompiler(compiler=self.fcompiler, | ||||
|                                            dry_run=self.dry_run, force=1, | ||||
|                                            c_compiler=self.compiler) | ||||
|             if self.fcompiler is not None: | ||||
|                 self.fcompiler.customize(self.distribution) | ||||
|                 if self.fcompiler.get_version(): | ||||
|                     self.fcompiler.customize_cmd(self) | ||||
|                     self.fcompiler.show_customization() | ||||
|  | ||||
|     def _wrap_method(self, mth, lang, args): | ||||
|         from distutils.ccompiler import CompileError | ||||
|         from distutils.errors import DistutilsExecError | ||||
|         save_compiler = self.compiler | ||||
|         if lang in ['f77', 'f90']: | ||||
|             self.compiler = self.fcompiler | ||||
|         if self.compiler is None: | ||||
|             raise CompileError('%s compiler is not set' % (lang,)) | ||||
|         try: | ||||
|             ret = mth(*((self,)+args)) | ||||
|         except (DistutilsExecError, CompileError) as e: | ||||
|             self.compiler = save_compiler | ||||
|             raise CompileError from e | ||||
|         self.compiler = save_compiler | ||||
|         return ret | ||||
|  | ||||
|     def _compile (self, body, headers, include_dirs, lang): | ||||
|         src, obj = self._wrap_method(old_config._compile, lang, | ||||
|                                      (body, headers, include_dirs, lang)) | ||||
|         # _compile in unixcompiler.py sometimes creates .d dependency files. | ||||
|         # Clean them up. | ||||
|         self.temp_files.append(obj + '.d') | ||||
|         return src, obj | ||||
|  | ||||
|     def _link (self, body, | ||||
|                headers, include_dirs, | ||||
|                libraries, library_dirs, lang): | ||||
|         if self.compiler.compiler_type=='msvc': | ||||
|             libraries = (libraries or [])[:] | ||||
|             library_dirs = (library_dirs or [])[:] | ||||
|             if lang in ['f77', 'f90']: | ||||
|                 lang = 'c' # always use system linker when using MSVC compiler | ||||
|                 if self.fcompiler: | ||||
|                     for d in self.fcompiler.library_dirs or []: | ||||
|                         # correct path when compiling in Cygwin but with | ||||
|                         # normal Win Python | ||||
|                         if d.startswith('/usr/lib'): | ||||
|                             try: | ||||
|                                 d = subprocess.check_output(['cygpath', | ||||
|                                                              '-w', d]) | ||||
|                             except (OSError, subprocess.CalledProcessError): | ||||
|                                 pass | ||||
|                             else: | ||||
|                                 d = filepath_from_subprocess_output(d) | ||||
|                         library_dirs.append(d) | ||||
|                     for libname in self.fcompiler.libraries or []: | ||||
|                         if libname not in libraries: | ||||
|                             libraries.append(libname) | ||||
|             for libname in libraries: | ||||
|                 if libname.startswith('msvc'): continue | ||||
|                 fileexists = False | ||||
|                 for libdir in library_dirs or []: | ||||
|                     libfile = os.path.join(libdir, '%s.lib' % (libname)) | ||||
|                     if os.path.isfile(libfile): | ||||
|                         fileexists = True | ||||
|                         break | ||||
|                 if fileexists: continue | ||||
|                 # make g77-compiled static libs available to MSVC | ||||
|                 fileexists = False | ||||
|                 for libdir in library_dirs: | ||||
|                     libfile = os.path.join(libdir, 'lib%s.a' % (libname)) | ||||
|                     if os.path.isfile(libfile): | ||||
|                         # copy libname.a file to name.lib so that MSVC linker | ||||
|                         # can find it | ||||
|                         libfile2 = os.path.join(libdir, '%s.lib' % (libname)) | ||||
|                         copy_file(libfile, libfile2) | ||||
|                         self.temp_files.append(libfile2) | ||||
|                         fileexists = True | ||||
|                         break | ||||
|                 if fileexists: continue | ||||
|                 log.warn('could not find library %r in directories %s' \ | ||||
|                          % (libname, library_dirs)) | ||||
|         elif self.compiler.compiler_type == 'mingw32': | ||||
|             generate_manifest(self) | ||||
|         return self._wrap_method(old_config._link, lang, | ||||
|                                  (body, headers, include_dirs, | ||||
|                                   libraries, library_dirs, lang)) | ||||
|  | ||||
|     def check_header(self, header, include_dirs=None, library_dirs=None, lang='c'): | ||||
|         self._check_compiler() | ||||
|         return self.try_compile( | ||||
|                 "/* we need a dummy line to make distutils happy */", | ||||
|                 [header], include_dirs) | ||||
|  | ||||
|     def check_decl(self, symbol, | ||||
|                    headers=None, include_dirs=None): | ||||
|         self._check_compiler() | ||||
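|         # Autoconf-style trick: if `symbol` is defined as a macro, the reference | ||||
|         # below is preprocessed away; otherwise the expression only compiles | ||||
|         # when the headers actually declare the symbol. | ||||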
|         body = textwrap.dedent(""" | ||||
|             int main(void) | ||||
|             { | ||||
|             #ifndef %s | ||||
|                 (void) %s; | ||||
|             #endif | ||||
|                 ; | ||||
|                 return 0; | ||||
|             }""") % (symbol, symbol) | ||||
|  | ||||
|         return self.try_compile(body, headers, include_dirs) | ||||
|  | ||||
|     def check_macro_true(self, symbol, | ||||
|                          headers=None, include_dirs=None): | ||||
|         self._check_compiler() | ||||
|         body = textwrap.dedent(""" | ||||
|             int main(void) | ||||
|             { | ||||
|             #if %s | ||||
|             #else | ||||
|             #error false or undefined macro | ||||
|             #endif | ||||
|                 ; | ||||
|                 return 0; | ||||
|             }""") % (symbol,) | ||||
|  | ||||
|         return self.try_compile(body, headers, include_dirs) | ||||
|  | ||||
|     def check_type(self, type_name, headers=None, include_dirs=None, | ||||
|             library_dirs=None): | ||||
|         """Check type availability. Return True if the type can be compiled, | ||||
|         False otherwise""" | ||||
|         self._check_compiler() | ||||
|  | ||||
|         # First check the type can be compiled | ||||
|         body = textwrap.dedent(r""" | ||||
|             int main(void) { | ||||
|               if ((%(name)s *) 0) | ||||
|                 return 0; | ||||
|               if (sizeof (%(name)s)) | ||||
|                 return 0; | ||||
|             } | ||||
|             """) % {'name': type_name} | ||||
|  | ||||
|         st = False | ||||
|         try: | ||||
|             try: | ||||
|                 self._compile(body % {'type': type_name}, | ||||
|                         headers, include_dirs, 'c') | ||||
|                 st = True | ||||
|             except distutils.errors.CompileError: | ||||
|                 st = False | ||||
|         finally: | ||||
|             self._clean() | ||||
|  | ||||
|         return st | ||||
|  | ||||
|     def check_type_size(self, type_name, headers=None, include_dirs=None, library_dirs=None, expected=None): | ||||
|         """Check size of a given type.""" | ||||
|         self._check_compiler() | ||||
|  | ||||
|         # First check the type can be compiled | ||||
|         body = textwrap.dedent(r""" | ||||
|             typedef %(type)s npy_check_sizeof_type; | ||||
|             int main (void) | ||||
|             { | ||||
|                 static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) >= 0)]; | ||||
|                 test_array [0] = 0 | ||||
|  | ||||
|                 ; | ||||
|                 return 0; | ||||
|             } | ||||
|             """) | ||||
|         self._compile(body % {'type': type_name}, | ||||
|                 headers, include_dirs, 'c') | ||||
|         self._clean() | ||||
|  | ||||
|         if expected: | ||||
|             body = textwrap.dedent(r""" | ||||
|                 typedef %(type)s npy_check_sizeof_type; | ||||
|                 int main (void) | ||||
|                 { | ||||
|                     static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) == %(size)s)]; | ||||
|                     test_array [0] = 0 | ||||
|  | ||||
|                     ; | ||||
|                     return 0; | ||||
|                 } | ||||
|                 """) | ||||
|             for size in expected: | ||||
|                 try: | ||||
|                     self._compile(body % {'type': type_name, 'size': size}, | ||||
|                             headers, include_dirs, 'c') | ||||
|                     self._clean() | ||||
|                     return size | ||||
|                 except CompileError: | ||||
|                     pass | ||||
|  | ||||
|         # this fails to *compile* if size < sizeof(type) | ||||
|         body = textwrap.dedent(r""" | ||||
|             typedef %(type)s npy_check_sizeof_type; | ||||
|             int main (void) | ||||
|             { | ||||
|                 static int test_array [1 - 2 * !(((long) (sizeof (npy_check_sizeof_type))) <= %(size)s)]; | ||||
|                 test_array [0] = 0 | ||||
|  | ||||
|                 ; | ||||
|                 return 0; | ||||
|             } | ||||
|             """) | ||||
|  | ||||
|         # The principle is simple: we first find low and high bounds of size | ||||
|         # for the type, where low/high are looked up on a log scale. Then, we | ||||
|         # do a binary search to find the exact size between low and high | ||||
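|         # (e.g. for an 8-byte type the doubling loop below tries sizes 0, 1, 3, | ||||
|         # 7, 15; the first success leaves low=8, high=15, and the binary search | ||||
|         # then narrows that range down to 8) | ||||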
|         low = 0 | ||||
|         mid = 0 | ||||
|         while True: | ||||
|             try: | ||||
|                 self._compile(body % {'type': type_name, 'size': mid}, | ||||
|                         headers, include_dirs, 'c') | ||||
|                 self._clean() | ||||
|                 break | ||||
|             except CompileError: | ||||
|                 #log.info("failure to test for bound %d" % mid) | ||||
|                 low = mid + 1 | ||||
|                 mid = 2 * mid + 1 | ||||
|  | ||||
|         high = mid | ||||
|         # Binary search: | ||||
|         while low != high: | ||||
|             mid = (high - low) // 2 + low | ||||
|             try: | ||||
|                 self._compile(body % {'type': type_name, 'size': mid}, | ||||
|                         headers, include_dirs, 'c') | ||||
|                 self._clean() | ||||
|                 high = mid | ||||
|             except CompileError: | ||||
|                 low = mid + 1 | ||||
|         return low | ||||
|  | ||||
|     def check_func(self, func, | ||||
|                    headers=None, include_dirs=None, | ||||
|                    libraries=None, library_dirs=None, | ||||
|                    decl=False, call=False, call_args=None): | ||||
|         # clean up distutils's config a bit: add void to main(), and | ||||
|         # return a value. | ||||
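|         # For illustration (hypothetical arguments), check_func('sin', call=True, | ||||
|         # call_args='0.') compiles and links a body equivalent to: | ||||
|         #     #ifdef _MSC_VER | ||||
|         #     #pragma function(sin) | ||||
|         #     #endif | ||||
|         #     int main (void) { | ||||
|         #       sin(0.); | ||||
|         #       return 0; | ||||
|         #     } | ||||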
|         self._check_compiler() | ||||
|         body = [] | ||||
|         if decl: | ||||
|             if type(decl) == str: | ||||
|                 body.append(decl) | ||||
|             else: | ||||
|                 body.append("int %s (void);" % func) | ||||
|         # Handle MSVC intrinsics: force MS compiler to make a function call. | ||||
|         # Useful to test for some functions when built with optimization on, to | ||||
|         # avoid build error because the intrinsic and our 'fake' test | ||||
|         # declaration do not match. | ||||
|         body.append("#ifdef _MSC_VER") | ||||
|         body.append("#pragma function(%s)" % func) | ||||
|         body.append("#endif") | ||||
|         body.append("int main (void) {") | ||||
|         if call: | ||||
|             if call_args is None: | ||||
|                 call_args = '' | ||||
|             body.append("  %s(%s);" % (func, call_args)) | ||||
|         else: | ||||
|             body.append("  %s;" % func) | ||||
|         body.append("  return 0;") | ||||
|         body.append("}") | ||||
|         body = '\n'.join(body) + "\n" | ||||
|  | ||||
|         return self.try_link(body, headers, include_dirs, | ||||
|                              libraries, library_dirs) | ||||
|  | ||||
|     def check_funcs_once(self, funcs, | ||||
|                    headers=None, include_dirs=None, | ||||
|                    libraries=None, library_dirs=None, | ||||
|                    decl=False, call=False, call_args=None): | ||||
|         """Check a list of functions at once. | ||||
|  | ||||
|         This is useful to speed things up, since all the functions in the funcs | ||||
|         list will be put in one compilation unit. | ||||
|  | ||||
|         Arguments | ||||
|         --------- | ||||
|         funcs : seq | ||||
|             list of functions to test | ||||
|         include_dirs : seq | ||||
|             list of header paths | ||||
|         libraries : seq | ||||
|             list of libraries to link the code snippet to | ||||
|         library_dirs : seq | ||||
|             list of library paths | ||||
|         decl : dict | ||||
|             for every (key, value) pair, the declaration given by the value is | ||||
|             used for the function named by the key. If a function is not in | ||||
|             the dictionary, no declaration is used. | ||||
|         call : dict | ||||
|             for every item (f, value), a call to the function f is emitted if | ||||
|             the value is true. | ||||
|         call_args : dict | ||||
|             for every item (f, value), the value is the argument string used | ||||
|             when calling f (only consulted when call[f] is true). | ||||
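|  | ||||
|         Example (illustrative only; the function names are placeholders):: | ||||
|  | ||||
|             config_cmd.check_funcs_once(['sin', 'cos'], call={'sin': True}, | ||||
|                                         call_args={'sin': '0.'}) | ||||
|  | ||||
|         tries to build a single program that calls sin(0.) and merely | ||||
|         references cos. | ||||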
|         """ | ||||
|         self._check_compiler() | ||||
|         body = [] | ||||
|         if decl: | ||||
|             for f, v in decl.items(): | ||||
|                 if v: | ||||
|                     body.append("int %s (void);" % f) | ||||
|  | ||||
|         # Handle MS intrinsics. See check_func for more info. | ||||
|         body.append("#ifdef _MSC_VER") | ||||
|         for func in funcs: | ||||
|             body.append("#pragma function(%s)" % func) | ||||
|         body.append("#endif") | ||||
|  | ||||
|         body.append("int main (void) {") | ||||
|         if call: | ||||
|             for f in funcs: | ||||
|                 if f in call and call[f]: | ||||
|                     if not (call_args and f in call_args and call_args[f]): | ||||
|                         args = '' | ||||
|                     else: | ||||
|                         args = call_args[f] | ||||
|                     body.append("  %s(%s);" % (f, args)) | ||||
|                 else: | ||||
|                     body.append("  %s;" % f) | ||||
|         else: | ||||
|             for f in funcs: | ||||
|                 body.append("  %s;" % f) | ||||
|         body.append("  return 0;") | ||||
|         body.append("}") | ||||
|         body = '\n'.join(body) + "\n" | ||||
|  | ||||
|         return self.try_link(body, headers, include_dirs, | ||||
|                              libraries, library_dirs) | ||||
|  | ||||
|     def check_inline(self): | ||||
|         """Return the inline keyword recognized by the compiler, or an empty | ||||
|         string if none is.""" | ||||
|         return check_inline(self) | ||||
|  | ||||
|     def check_restrict(self): | ||||
|         """Return the restrict keyword recognized by the compiler, or an empty | ||||
|         string if none is.""" | ||||
|         return check_restrict(self) | ||||
|  | ||||
|     def check_compiler_gcc(self): | ||||
|         """Return True if the C compiler is gcc""" | ||||
|         return check_compiler_gcc(self) | ||||
|  | ||||
|     def check_gcc_function_attribute(self, attribute, name): | ||||
|         return check_gcc_function_attribute(self, attribute, name) | ||||
|  | ||||
|     def check_gcc_function_attribute_with_intrinsics(self, attribute, name, | ||||
|                                                      code, include): | ||||
|         return check_gcc_function_attribute_with_intrinsics(self, attribute, | ||||
|                                                             name, code, include) | ||||
|  | ||||
|     def check_gcc_variable_attribute(self, attribute): | ||||
|         return check_gcc_variable_attribute(self, attribute) | ||||
|  | ||||
|     def check_gcc_version_at_least(self, major, minor=0, patchlevel=0): | ||||
|         """Return True if the GCC version is greater than or equal to the | ||||
|         specified version.""" | ||||
|         return check_gcc_version_at_least(self, major, minor, patchlevel) | ||||
|  | ||||
|     def get_output(self, body, headers=None, include_dirs=None, | ||||
|                    libraries=None, library_dirs=None, | ||||
|                    lang="c", use_tee=None): | ||||
|         """Try to compile, link to an executable, and run a program | ||||
|         built from 'body' and 'headers'. Returns the exit status code | ||||
|         of the program and its output. | ||||
|         """ | ||||
|         # 2008-11-16, RemoveMe | ||||
|         warnings.warn("\n+++++++++++++++++++++++++++++++++++++++++++++++++\n" | ||||
|                       "Usage of get_output is deprecated: please do not \n" | ||||
|                       "use it anymore, and avoid configuration checks \n" | ||||
|                       "involving running executable on the target machine.\n" | ||||
|                       "+++++++++++++++++++++++++++++++++++++++++++++++++\n", | ||||
|                       DeprecationWarning, stacklevel=2) | ||||
|         self._check_compiler() | ||||
|         exitcode, output = 255, '' | ||||
|         try: | ||||
|             grabber = GrabStdout() | ||||
|             try: | ||||
|                 src, obj, exe = self._link(body, headers, include_dirs, | ||||
|                                            libraries, library_dirs, lang) | ||||
|                 grabber.restore() | ||||
|             except Exception: | ||||
|                 output = grabber.data | ||||
|                 grabber.restore() | ||||
|                 raise | ||||
|             exe = os.path.join('.', exe) | ||||
|             try: | ||||
|                 # specify cwd arg for consistency with | ||||
|                 # historic usage pattern of exec_command() | ||||
|                 # also, note that exe appears to be a string, | ||||
|                 # which exec_command() handled, but we now | ||||
|                 # use a list for check_output() -- this assumes | ||||
|                 # that exe is always a single command | ||||
|                 output = subprocess.check_output([exe], cwd='.') | ||||
|             except subprocess.CalledProcessError as exc: | ||||
|                 exitstatus = exc.returncode | ||||
|                 output = '' | ||||
|             except OSError: | ||||
|                 # preserve the EnvironmentError exit status | ||||
|                 # used historically in exec_command() | ||||
|                 exitstatus = 127 | ||||
|                 output = '' | ||||
|             else: | ||||
|                 output = filepath_from_subprocess_output(output) | ||||
|                 exitstatus = 0 | ||||
|             if hasattr(os, 'WEXITSTATUS'): | ||||
|                 exitcode = os.WEXITSTATUS(exitstatus) | ||||
|                 if os.WIFSIGNALED(exitstatus): | ||||
|                     sig = os.WTERMSIG(exitstatus) | ||||
|                     log.error('subprocess exited with signal %d' % (sig,)) | ||||
|                     if sig == signal.SIGINT: | ||||
|                         # control-C | ||||
|                         raise KeyboardInterrupt | ||||
|             else: | ||||
|                 exitcode = exitstatus | ||||
|             log.info("success!") | ||||
|         except (CompileError, LinkError): | ||||
|             log.info("failure.") | ||||
|         self._clean() | ||||
|         return exitcode, output | ||||
|  | ||||
| class GrabStdout: | ||||
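|     """Tee for sys.stdout: forwards writes to the real stdout while also | ||||
|     accumulating them in self.data until restore() is called.""" | ||||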
|  | ||||
|     def __init__(self): | ||||
|         self.sys_stdout = sys.stdout | ||||
|         self.data = '' | ||||
|         sys.stdout = self | ||||
|  | ||||
|     def write (self, data): | ||||
|         self.sys_stdout.write(data) | ||||
|         self.data += data | ||||
|  | ||||
|     def flush (self): | ||||
|         self.sys_stdout.flush() | ||||
|  | ||||
|     def restore(self): | ||||
|         sys.stdout = self.sys_stdout | ||||
| @ -0,0 +1,126 @@ | ||||
| from distutils.core import Command | ||||
| from numpy.distutils import log | ||||
|  | ||||
| #XXX: Linker flags | ||||
|  | ||||
| def show_fortran_compilers(_cache=None): | ||||
|     # Using cache to prevent infinite recursion. | ||||
|     if _cache: | ||||
|         return | ||||
|     elif _cache is None: | ||||
|         _cache = [] | ||||
|     _cache.append(1) | ||||
|     from numpy.distutils.fcompiler import show_fcompilers | ||||
|     import distutils.core | ||||
|     dist = distutils.core._setup_distribution | ||||
|     show_fcompilers(dist) | ||||
|  | ||||
| class config_fc(Command): | ||||
|     """ Distutils command to hold user specified options | ||||
|     to Fortran compilers. | ||||
|  | ||||
|     The config_fc command is used by the FCompiler.customize() method. | ||||
|     """ | ||||
|  | ||||
|     description = "specify Fortran 77/Fortran 90 compiler information" | ||||
|  | ||||
|     user_options = [ | ||||
|         ('fcompiler=', None, "specify Fortran compiler type"), | ||||
|         ('f77exec=', None, "specify F77 compiler command"), | ||||
|         ('f90exec=', None, "specify F90 compiler command"), | ||||
|         ('f77flags=', None, "specify F77 compiler flags"), | ||||
|         ('f90flags=', None, "specify F90 compiler flags"), | ||||
|         ('opt=', None, "specify optimization flags"), | ||||
|         ('arch=', None, "specify architecture specific optimization flags"), | ||||
|         ('debug', 'g', "compile with debugging information"), | ||||
|         ('noopt', None, "compile without optimization"), | ||||
|         ('noarch', None, "compile without arch-dependent optimization"), | ||||
|         ] | ||||
|  | ||||
|     help_options = [ | ||||
|         ('help-fcompiler', None, "list available Fortran compilers", | ||||
|          show_fortran_compilers), | ||||
|         ] | ||||
|  | ||||
|     boolean_options = ['debug', 'noopt', 'noarch'] | ||||
|  | ||||
|     def initialize_options(self): | ||||
|         self.fcompiler = None | ||||
|         self.f77exec = None | ||||
|         self.f90exec = None | ||||
|         self.f77flags = None | ||||
|         self.f90flags = None | ||||
|         self.opt = None | ||||
|         self.arch = None | ||||
|         self.debug = None | ||||
|         self.noopt = None | ||||
|         self.noarch = None | ||||
|  | ||||
|     def finalize_options(self): | ||||
|         log.info('unifying config_fc, config, build_clib, build_ext, build commands --fcompiler options') | ||||
|         build_clib = self.get_finalized_command('build_clib') | ||||
|         build_ext = self.get_finalized_command('build_ext') | ||||
|         config = self.get_finalized_command('config') | ||||
|         build = self.get_finalized_command('build') | ||||
|         cmd_list = [self, config, build_clib, build_ext, build] | ||||
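|         # Take the first non-None --fcompiler value found among these commands | ||||
|         # and propagate it to every command that has not set one. | ||||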
|         for a in ['fcompiler']: | ||||
|             l = [] | ||||
|             for c in cmd_list: | ||||
|                 v = getattr(c, a) | ||||
|                 if v is not None: | ||||
|                     if not isinstance(v, str): v = v.compiler_type | ||||
|                     if v not in l: l.append(v) | ||||
|             if not l: v1 = None | ||||
|             else: v1 = l[0] | ||||
|             if len(l)>1: | ||||
|                 log.warn('  commands have different --%s options: %s'\ | ||||
|                          ', using first in list as default' % (a, l)) | ||||
|             if v1: | ||||
|                 for c in cmd_list: | ||||
|                     if getattr(c, a) is None: setattr(c, a, v1) | ||||
|  | ||||
|     def run(self): | ||||
|         # Do nothing. | ||||
|         return | ||||
|  | ||||
| class config_cc(Command): | ||||
|     """ Distutils command to hold user specified options | ||||
|     to C/C++ compilers. | ||||
|     """ | ||||
|  | ||||
|     description = "specify C/C++ compiler information" | ||||
|  | ||||
|     user_options = [ | ||||
|         ('compiler=', None, "specify C/C++ compiler type"), | ||||
|         ] | ||||
|  | ||||
|     def initialize_options(self): | ||||
|         self.compiler = None | ||||
|  | ||||
|     def finalize_options(self): | ||||
|         log.info('unifying config_cc, config, build_clib, build_ext, build commands --compiler options') | ||||
|         build_clib = self.get_finalized_command('build_clib') | ||||
|         build_ext = self.get_finalized_command('build_ext') | ||||
|         config = self.get_finalized_command('config') | ||||
|         build = self.get_finalized_command('build') | ||||
|         cmd_list = [self, config, build_clib, build_ext, build] | ||||
|         for a in ['compiler']: | ||||
|             l = [] | ||||
|             for c in cmd_list: | ||||
|                 v = getattr(c, a) | ||||
|                 if v is not None: | ||||
|                     if not isinstance(v, str): v = v.compiler_type | ||||
|                     if v not in l: l.append(v) | ||||
|             if not l: v1 = None | ||||
|             else: v1 = l[0] | ||||
|             if len(l)>1: | ||||
|                 log.warn('  commands have different --%s options: %s'\ | ||||
|                          ', using first in list as default' % (a, l)) | ||||
|             if v1: | ||||
|                 for c in cmd_list: | ||||
|                     if getattr(c, a) is None: setattr(c, a, v1) | ||||
|         return | ||||
|  | ||||
|     def run(self): | ||||
|         # Do nothing. | ||||
|         return | ||||
| @ -0,0 +1,15 @@ | ||||
| """ Override the develop command from setuptools so we can ensure that our | ||||
| generated files (from build_src or build_scripts) are properly converted to real | ||||
| files with filenames. | ||||
|  | ||||
| """ | ||||
| from setuptools.command.develop import develop as old_develop | ||||
|  | ||||
| class develop(old_develop): | ||||
|     __doc__ = old_develop.__doc__ | ||||
|     def install_for_development(self): | ||||
|         # Build sources in-place, too. | ||||
|         self.reinitialize_command('build_src', inplace=1) | ||||
|         # Make sure scripts are built. | ||||
|         self.run_command('build_scripts') | ||||
|         old_develop.install_for_development(self) | ||||
| @ -0,0 +1,25 @@ | ||||
| import sys | ||||
|  | ||||
| from setuptools.command.egg_info import egg_info as _egg_info | ||||
|  | ||||
| class egg_info(_egg_info): | ||||
|     def run(self): | ||||
|         if 'sdist' in sys.argv: | ||||
|             import warnings | ||||
|             import textwrap | ||||
|             msg = textwrap.dedent(""" | ||||
|                 `build_src` is being run; this may lead to missing | ||||
|                 files in your sdist!  You want to use distutils.sdist | ||||
|                 instead of the setuptools version: | ||||
|  | ||||
|                     from distutils.command.sdist import sdist | ||||
|                     cmdclass={'sdist': sdist} | ||||
|  | ||||
|                 See numpy's setup.py or gh-7131 for details.""") | ||||
|             warnings.warn(msg, UserWarning, stacklevel=2) | ||||
|  | ||||
|         # We need to ensure that build_src has been executed in order to give | ||||
|         # setuptools' egg_info command real filenames instead of functions which | ||||
|         # generate files. | ||||
|         self.run_command("build_src") | ||||
|         _egg_info.run(self) | ||||
| @ -0,0 +1,79 @@ | ||||
| import sys | ||||
| if 'setuptools' in sys.modules: | ||||
|     import setuptools.command.install as old_install_mod | ||||
|     have_setuptools = True | ||||
| else: | ||||
|     import distutils.command.install as old_install_mod | ||||
|     have_setuptools = False | ||||
| from distutils.file_util import write_file | ||||
|  | ||||
| old_install = old_install_mod.install | ||||
|  | ||||
| class install(old_install): | ||||
|  | ||||
|     # Always run install_clib - the command is cheap, so no need to bypass it; | ||||
|     # but it's not run by setuptools -- so it's run again in install_data | ||||
|     sub_commands = old_install.sub_commands + [ | ||||
|         ('install_clib', lambda x: True) | ||||
|     ] | ||||
|  | ||||
|     def finalize_options (self): | ||||
|         old_install.finalize_options(self) | ||||
|         self.install_lib = self.install_libbase | ||||
|  | ||||
|     def setuptools_run(self): | ||||
|         """ The setuptools version of the .run() method. | ||||
|  | ||||
|         We must pull in the entire code so we can override the level used in the | ||||
|         _getframe() call since we wrap this call by one more level. | ||||
|         """ | ||||
|         from distutils.command.install import install as distutils_install | ||||
|  | ||||
|         # Explicit request for old-style install?  Just do it | ||||
|         if self.old_and_unmanageable or self.single_version_externally_managed: | ||||
|             return distutils_install.run(self) | ||||
|  | ||||
|         # Attempt to detect whether we were called from setup() or by another | ||||
|         # command.  If we were called by setup(), our caller will be the | ||||
|         # 'run_command' method in 'distutils.dist', and *its* caller will be | ||||
|         # the 'run_commands' method.  If we were called any other way, our | ||||
|         # immediate caller *might* be 'run_command', but it won't have been | ||||
|         # called by 'run_commands'.  This is slightly kludgy, but seems to | ||||
|         # work. | ||||
|         # | ||||
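|         # Frame layout here: 0 = setuptools_run, 1 = our run() wrapper, | ||||
|         # 2 = Distribution.run_command, 3 = its caller (run_commands when | ||||
|         # invoked via setup()). | ||||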
|         caller = sys._getframe(3) | ||||
|         caller_module = caller.f_globals.get('__name__', '') | ||||
|         caller_name = caller.f_code.co_name | ||||
|  | ||||
|         if caller_module != 'distutils.dist' or caller_name!='run_commands': | ||||
|             # We weren't called from the command line or setup(), so we | ||||
|             # should run in backward-compatibility mode to support bdist_* | ||||
|             # commands. | ||||
|             distutils_install.run(self) | ||||
|         else: | ||||
|             self.do_egg_install() | ||||
|  | ||||
|     def run(self): | ||||
|         if not have_setuptools: | ||||
|             r = old_install.run(self) | ||||
|         else: | ||||
|             r = self.setuptools_run() | ||||
|         if self.record: | ||||
|             # bdist_rpm fails when INSTALLED_FILES contains | ||||
|             # paths with spaces. Such paths must be enclosed | ||||
|             # with double-quotes. | ||||
|             with open(self.record) as f: | ||||
|                 lines = [] | ||||
|                 need_rewrite = False | ||||
|                 for l in f: | ||||
|                     l = l.rstrip() | ||||
|                     if ' ' in l: | ||||
|                         need_rewrite = True | ||||
|                         l = '"%s"' % (l) | ||||
|                     lines.append(l) | ||||
|             if need_rewrite: | ||||
|                 self.execute(write_file, | ||||
|                              (self.record, lines), | ||||
|                              "re-writing list of installed files to '%s'" % | ||||
|                              self.record) | ||||
|         return r | ||||
| @ -0,0 +1,40 @@ | ||||
| import os | ||||
| from distutils.core import Command | ||||
| from distutils.ccompiler import new_compiler | ||||
| from numpy.distutils.misc_util import get_cmd | ||||
|  | ||||
| class install_clib(Command): | ||||
|     description = "Command to install installable C libraries" | ||||
|  | ||||
|     user_options = [] | ||||
|  | ||||
|     def initialize_options(self): | ||||
|         self.install_dir = None | ||||
|         self.outfiles = [] | ||||
|  | ||||
|     def finalize_options(self): | ||||
|         self.set_undefined_options('install', ('install_lib', 'install_dir')) | ||||
|  | ||||
|     def run (self): | ||||
|         build_clib_cmd = get_cmd("build_clib") | ||||
|         if not build_clib_cmd.build_clib: | ||||
|             # can happen if the user specified `--skip-build` | ||||
|             build_clib_cmd.finalize_options() | ||||
|         build_dir = build_clib_cmd.build_clib | ||||
|  | ||||
|         # We need the compiler to get the library name -> filename association | ||||
|         if not build_clib_cmd.compiler: | ||||
|             compiler = new_compiler(compiler=None) | ||||
|             compiler.customize(self.distribution) | ||||
|         else: | ||||
|             compiler = build_clib_cmd.compiler | ||||
|  | ||||
|         for l in self.distribution.installed_libraries: | ||||
|             target_dir = os.path.join(self.install_dir, l.target_dir) | ||||
|             name = compiler.library_filename(l.name) | ||||
|             source = os.path.join(build_dir, name) | ||||
|             self.mkpath(target_dir) | ||||
|             self.outfiles.append(self.copy_file(source, target_dir)[0]) | ||||
|  | ||||
|     def get_outputs(self): | ||||
|         return self.outfiles | ||||
| @ -0,0 +1,24 @@ | ||||
| import sys | ||||
| have_setuptools = ('setuptools' in sys.modules) | ||||
|  | ||||
| from distutils.command.install_data import install_data as old_install_data | ||||
|  | ||||
| # Data installer with improved intelligence over distutils: data files are | ||||
| # copied into the project directory instead of willy-nilly. | ||||
| class install_data (old_install_data): | ||||
|  | ||||
|     def run(self): | ||||
|         old_install_data.run(self) | ||||
|  | ||||
|         if have_setuptools: | ||||
|             # Run install_clib again, since setuptools does not run sub-commands | ||||
|             # of install automatically | ||||
|             self.run_command('install_clib') | ||||
|  | ||||
|     def finalize_options (self): | ||||
|         self.set_undefined_options('install', | ||||
|                                    ('install_lib', 'install_dir'), | ||||
|                                    ('root', 'root'), | ||||
|                                    ('force', 'force'), | ||||
|                                   ) | ||||
| @ -0,0 +1,25 @@ | ||||
| import os | ||||
| from distutils.command.install_headers import install_headers as old_install_headers | ||||
|  | ||||
| class install_headers (old_install_headers): | ||||
|  | ||||
|     def run (self): | ||||
|         headers = self.distribution.headers | ||||
|         if not headers: | ||||
|             return | ||||
|  | ||||
|         prefix = os.path.dirname(self.install_dir) | ||||
|         for header in headers: | ||||
|             if isinstance(header, tuple): | ||||
|                 # Kind of a hack, but I don't know where else to change this... | ||||
|                 if header[0] == 'numpy._core': | ||||
|                     header = ('numpy', header[1]) | ||||
|                     if os.path.splitext(header[1])[1] == '.inc': | ||||
|                         continue | ||||
|                 d = os.path.join(*([prefix]+header[0].split('.'))) | ||||
|                 header = header[1] | ||||
|             else: | ||||
|                 d = self.install_dir | ||||
|             self.mkpath(d) | ||||
|             (out, _) = self.copy_file(header, d) | ||||
|             self.outfiles.append(out) | ||||
| @ -0,0 +1,27 @@ | ||||
| import sys | ||||
| if 'setuptools' in sys.modules: | ||||
|     from setuptools.command.sdist import sdist as old_sdist | ||||
| else: | ||||
|     from distutils.command.sdist import sdist as old_sdist | ||||
|  | ||||
| from numpy.distutils.misc_util import get_data_files | ||||
|  | ||||
| class sdist(old_sdist): | ||||
|  | ||||
|     def add_defaults (self): | ||||
|         old_sdist.add_defaults(self) | ||||
|  | ||||
|         dist = self.distribution | ||||
|  | ||||
|         if dist.has_data_files(): | ||||
|             for data in dist.data_files: | ||||
|                 self.filelist.extend(get_data_files(data)) | ||||
|  | ||||
|         if dist.has_headers(): | ||||
|             headers = [] | ||||
|             for h in dist.headers: | ||||
|                 if isinstance(h, str): headers.append(h) | ||||
|                 else: headers.append(h[1]) | ||||
|             self.filelist.extend(headers) | ||||
|  | ||||
|         return | ||||