Mercurial > hg > vamp-build-and-test

comparison DEPENDENCIES/mingw32/Python27/Lib/site-packages/numpy/distutils/misc_util.py @ 87:2a2c65a20a8b

Add Python libs and headers

author    Chris Cannam
date      Wed, 25 Feb 2015 14:05:22 +0000
parents
children
comparing 86:413a9d26189e with 87:2a2c65a20a8b
1 from __future__ import division, absolute_import, print_function | |
2 | |
3 import os | |
4 import re | |
5 import sys | |
6 import imp | |
7 import copy | |
8 import glob | |
9 import atexit | |
10 import tempfile | |
11 import subprocess | |
12 import shutil | |
13 | |
14 import distutils | |
15 from distutils.errors import DistutilsError | |
16 | |
17 try: | |
18 set | |
19 except NameError: | |
20 from sets import Set as set | |
21 | |
22 from numpy.distutils.compat import get_exception | |
23 | |
24 __all__ = ['Configuration', 'get_numpy_include_dirs', 'default_config_dict', | |
25 'dict_append', 'appendpath', 'generate_config_py', | |
26 'get_cmd', 'allpath', 'get_mathlibs', | |
27 'terminal_has_colors', 'red_text', 'green_text', 'yellow_text', | |
28 'blue_text', 'cyan_text', 'cyg2win32', 'mingw32', 'all_strings', | |
29 'has_f_sources', 'has_cxx_sources', 'filter_sources', | |
30 'get_dependencies', 'is_local_src_dir', 'get_ext_source_files', | |
31 'get_script_files', 'get_lib_source_files', 'get_data_files', | |
32 'dot_join', 'get_frame', 'minrelpath', 'njoin', | |
33 'is_sequence', 'is_string', 'as_list', 'gpaths', 'get_language', | |
34 'quote_args', 'get_build_architecture', 'get_info', 'get_pkg_info'] | |
35 | |
36 class InstallableLib(object): | |
37 """ | |
38 Container to hold information on an installable library. | |
39 | |
40 Parameters | |
41 ---------- | |
42 name : str | |
43 Name of the installed library. | |
44 build_info : dict | |
45 Dictionary holding build information. | |
46 target_dir : str | |
47 Absolute path specifying where to install the library. | |
48 | |
49 See Also | |
50 -------- | |
51 Configuration.add_installed_library | |
52 | |
53 Notes | |
54 ----- | |
55 The three parameters are stored as attributes with the same names. | |
56 | |
57 """ | |
58 def __init__(self, name, build_info, target_dir): | |
59 self.name = name | |
60 self.build_info = build_info | |
61 self.target_dir = target_dir | |
62 | |
63 def quote_args(args): | |
64 # don't use _nt_quote_args as it does not check if | |
65 # args items already have quotes or not. | |
66 args = list(args) | |
67 for i in range(len(args)): | |
68 a = args[i] | |
69 if ' ' in a and a[0] not in '"\'': | |
70 args[i] = '"%s"' % (a) | |
71 return args | |
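
# Illustrative usage sketch (hypothetical arguments): items containing a
# space are wrapped in double quotes unless they already start with a quote:
#
#     >>> quote_args(['gcc', '-I/opt/my include', '"/already quoted"'])
#     ['gcc', '"-I/opt/my include"', '"/already quoted"']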
72 | |
73 def allpath(name): | |
74 "Convert a /-separated pathname to one using the OS's path separator." | |
75 splitted = name.split('/') | |
76 return os.path.join(*splitted) | |
77 | |
78 def rel_path(path, parent_path): | |
79 """Return path relative to parent_path. | |
80 """ | |
81 pd = os.path.abspath(parent_path) | |
82 apath = os.path.abspath(path) | |
83 if len(apath)<len(pd): | |
84 return path | |
85 if apath==pd: | |
86 return '' | |
87 if pd == apath[:len(pd)]: | |
88 assert apath[len(pd)] in [os.sep], repr((path, apath[len(pd)])) | |
89 path = apath[len(pd)+1:] | |
90 return path | |
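
# Illustrative sketch, assuming POSIX-style absolute paths:
#
#     >>> rel_path('/usr/local/lib/python', '/usr/local')
#     'lib/python'
#     >>> rel_path('/usr/local', '/usr/local')
#     ''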
91 | |
92 def get_path_from_frame(frame, parent_path=None): | |
93 """Return path of the module given a frame object from the call stack. | |
94 | |
95 Returned path is relative to parent_path when given, | |
96 otherwise it is absolute path. | |
97 """ | |
98 | |
99 # First, try to find if the file name is in the frame. | |
100 try: | |
101 caller_file = eval('__file__', frame.f_globals, frame.f_locals) | |
102 d = os.path.dirname(os.path.abspath(caller_file)) | |
103 except NameError: | |
104 # __file__ is not defined, so let's try __name__. We try this second | |
105 # because setuptools spoofs __name__ to be '__main__' even though | |
106 # sys.modules['__main__'] might be something else, like easy_install(1). | |
107 caller_name = eval('__name__', frame.f_globals, frame.f_locals) | |
108 __import__(caller_name) | |
109 mod = sys.modules[caller_name] | |
110 if hasattr(mod, '__file__'): | |
111 d = os.path.dirname(os.path.abspath(mod.__file__)) | |
112 else: | |
113 # we're probably running setup.py as execfile("setup.py") | |
114 # (likely we're building an egg) | |
115 d = os.path.abspath('.') | |
116 # hmm, should we use sys.argv[0] like in __builtin__ case? | |
117 | |
118 if parent_path is not None: | |
119 d = rel_path(d, parent_path) | |
120 | |
121 return d or '.' | |
122 | |
123 def njoin(*path): | |
124 """Join two or more pathname components + | |
125 - convert a /-separated pathname to one using the OS's path separator. | |
126 - resolve `..` and `.` from path. | |
127 | |
128 Either passing n arguments as in njoin('a','b'), or a sequence | |
129 of n names as in njoin(['a','b']) is handled, or a mixture of such arguments. | |
130 """ | |
131 paths = [] | |
132 for p in path: | |
133 if is_sequence(p): | |
134 # njoin(['a', 'b'], 'c') | |
135 paths.append(njoin(*p)) | |
136 else: | |
137 assert is_string(p) | |
138 paths.append(p) | |
139 path = paths | |
140 if not path: | |
141 # njoin() | |
142 joined = '' | |
143 else: | |
144 # njoin('a', 'b') | |
145 joined = os.path.join(*path) | |
146 if os.path.sep != '/': | |
147 joined = joined.replace('/', os.path.sep) | |
148 return minrelpath(joined) | |
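
# Illustrative sketch, assuming a POSIX system where os.sep == '/':
#
#     >>> njoin('a', 'b/c')            # '/' is converted to os.sep
#     'a/b/c'
#     >>> njoin(['a', 'b'], '../c')    # sequences are flattened, '..' resolved
#     'a/c'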
149 | |
150 def get_mathlibs(path=None): | |
151 """Return the MATHLIB line from numpyconfig.h | |
152 """ | |
153 if path is not None: | |
154 config_file = os.path.join(path, '_numpyconfig.h') | |
155 else: | |
156 # Look for the file in each of the numpy include directories. | |
157 dirs = get_numpy_include_dirs() | |
158 for path in dirs: | |
159 fn = os.path.join(path, '_numpyconfig.h') | |
160 if os.path.exists(fn): | |
161 config_file = fn | |
162 break | |
163 else: | |
164 raise DistutilsError('_numpyconfig.h not found in numpy include ' | |
165 'dirs %r' % (dirs,)) | |
166 | |
167 fid = open(config_file) | |
168 mathlibs = [] | |
169 s = '#define MATHLIB' | |
170 for line in fid: | |
171 if line.startswith(s): | |
172 value = line[len(s):].strip() | |
173 if value: | |
174 mathlibs.extend(value.split(',')) | |
175 fid.close() | |
176 return mathlibs | |
177 | |
178 def minrelpath(path): | |
179 """Resolve `..` and '.' from path. | |
180 """ | |
181 if not is_string(path): | |
182 return path | |
183 if '.' not in path: | |
184 return path | |
185 l = path.split(os.sep) | |
186 while l: | |
187 try: | |
188 i = l.index('.', 1) | |
189 except ValueError: | |
190 break | |
191 del l[i] | |
192 j = 1 | |
193 while l: | |
194 try: | |
195 i = l.index('..', j) | |
196 except ValueError: | |
197 break | |
198 if l[i-1]=='..': | |
199 j += 1 | |
200 else: | |
201 del l[i], l[i-1] | |
202 j = 1 | |
203 if not l: | |
204 return '' | |
205 return os.sep.join(l) | |
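
# Illustrative sketch, assuming os.sep == '/':
#
#     >>> minrelpath('a/b/../c/./d')
#     'a/c/d'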
206 | |
207 def _fix_paths(paths, local_path, include_non_existing): | |
208 assert is_sequence(paths), repr(type(paths)) | |
209 new_paths = [] | |
210 assert not is_string(paths), repr(paths) | |
211 for n in paths: | |
212 if is_string(n): | |
213 if '*' in n or '?' in n: | |
214 p = glob.glob(n) | |
215 p2 = glob.glob(njoin(local_path, n)) | |
216 if p2: | |
217 new_paths.extend(p2) | |
218 elif p: | |
219 new_paths.extend(p) | |
220 else: | |
221 if include_non_existing: | |
222 new_paths.append(n) | |
223 print('could not resolve pattern in %r: %r' % | |
224 (local_path, n)) | |
225 else: | |
226 n2 = njoin(local_path, n) | |
227 if os.path.exists(n2): | |
228 new_paths.append(n2) | |
229 else: | |
230 if os.path.exists(n): | |
231 new_paths.append(n) | |
232 elif include_non_existing: | |
233 new_paths.append(n) | |
234 if not os.path.exists(n): | |
235 print('non-existing path in %r: %r' % | |
236 (local_path, n)) | |
237 | |
238 elif is_sequence(n): | |
239 new_paths.extend(_fix_paths(n, local_path, include_non_existing)) | |
240 else: | |
241 new_paths.append(n) | |
242 return [minrelpath(p) for p in new_paths] | |
243 | |
244 def gpaths(paths, local_path='', include_non_existing=True): | |
245 """Apply glob to paths and prepend local_path if needed. | |
246 """ | |
247 if is_string(paths): | |
248 paths = (paths,) | |
249 return _fix_paths(paths, local_path, include_non_existing) | |
250 | |
251 | |
252 _temporary_directory = None | |
253 def clean_up_temporary_directory(): | |
254 global _temporary_directory | |
255 if not _temporary_directory: | |
256 return | |
257 try: | |
258 shutil.rmtree(_temporary_directory) | |
259 except OSError: | |
260 pass | |
261 _temporary_directory = None | |
262 | |
263 def make_temp_file(suffix='', prefix='', text=True): | |
264 global _temporary_directory | |
265 if not _temporary_directory: | |
266 _temporary_directory = tempfile.mkdtemp() | |
267 atexit.register(clean_up_temporary_directory) | |
268 fid, name = tempfile.mkstemp(suffix=suffix, | |
269 prefix=prefix, | |
270 dir=_temporary_directory, | |
271 text=text) | |
272 fo = os.fdopen(fid, 'w') | |
273 return fo, name | |
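
# Illustrative usage sketch (hypothetical file contents). The file is created
# in a module-private temporary directory that the atexit hook above removes
# at interpreter exit:
#
#     fo, name = make_temp_file(suffix='.c')
#     fo.write('int main(void) { return 0; }\n')
#     fo.close()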
274 | |
275 # Hooks for colored terminal output. | |
276 # See also http://www.livinglogic.de/Python/ansistyle | |
277 def terminal_has_colors(): | |
278 if sys.platform=='cygwin' and 'USE_COLOR' not in os.environ: | |
279 # Avoid importing curses that causes illegal operation | |
280 # with a message: | |
281 # PYTHON2 caused an invalid page fault in | |
282 # module CYGNURSES7.DLL as 015f:18bbfc28 | |
283 # Details: Python 2.3.3 [GCC 3.3.1 (cygming special)] | |
284 # ssh to Win32 machine from debian | |
285 # curses.version is 2.2 | |
286 # CYGWIN_98-4.10, release 1.5.7(0.109/3/2)) | |
287 return 0 | |
288 if hasattr(sys.stdout, 'isatty') and sys.stdout.isatty(): | |
289 try: | |
290 import curses | |
291 curses.setupterm() | |
292 if (curses.tigetnum("colors") >= 0 | |
293 and curses.tigetnum("pairs") >= 0 | |
294 and ((curses.tigetstr("setf") is not None | |
295 and curses.tigetstr("setb") is not None) | |
296 or (curses.tigetstr("setaf") is not None | |
297 and curses.tigetstr("setab") is not None) | |
298 or curses.tigetstr("scp") is not None)): | |
299 return 1 | |
300 except Exception: | |
301 pass | |
302 return 0 | |
303 | |
304 if terminal_has_colors(): | |
305 _colour_codes = dict(black=0, red=1, green=2, yellow=3, | |
306 blue=4, magenta=5, cyan=6, white=7, default=9) | |
307 def colour_text(s, fg=None, bg=None, bold=False): | |
308 seq = [] | |
309 if bold: | |
310 seq.append('1') | |
311 if fg: | |
312 fgcode = 30 + _colour_codes.get(fg.lower(), 0) | |
313 seq.append(str(fgcode)) | |
314 if bg: | |
315 bgcode = 40 + _colour_codes.get(bg.lower(), 7) | |
316 seq.append(str(bgcode)) | |
317 if seq: | |
318 return '\x1b[%sm%s\x1b[0m' % (';'.join(seq), s) | |
319 else: | |
320 return s | |
321 else: | |
322 def colour_text(s, fg=None, bg=None, bold=False): | |
323 return s | |
324 | |
325 def default_text(s): | |
326 return colour_text(s, 'default') | |
327 def red_text(s): | |
328 return colour_text(s, 'red') | |
329 def green_text(s): | |
330 return colour_text(s, 'green') | |
331 def yellow_text(s): | |
332 return colour_text(s, 'yellow') | |
333 def cyan_text(s): | |
334 return colour_text(s, 'cyan') | |
335 def blue_text(s): | |
336 return colour_text(s, 'blue') | |
337 | |
338 ######################### | |
339 | |
340 def cyg2win32(path): | |
341 if sys.platform=='cygwin' and path.startswith('/cygdrive'): | |
342 path = path[10] + ':' + os.path.normcase(path[11:]) | |
343 return path | |
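
# Illustrative sketch: under Cygwin a /cygdrive path is rewritten to a
# drive-letter path; on any other platform it is returned unchanged:
#
#     >>> cyg2win32('/cygdrive/c/Users/build')
#     'c:/Users/build'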
344 | |
345 def mingw32(): | |
346 """Return true when using mingw32 environment. | |
347 """ | |
348 if sys.platform=='win32': | |
349 if os.environ.get('OSTYPE', '')=='msys': | |
350 return True | |
351 if os.environ.get('MSYSTEM', '')=='MINGW32': | |
352 return True | |
353 return False | |
354 | |
355 def msvc_runtime_library(): | |
356 "Return name of MSVC runtime library if Python was built with MSVC >= 7" | |
357 msc_pos = sys.version.find('MSC v.') | |
358 if msc_pos != -1: | |
359 msc_ver = sys.version[msc_pos+6:msc_pos+10] | |
360 lib = {'1300': 'msvcr70', # MSVC 7.0 | |
361 '1310': 'msvcr71', # MSVC 7.1 | |
362 '1400': 'msvcr80', # MSVC 8 | |
363 '1500': 'msvcr90', # MSVC 9 (VS 2008) | |
364 '1600': 'msvcr100', # MSVC 10 (aka 2010) | |
365 }.get(msc_ver, None) | |
366 else: | |
367 lib = None | |
368 return lib | |
369 | |
370 | |
371 ######################### | |
372 | |
373 #XXX need support for .C that is also C++ | |
374 cxx_ext_match = re.compile(r'.*[.](cpp|cxx|cc)\Z', re.I).match | |
375 fortran_ext_match = re.compile(r'.*[.](f90|f95|f77|for|ftn|f)\Z', re.I).match | |
376 f90_ext_match = re.compile(r'.*[.](f90|f95)\Z', re.I).match | |
377 f90_module_name_match = re.compile(r'\s*module\s*(?P<name>[\w_]+)', re.I).match | |
378 def _get_f90_modules(source): | |
379 """Return a list of Fortran f90 module names that | |
380 given source file defines. | |
381 """ | |
382 if not f90_ext_match(source): | |
383 return [] | |
384 modules = [] | |
385 f = open(source, 'r') | |
386 for line in f: | |
387 m = f90_module_name_match(line) | |
388 if m: | |
389 name = m.group('name') | |
390 modules.append(name) | |
391 # break # XXX can we assume that there is one module per file? | |
392 f.close() | |
393 return modules | |
394 | |
395 def is_string(s): | |
396 return isinstance(s, str) | |
397 | |
398 def all_strings(lst): | |
399 """Return True if all items in lst are string objects. """ | |
400 for item in lst: | |
401 if not is_string(item): | |
402 return False | |
403 return True | |
404 | |
405 def is_sequence(seq): | |
406 if is_string(seq): | |
407 return False | |
408 try: | |
409 len(seq) | |
410 except: | |
411 return False | |
412 return True | |
413 | |
414 def is_glob_pattern(s): | |
415 return is_string(s) and ('*' in s or '?' in s) | |
416 | |
417 def as_list(seq): | |
418 if is_sequence(seq): | |
419 return list(seq) | |
420 else: | |
421 return [seq] | |
422 | |
423 def get_language(sources): | |
424 # not used in numpy/scipy packages, use build_ext.detect_language instead | |
425 """Determine language value (c,f77,f90) from sources """ | |
426 language = None | |
427 for source in sources: | |
428 if isinstance(source, str): | |
429 if f90_ext_match(source): | |
430 language = 'f90' | |
431 break | |
432 elif fortran_ext_match(source): | |
433 language = 'f77' | |
434 return language | |
435 | |
436 def has_f_sources(sources): | |
437 """Return True if sources contains Fortran files """ | |
438 for source in sources: | |
439 if fortran_ext_match(source): | |
440 return True | |
441 return False | |
442 | |
443 def has_cxx_sources(sources): | |
444 """Return True if sources contains C++ files """ | |
445 for source in sources: | |
446 if cxx_ext_match(source): | |
447 return True | |
448 return False | |
449 | |
450 def filter_sources(sources): | |
451 """Return four lists of filenames containing | |
452 C, C++, Fortran, and Fortran 90 module sources, | |
453 respectively. | |
454 """ | |
455 c_sources = [] | |
456 cxx_sources = [] | |
457 f_sources = [] | |
458 fmodule_sources = [] | |
459 for source in sources: | |
460 if fortran_ext_match(source): | |
461 modules = _get_f90_modules(source) | |
462 if modules: | |
463 fmodule_sources.append(source) | |
464 else: | |
465 f_sources.append(source) | |
466 elif cxx_ext_match(source): | |
467 cxx_sources.append(source) | |
468 else: | |
469 c_sources.append(source) | |
470 return c_sources, cxx_sources, f_sources, fmodule_sources | |
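
# Illustrative sketch (hypothetical file names). Note that .f90/.f95 sources
# are opened and scanned for "module" statements to decide whether they
# belong in the last (fmodule_sources) list:
#
#     >>> filter_sources(['core.c', 'wrap.cxx', 'legacy.f'])
#     (['core.c'], ['wrap.cxx'], ['legacy.f'], [])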
471 | |
472 | |
473 def _get_headers(directory_list): | |
474 # get *.h files from list of directories | |
475 headers = [] | |
476 for d in directory_list: | |
477 head = glob.glob(os.path.join(d, "*.h")) #XXX: *.hpp files?? | |
478 headers.extend(head) | |
479 return headers | |
480 | |
481 def _get_directories(list_of_sources): | |
482 # get unique directories from list of sources. | |
483 direcs = [] | |
484 for f in list_of_sources: | |
485 d = os.path.split(f) | |
486 if d[0] != '' and not d[0] in direcs: | |
487 direcs.append(d[0]) | |
488 return direcs | |
489 | |
490 def get_dependencies(sources): | |
491 #XXX scan sources for include statements | |
492 return _get_headers(_get_directories(sources)) | |
493 | |
494 def is_local_src_dir(directory): | |
495 """Return true if directory is local directory. | |
496 """ | |
497 if not is_string(directory): | |
498 return False | |
499 abs_dir = os.path.abspath(directory) | |
500 c = os.path.commonprefix([os.getcwd(), abs_dir]) | |
501 new_dir = abs_dir[len(c):].split(os.sep) | |
502 if new_dir and not new_dir[0]: | |
503 new_dir = new_dir[1:] | |
504 if new_dir and new_dir[0]=='build': | |
505 return False | |
506 new_dir = os.sep.join(new_dir) | |
507 return os.path.isdir(new_dir) | |
508 | |
509 def general_source_files(top_path): | |
510 pruned_directories = {'CVS':1, '.svn':1, 'build':1} | |
511 prune_file_pat = re.compile(r'(?:[~#]|\.py[co]|\.o)$') | |
512 for dirpath, dirnames, filenames in os.walk(top_path, topdown=True): | |
513 pruned = [ d for d in dirnames if d not in pruned_directories ] | |
514 dirnames[:] = pruned | |
515 for f in filenames: | |
516 if not prune_file_pat.search(f): | |
517 yield os.path.join(dirpath, f) | |
518 | |
519 def general_source_directories_files(top_path): | |
520 """Return a directory name relative to top_path and | |
521 files contained. | |
522 """ | |
523 pruned_directories = ['CVS', '.svn', 'build'] | |
524 prune_file_pat = re.compile(r'(?:[~#]|\.py[co]|\.o)$') | |
525 for dirpath, dirnames, filenames in os.walk(top_path, topdown=True): | |
526 pruned = [ d for d in dirnames if d not in pruned_directories ] | |
527 dirnames[:] = pruned | |
528 for d in dirnames: | |
529 dpath = os.path.join(dirpath, d) | |
530 rpath = rel_path(dpath, top_path) | |
531 files = [] | |
532 for f in os.listdir(dpath): | |
533 fn = os.path.join(dpath, f) | |
534 if os.path.isfile(fn) and not prune_file_pat.search(fn): | |
535 files.append(fn) | |
536 yield rpath, files | |
537 dpath = top_path | |
538 rpath = rel_path(dpath, top_path) | |
539 filenames = [os.path.join(dpath, f) for f in os.listdir(dpath) \ | |
540 if not prune_file_pat.search(f)] | |
541 files = [f for f in filenames if os.path.isfile(f)] | |
542 yield rpath, files | |
543 | |
544 | |
545 def get_ext_source_files(ext): | |
546 # Get sources and any include files in the same directory. | |
547 filenames = [] | |
548 sources = [_m for _m in ext.sources if is_string(_m)] | |
549 filenames.extend(sources) | |
550 filenames.extend(get_dependencies(sources)) | |
551 for d in ext.depends: | |
552 if is_local_src_dir(d): | |
553 filenames.extend(list(general_source_files(d))) | |
554 elif os.path.isfile(d): | |
555 filenames.append(d) | |
556 return filenames | |
557 | |
558 def get_script_files(scripts): | |
559 scripts = [_m for _m in scripts if is_string(_m)] | |
560 return scripts | |
561 | |
562 def get_lib_source_files(lib): | |
563 filenames = [] | |
564 sources = lib[1].get('sources', []) | |
565 sources = [_m for _m in sources if is_string(_m)] | |
566 filenames.extend(sources) | |
567 filenames.extend(get_dependencies(sources)) | |
568 depends = lib[1].get('depends', []) | |
569 for d in depends: | |
570 if is_local_src_dir(d): | |
571 filenames.extend(list(general_source_files(d))) | |
572 elif os.path.isfile(d): | |
573 filenames.append(d) | |
574 return filenames | |
575 | |
576 def get_shared_lib_extension(is_python_ext=False): | |
577 """Return the correct file extension for shared libraries. | |
578 | |
579 Parameters | |
580 ---------- | |
581 is_python_ext : bool, optional | |
582 Whether the shared library is a Python extension. Default is False. | |
583 | |
584 Returns | |
585 ------- | |
586 so_ext : str | |
587 The shared library extension. | |
588 | |
589 Notes | |
590 ----- | |
591 For Python shared libs, `so_ext` will typically be '.so' on Linux and OS X, | |
592 and '.pyd' on Windows. For Python >= 3.2 `so_ext` has a tag prepended on | |
593 POSIX systems according to PEP 3149. For Python 3.2 this is implemented on | |
594 Linux, but not on OS X. | |
595 | |
596 """ | |
597 confvars = distutils.sysconfig.get_config_vars() | |
598 # SO is deprecated in 3.3.1, use EXT_SUFFIX instead | |
599 so_ext = confvars.get('EXT_SUFFIX', None) | |
600 if so_ext is None: | |
601 so_ext = confvars.get('SO', '') | |
602 | |
603 if not is_python_ext: | |
604 # hardcode known values, config vars (including SHLIB_SUFFIX) are | |
605 # unreliable (see #3182) | |
606 # darwin, windows and debug linux are wrong in 3.3.1 and older | |
607 if (sys.platform.startswith('linux') or | |
608 sys.platform.startswith('gnukfreebsd')): | |
609 so_ext = '.so' | |
610 elif sys.platform.startswith('darwin'): | |
611 so_ext = '.dylib' | |
612 elif sys.platform.startswith('win'): | |
613 so_ext = '.dll' | |
614 else: | |
615 # fall back to config vars for unknown platforms | |
616 # fix long extension for Python >=3.2, see PEP 3149. | |
617 if 'SOABI' in confvars: | |
618 # Does nothing unless SOABI config var exists | |
619 so_ext = so_ext.replace('.' + confvars.get('SOABI'), '', 1) | |
620 | |
621 return so_ext | |
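
# Illustrative values only; the exact strings depend on the platform and the
# Python build:
#
#     get_shared_lib_extension()                    # e.g. '.so' (Linux),
#                                                   # '.dylib' (OS X),
#                                                   # '.dll' (Windows)
#     get_shared_lib_extension(is_python_ext=True)  # e.g. '.so' or '.pyd'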
622 | |
623 def get_data_files(data): | |
624 if is_string(data): | |
625 return [data] | |
626 sources = data[1] | |
627 filenames = [] | |
628 for s in sources: | |
629 if hasattr(s, '__call__'): | |
630 continue | |
631 if is_local_src_dir(s): | |
632 filenames.extend(list(general_source_files(s))) | |
633 elif is_string(s): | |
634 if os.path.isfile(s): | |
635 filenames.append(s) | |
636 else: | |
637 print('Not existing data file:', s) | |
638 else: | |
639 raise TypeError(repr(s)) | |
640 return filenames | |
641 | |
642 def dot_join(*args): | |
643 return '.'.join([a for a in args if a]) | |
644 | |
645 def get_frame(level=0): | |
646 """Return frame object from call stack with given level. | |
647 """ | |
648 try: | |
649 return sys._getframe(level+1) | |
650 except AttributeError: | |
651 frame = sys.exc_info()[2].tb_frame | |
652 for _ in range(level+1): | |
653 frame = frame.f_back | |
654 return frame | |
655 | |
656 | |
657 ###################### | |
658 | |
659 class Configuration(object): | |
660 | |
661 _list_keys = ['packages', 'ext_modules', 'data_files', 'include_dirs', | |
662 'libraries', 'headers', 'scripts', 'py_modules', | |
663 'installed_libraries', 'define_macros'] | |
664 _dict_keys = ['package_dir', 'installed_pkg_config'] | |
665 _extra_keys = ['name', 'version'] | |
666 | |
667 numpy_include_dirs = [] | |
668 | |
669 def __init__(self, | |
670 package_name=None, | |
671 parent_name=None, | |
672 top_path=None, | |
673 package_path=None, | |
674 caller_level=1, | |
675 setup_name='setup.py', | |
676 **attrs): | |
677 """Construct configuration instance of a package. | |
678 | |
679 package_name -- name of the package | |
680 Ex.: 'distutils' | |
681 parent_name -- name of the parent package | |
682 Ex.: 'numpy' | |
683 top_path -- directory of the toplevel package | |
684 Ex.: the directory where the numpy package source sits | |
685 package_path -- directory of package. Will be computed by magic from the | |
686 directory of the caller module if not specified | |
687 Ex.: the directory where numpy.distutils is | |
688 caller_level -- frame level to caller namespace, internal parameter. | |
689 """ | |
690 self.name = dot_join(parent_name, package_name) | |
691 self.version = None | |
692 | |
693 caller_frame = get_frame(caller_level) | |
694 self.local_path = get_path_from_frame(caller_frame, top_path) | |
695 # local_path -- directory of a file (usually setup.py) that | |
696 # defines a configuration() function. | |
697 # local_path -- directory of a file (usually setup.py) that | |
698 # defines a configuration() function. | |
699 if top_path is None: | |
700 top_path = self.local_path | |
701 self.local_path = '' | |
702 if package_path is None: | |
703 package_path = self.local_path | |
704 elif os.path.isdir(njoin(self.local_path, package_path)): | |
705 package_path = njoin(self.local_path, package_path) | |
706 if not os.path.isdir(package_path or '.'): | |
707 raise ValueError("%r is not a directory" % (package_path,)) | |
708 self.top_path = top_path | |
709 self.package_path = package_path | |
710 # this is the relative path in the installed package | |
711 self.path_in_package = os.path.join(*self.name.split('.')) | |
712 | |
713 self.list_keys = self._list_keys[:] | |
714 self.dict_keys = self._dict_keys[:] | |
715 | |
716 for n in self.list_keys: | |
717 v = copy.copy(attrs.get(n, [])) | |
718 setattr(self, n, as_list(v)) | |
719 | |
720 for n in self.dict_keys: | |
721 v = copy.copy(attrs.get(n, {})) | |
722 setattr(self, n, v) | |
723 | |
724 known_keys = self.list_keys + self.dict_keys | |
725 self.extra_keys = self._extra_keys[:] | |
726 for n in attrs.keys(): | |
727 if n in known_keys: | |
728 continue | |
729 a = attrs[n] | |
730 setattr(self, n, a) | |
731 if isinstance(a, list): | |
732 self.list_keys.append(n) | |
733 elif isinstance(a, dict): | |
734 self.dict_keys.append(n) | |
735 else: | |
736 self.extra_keys.append(n) | |
737 | |
738 if os.path.exists(njoin(package_path, '__init__.py')): | |
739 self.packages.append(self.name) | |
740 self.package_dir[self.name] = package_path | |
741 | |
742 self.options = dict( | |
743 ignore_setup_xxx_py = False, | |
744 assume_default_configuration = False, | |
745 delegate_options_to_subpackages = False, | |
746 quiet = False, | |
747 ) | |
748 | |
749 caller_instance = None | |
750 for i in range(1, 3): | |
751 try: | |
752 f = get_frame(i) | |
753 except ValueError: | |
754 break | |
755 try: | |
756 caller_instance = eval('self', f.f_globals, f.f_locals) | |
757 break | |
758 except NameError: | |
759 pass | |
760 if isinstance(caller_instance, self.__class__): | |
761 if caller_instance.options['delegate_options_to_subpackages']: | |
762 self.set_options(**caller_instance.options) | |
763 | |
764 self.setup_name = setup_name | |
765 | |
766 def todict(self): | |
767 """ | |
768 Return a dictionary compatible with the keyword arguments of distutils | |
769 setup function. | |
770 | |
771 Examples | |
772 -------- | |
773 >>> setup(**config.todict()) #doctest: +SKIP | |
774 """ | |
775 | |
776 self._optimize_data_files() | |
777 d = {} | |
778 known_keys = self.list_keys + self.dict_keys + self.extra_keys | |
779 for n in known_keys: | |
780 a = getattr(self, n) | |
781 if a: | |
782 d[n] = a | |
783 return d | |
784 | |
785 def info(self, message): | |
786 if not self.options['quiet']: | |
787 print(message) | |
788 | |
789 def warn(self, message): | |
790 sys.stderr.write('Warning: %s' % (message,)) | |
791 | |
792 def set_options(self, **options): | |
793 """ | |
794 Configure Configuration instance. | |
795 | |
796 The following options are available: | |
797 - ignore_setup_xxx_py | |
798 - assume_default_configuration | |
799 - delegate_options_to_subpackages | |
800 - quiet | |
801 | |
802 """ | |
803 for key, value in options.items(): | |
804 if key in self.options: | |
805 self.options[key] = value | |
806 else: | |
807 raise ValueError('Unknown option: '+key) | |
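
# Illustrative usage sketch: only the four option names listed above are
# accepted, anything else raises ValueError:
#
#     config.set_options(quiet=True,
#                        assume_default_configuration=True)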
808 | |
809 def get_distribution(self): | |
810 """Return the distutils distribution object for self.""" | |
811 from numpy.distutils.core import get_distribution | |
812 return get_distribution() | |
813 | |
814 def _wildcard_get_subpackage(self, subpackage_name, | |
815 parent_name, | |
816 caller_level = 1): | |
817 l = subpackage_name.split('.') | |
818 subpackage_path = njoin([self.local_path]+l) | |
819 dirs = [_m for _m in glob.glob(subpackage_path) if os.path.isdir(_m)] | |
820 config_list = [] | |
821 for d in dirs: | |
822 if not os.path.isfile(njoin(d, '__init__.py')): | |
823 continue | |
824 if 'build' in d.split(os.sep): | |
825 continue | |
826 n = '.'.join(d.split(os.sep)[-len(l):]) | |
827 c = self.get_subpackage(n, | |
828 parent_name = parent_name, | |
829 caller_level = caller_level+1) | |
830 config_list.extend(c) | |
831 return config_list | |
832 | |
833 def _get_configuration_from_setup_py(self, setup_py, | |
834 subpackage_name, | |
835 subpackage_path, | |
836 parent_name, | |
837 caller_level = 1): | |
838 # In case setup_py imports local modules: | |
839 sys.path.insert(0, os.path.dirname(setup_py)) | |
840 try: | |
841 fo_setup_py = open(setup_py, 'U') | |
842 setup_name = os.path.splitext(os.path.basename(setup_py))[0] | |
843 n = dot_join(self.name, subpackage_name, setup_name) | |
844 setup_module = imp.load_module('_'.join(n.split('.')), | |
845 fo_setup_py, | |
846 setup_py, | |
847 ('.py', 'U', 1)) | |
848 fo_setup_py.close() | |
849 if not hasattr(setup_module, 'configuration'): | |
850 if not self.options['assume_default_configuration']: | |
851 self.warn('Assuming default configuration '\ | |
852 '(%s does not define configuration())'\ | |
853 % (setup_module)) | |
854 config = Configuration(subpackage_name, parent_name, | |
855 self.top_path, subpackage_path, | |
856 caller_level = caller_level + 1) | |
857 else: | |
858 pn = dot_join(*([parent_name] + subpackage_name.split('.')[:-1])) | |
859 args = (pn,) | |
860 def fix_args_py2(args): | |
861 if setup_module.configuration.__code__.co_argcount > 1: | |
862 args = args + (self.top_path,) | |
863 return args | |
864 def fix_args_py3(args): | |
865 if setup_module.configuration.__code__.co_argcount > 1: | |
866 args = args + (self.top_path,) | |
867 return args | |
868 if sys.version_info[0] < 3: | |
869 args = fix_args_py2(args) | |
870 else: | |
871 args = fix_args_py3(args) | |
872 config = setup_module.configuration(*args) | |
873 if config.name!=dot_join(parent_name, subpackage_name): | |
874 self.warn('Subpackage %r configuration returned as %r' % \ | |
875 (dot_join(parent_name, subpackage_name), config.name)) | |
876 finally: | |
877 del sys.path[0] | |
878 return config | |
879 | |
880 def get_subpackage(self,subpackage_name, | |
881 subpackage_path=None, | |
882 parent_name=None, | |
883 caller_level = 1): | |
884 """Return list of subpackage configurations. | |
885 | |
886 Parameters | |
887 ---------- | |
888 subpackage_name : str or None | |
889 Name of the subpackage to get the configuration. '*' in | |
890 subpackage_name is handled as a wildcard. | |
891 subpackage_path : str | |
892 If None, then the path is assumed to be the local path plus the | |
893 subpackage_name. If a setup.py file is not found in the | |
894 subpackage_path, then a default configuration is used. | |
895 parent_name : str | |
896 Parent name. | |
897 """ | |
898 if subpackage_name is None: | |
899 if subpackage_path is None: | |
900 raise ValueError( | |
901 "either subpackage_name or subpackage_path must be specified") | |
902 subpackage_name = os.path.basename(subpackage_path) | |
903 | |
904 # handle wildcards | |
905 l = subpackage_name.split('.') | |
906 if subpackage_path is None and '*' in subpackage_name: | |
907 return self._wildcard_get_subpackage(subpackage_name, | |
908 parent_name, | |
909 caller_level = caller_level+1) | |
910 assert '*' not in subpackage_name, repr((subpackage_name, subpackage_path, parent_name)) | |
911 if subpackage_path is None: | |
912 subpackage_path = njoin([self.local_path] + l) | |
913 else: | |
914 subpackage_path = njoin([subpackage_path] + l[:-1]) | |
915 subpackage_path = self.paths([subpackage_path])[0] | |
916 setup_py = njoin(subpackage_path, self.setup_name) | |
917 if not self.options['ignore_setup_xxx_py']: | |
918 if not os.path.isfile(setup_py): | |
919 setup_py = njoin(subpackage_path, | |
920 'setup_%s.py' % (subpackage_name)) | |
921 if not os.path.isfile(setup_py): | |
922 if not self.options['assume_default_configuration']: | |
923 self.warn('Assuming default configuration '\ | |
924 '(%s/{setup_%s,setup}.py was not found)' \ | |
925 % (os.path.dirname(setup_py), subpackage_name)) | |
926 config = Configuration(subpackage_name, parent_name, | |
927 self.top_path, subpackage_path, | |
928 caller_level = caller_level+1) | |
929 else: | |
930 config = self._get_configuration_from_setup_py( | |
931 setup_py, | |
932 subpackage_name, | |
933 subpackage_path, | |
934 parent_name, | |
935 caller_level = caller_level + 1) | |
936 if config: | |
937 return [config] | |
938 else: | |
939 return [] | |
940 | |
941 def add_subpackage(self,subpackage_name, | |
942 subpackage_path=None, | |
943 standalone = False): | |
944 """Add a sub-package to the current Configuration instance. | |
945 | |
946 This is useful in a setup.py script for adding sub-packages to a | |
947 package. | |
948 | |
949 Parameters | |
950 ---------- | |
951 subpackage_name : str | |
952 name of the subpackage | |
953 subpackage_path : str | |
954 if given, the path to the subpackage, such that the subpackage is | |
955 located at subpackage_path / subpackage_name. If None, the subpackage | |
956 is assumed to be located in the local path / subpackage_name. | |
957 standalone : bool | |
958 """ | |
959 | |
960 if standalone: | |
961 parent_name = None | |
962 else: | |
963 parent_name = self.name | |
964 config_list = self.get_subpackage(subpackage_name, subpackage_path, | |
965 parent_name = parent_name, | |
966 caller_level = 2) | |
967 if not config_list: | |
968 self.warn('No configuration returned, assuming unavailable.') | |
969 for config in config_list: | |
970 d = config | |
971 if isinstance(config, Configuration): | |
972 d = config.todict() | |
973 assert isinstance(d, dict), repr(type(d)) | |
974 | |
975 self.info('Appending %s configuration to %s' \ | |
976 % (d.get('name'), self.name)) | |
977 self.dict_append(**d) | |
978 | |
979 dist = self.get_distribution() | |
980 if dist is not None: | |
981 self.warn('distutils distribution has been initialized,'\ | |
982 ' it may be too late to add a subpackage '+ subpackage_name) | |
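
# Illustrative sketch of the typical numpy.distutils setup.py pattern built
# around add_subpackage (the names 'mypkg' and 'core' are hypothetical):
#
#     def configuration(parent_package='', top_path=None):
#         from numpy.distutils.misc_util import Configuration
#         config = Configuration('mypkg', parent_package, top_path)
#         config.add_subpackage('core')   # looks for core/setup.py in the local path
#         return config
#
#     if __name__ == '__main__':
#         from numpy.distutils.core import setup
#         setup(configuration=configuration)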
983 | |
984 def add_data_dir(self, data_path): | |
985 """Recursively add files under data_path to data_files list. | |
986 | |
987 Recursively add files under data_path to the list of data_files to be | |
988 installed (and distributed). The data_path can be either a relative | |
989 path-name, or an absolute path-name, or a 2-tuple where the first | |
990 argument shows where in the install directory the data directory | |
991 should be installed to. | |
992 | |
993 Parameters | |
994 ---------- | |
995 data_path : seq or str | |
996 Argument can be either | |
997 | |
998 * 2-sequence (<datadir suffix>, <path to data directory>) | |
999 * path to data directory where python datadir suffix defaults | |
1000 to package dir. | |
1001 | |
1002 Notes | |
1003 ----- | |
1004 Rules for installation paths: | |
1005 foo/bar -> (foo/bar, foo/bar) -> parent/foo/bar | |
1006 (gun, foo/bar) -> parent/gun | |
1007 foo/* -> (foo/a, foo/a), (foo/b, foo/b) -> parent/foo/a, parent/foo/b | |
1008 (gun, foo/*) -> (gun, foo/a), (gun, foo/b) -> gun | |
1009 (gun/*, foo/*) -> parent/gun/a, parent/gun/b | |
1010 /foo/bar -> (bar, /foo/bar) -> parent/bar | |
1011 (gun, /foo/bar) -> parent/gun | |
1012 (fun/*/gun/*, sun/foo/bar) -> parent/fun/foo/gun/bar | |
1013 | |
1014 Examples | |
1015 -------- | |
1016 For example suppose the source directory contains fun/foo.dat and | |
1017 fun/bar/car.dat:: | |
1018 | |
1019 >>> self.add_data_dir('fun') #doctest: +SKIP | |
1020 >>> self.add_data_dir(('sun', 'fun')) #doctest: +SKIP | |
1021 >>> self.add_data_dir(('gun', '/full/path/to/fun'))#doctest: +SKIP | |
1022 | |
1023 Will install data-files to the locations:: | |
1024 | |
1025 <package install directory>/ | |
1026 fun/ | |
1027 foo.dat | |
1028 bar/ | |
1029 car.dat | |
1030 sun/ | |
1031 foo.dat | |
1032 bar/ | |
1033 car.dat | |
1034 gun/ | |
1035 foo.dat | |
1036 car.dat | |
1037 """ | |
1038 if is_sequence(data_path): | |
1039 d, data_path = data_path | |
1040 else: | |
1041 d = None | |
1042 if is_sequence(data_path): | |
1043 [self.add_data_dir((d, p)) for p in data_path] | |
1044 return | |
1045 if not is_string(data_path): | |
1046 raise TypeError("not a string: %r" % (data_path,)) | |
1047 if d is None: | |
1048 if os.path.isabs(data_path): | |
1049 return self.add_data_dir((os.path.basename(data_path), data_path)) | |
1050 return self.add_data_dir((data_path, data_path)) | |
1051 paths = self.paths(data_path, include_non_existing=False) | |
1052 if is_glob_pattern(data_path): | |
1053 if is_glob_pattern(d): | |
1054 pattern_list = allpath(d).split(os.sep) | |
1055 pattern_list.reverse() | |
1056 # /a/*//b/ -> /a/*/b | |
1057 rl = list(range(len(pattern_list)-1)); rl.reverse() | |
1058 for i in rl: | |
1059 if not pattern_list[i]: | |
1060 del pattern_list[i] | |
1061 # | |
1062 for path in paths: | |
1063 if not os.path.isdir(path): | |
1064 print('Not a directory, skipping', path) | |
1065 continue | |
1066 rpath = rel_path(path, self.local_path) | |
1067 path_list = rpath.split(os.sep) | |
1068 path_list.reverse() | |
1069 target_list = [] | |
1070 i = 0 | |
1071 for s in pattern_list: | |
1072 if is_glob_pattern(s): | |
1073 if i>=len(path_list): | |
1074 raise ValueError('cannot fill pattern %r with %r' \ | |
1075 % (d, path)) | |
1076 target_list.append(path_list[i]) | |
1077 else: | |
1078 assert s==path_list[i], repr((s, path_list[i], data_path, d, path, rpath)) | |
1079 target_list.append(s) | |
1080 i += 1 | |
1081 if path_list[i:]: | |
1082 self.warn('mismatch of pattern_list=%s and path_list=%s'\ | |
1083 % (pattern_list, path_list)) | |
1084 target_list.reverse() | |
1085 self.add_data_dir((os.sep.join(target_list), path)) | |
1086 else: | |
1087 for path in paths: | |
1088 self.add_data_dir((d, path)) | |
1089 return | |
1090 assert not is_glob_pattern(d), repr(d) | |
1091 | |
1092 dist = self.get_distribution() | |
1093 if dist is not None and dist.data_files is not None: | |
1094 data_files = dist.data_files | |
1095 else: | |
1096 data_files = self.data_files | |
1097 | |
1098 for path in paths: | |
1099 for d1, f in list(general_source_directories_files(path)): | |
1100 target_path = os.path.join(self.path_in_package, d, d1) | |
1101 data_files.append((target_path, f)) | |
1102 | |
1103 def _optimize_data_files(self): | |
1104 data_dict = {} | |
1105 for p, files in self.data_files: | |
1106 if p not in data_dict: | |
1107 data_dict[p] = set() | |
1108 for f in files: | |
1109 data_dict[p].add(f) | |
1110 self.data_files[:] = [(p, list(files)) for p, files in data_dict.items()] | |
1111 | |
1112 def add_data_files(self,*files): | |
1113 """Add data files to configuration data_files. | |
1114 | |
1115 Parameters | |
1116 ---------- | |
1117 files : sequence | |
1118 Argument(s) can be either | |
1119 | |
1120 * 2-sequence (<datadir prefix>,<path to data file(s)>) | |
1121 * paths to data files where python datadir prefix defaults | |
1122 to package dir. | |
1123 | |
1124 Notes | |
1125 ----- | |
1126 The form of each element of the files sequence is very flexible | |
1127 allowing many combinations of where to get the files from the package | |
1128 and where they should ultimately be installed on the system. The most | |
1129 basic usage is for an element of the files argument sequence to be a | |
1130 simple filename. This will cause that file from the local path to be | |
1131 installed to the installation path of the self.name package (package | |
1132 path). The file argument can also be a relative path in which case the | |
1133 entire relative path will be installed into the package directory. | |
1134 Finally, the file can be an absolute path name in which case the file | |
1135 will be found at the absolute path name but installed to the package | |
1136 path. | |
1137 | |
1138 This basic behavior can be augmented by passing a 2-tuple in as the | |
1139 file argument. The first element of the tuple should specify the | |
1140 relative path (under the package install directory) where the | |
1141 remaining sequence of files should be installed to (it has nothing to | |
1142 do with the file-names in the source distribution). The second element | |
1143 of the tuple is the sequence of files that should be installed. The | |
1144 files in this sequence can be filenames, relative paths, or absolute | |
1145 paths. For absolute paths the file will be installed in the top-level | |
1146 package installation directory (regardless of the first argument). | |
1147 Filenames and relative path names will be installed in the package | |
1148 install directory under the path name given as the first element of | |
1149 the tuple. | |
1150 | |
1151 Rules for installation paths: | |
1152 | |
1153 #. file.txt -> (., file.txt)-> parent/file.txt | |
1154 #. foo/file.txt -> (foo, foo/file.txt) -> parent/foo/file.txt | |
1155 #. /foo/bar/file.txt -> (., /foo/bar/file.txt) -> parent/file.txt | |
1156 #. *.txt -> parent/a.txt, parent/b.txt | |
1157 #. foo/*.txt -> parent/foo/a.txt, parent/foo/b.txt | |
1158 #. */*.txt -> (*, */*.txt) -> parent/c/a.txt, parent/d/b.txt | |
1159 #. (sun, file.txt) -> parent/sun/file.txt | |
1160 #. (sun, bar/file.txt) -> parent/sun/file.txt | |
1161 #. (sun, /foo/bar/file.txt) -> parent/sun/file.txt | |
1162 #. (sun, *.txt) -> parent/sun/a.txt, parent/sun/b.txt | |
1163 #. (sun, bar/*.txt) -> parent/sun/a.txt, parent/sun/b.txt | |
1164 #. (sun/*, */*.txt) -> parent/sun/c/a.txt, parent/d/b.txt | |
1165 | |
1166 An additional feature is that the path to a data-file can actually be | |
1167 a function that takes no arguments and returns the actual path(s) to | |
1168 the data-files. This is useful when the data files are generated while | |
1169 building the package. | |
1170 | |
1171 Examples | |
1172 -------- | |
1173 Add files to the list of data_files to be included with the package. | |
1174 | |
1175 >>> self.add_data_files('foo.dat', | |
1176 ... ('fun', ['gun.dat', 'nun/pun.dat', '/tmp/sun.dat']), | |
1177 ... 'bar/cat.dat', | |
1178 ... '/full/path/to/can.dat') #doctest: +SKIP | |
1179 | |
1180 will install these data files to:: | |
1181 | |
1182 <package install directory>/ | |
1183 foo.dat | |
1184 fun/ | |
1185 gun.dat | |
1186 nun/ | |
1187 pun.dat | |
1188 sun.dat | |
1189 bar/ | |
1190 cat.dat | |
1191 can.dat | |
1192 | |
1193 where <package install directory> is the package (or sub-package) | |
1194 directory such as '/usr/lib/python2.4/site-packages/mypackage' | |
1195 ('C:\\Python2.4\\Lib\\site-packages\\mypackage') or | |
1196 '/usr/lib/python2.4/site-packages/mypackage/mysubpackage' | |
1197 ('C:\\Python2.4\\Lib\\site-packages\\mypackage\\mysubpackage'). | |
1198 """ | |
1199 | |
1200 if len(files)>1: | |
1201 for f in files: | |
1202 self.add_data_files(f) | |
1203 return | |
1204 assert len(files)==1 | |
1205 if is_sequence(files[0]): | |
1206 d, files = files[0] | |
1207 else: | |
1208 d = None | |
1209 if is_string(files): | |
1210 filepat = files | |
1211 elif is_sequence(files): | |
1212 if len(files)==1: | |
1213 filepat = files[0] | |
1214 else: | |
1215 for f in files: | |
1216 self.add_data_files((d, f)) | |
1217 return | |
1218 else: | |
1219 raise TypeError(repr(type(files))) | |
1220 | |
1221 if d is None: | |
1222 if hasattr(filepat, '__call__'): | |
1223 d = '' | |
1224 elif os.path.isabs(filepat): | |
1225 d = '' | |
1226 else: | |
1227 d = os.path.dirname(filepat) | |
1228 self.add_data_files((d, files)) | |
1229 return | |
1230 | |
1231 paths = self.paths(filepat, include_non_existing=False) | |
1232 if is_glob_pattern(filepat): | |
1233 if is_glob_pattern(d): | |
1234 pattern_list = d.split(os.sep) | |
1235 pattern_list.reverse() | |
1236 for path in paths: | |
1237 path_list = path.split(os.sep) | |
1238 path_list.reverse() | |
1239 path_list.pop() # filename | |
1240 target_list = [] | |
1241 i = 0 | |
1242 for s in pattern_list: | |
1243 if is_glob_pattern(s): | |
1244 target_list.append(path_list[i]) | |
1245 i += 1 | |
1246 else: | |
1247 target_list.append(s) | |
1248 target_list.reverse() | |
1249 self.add_data_files((os.sep.join(target_list), path)) | |
1250 else: | |
1251 self.add_data_files((d, paths)) | |
1252 return | |
1253 assert not is_glob_pattern(d), repr((d, filepat)) | |
1254 | |
1255 dist = self.get_distribution() | |
1256 if dist is not None and dist.data_files is not None: | |
1257 data_files = dist.data_files | |
1258 else: | |
1259 data_files = self.data_files | |
1260 | |
1261 data_files.append((os.path.join(self.path_in_package, d), paths)) | |
1262 | |
1263 ### XXX Implement add_py_modules | |
1264 | |
1265 def add_define_macros(self, macros): | |
1266 """Add define macros to configuration | |
1267 | |
1268 Add the given sequence of (macro name, value) pairs to the beginning | |
1269 of the define_macros list. This list will be visible to all extension | |
1270 modules of the current package. | |
1271 """ | |
1272 dist = self.get_distribution() | |
1273 if dist is not None: | |
1274 if not hasattr(dist, 'define_macros'): | |
1275 dist.define_macros = [] | |
1276 dist.define_macros.extend(macros) | |
1277 else: | |
1278 self.define_macros.extend(macros) | |
1279 | |
1280 | |
1281 def add_include_dirs(self,*paths): | |
1282 """Add paths to configuration include directories. | |
1283 | |
1284 Add the given sequence of paths to the beginning of the include_dirs | |
1285 list. This list will be visible to all extension modules of the | |
1286 current package. | |
1287 """ | |
1288 include_dirs = self.paths(paths) | |
1289 dist = self.get_distribution() | |
1290 if dist is not None: | |
1291 if dist.include_dirs is None: | |
1292 dist.include_dirs = [] | |
1293 dist.include_dirs.extend(include_dirs) | |
1294 else: | |
1295 self.include_dirs.extend(include_dirs) | |
1296 | |
1297 def add_headers(self,*files): | |
1298 """Add installable headers to configuration. | |
1299 | |
1300 Add the given sequence of files to the beginning of the headers list. | |
1301 By default, headers will be installed under the | |
1302 <python-include>/<self.name.replace('.','/')>/ directory. If an item of files | |
1303 is a tuple, then its first argument specifies the actual installation | |
1304 location relative to the <python-include> path. | |
1305 | |
1306 Parameters | |
1307 ---------- | |
1308 files : str or seq | |
1309 Argument(s) can be either: | |
1310 | |
1311 * 2-sequence (<includedir suffix>,<path to header file(s)>) | |
1312 * path(s) to header file(s) where python includedir suffix will | |
1313 default to package name. | |
1314 """ | |
1315 headers = [] | |
1316 for path in files: | |
1317 if is_string(path): | |
1318 [headers.append((self.name, p)) for p in self.paths(path)] | |
1319 else: | |
1320 if not isinstance(path, (tuple, list)) or len(path) != 2: | |
1321 raise TypeError(repr(path)) | |
1322 [headers.append((path[0], p)) for p in self.paths(path[1])] | |
1323 dist = self.get_distribution() | |
1324 if dist is not None: | |
1325 if dist.headers is None: | |
1326 dist.headers = [] | |
1327 dist.headers.extend(headers) | |
1328 else: | |
1329 self.headers.extend(headers) | |
1330 | |
1331 def paths(self,*paths,**kws): | |
1332 """Apply glob to paths and prepend local_path if needed. | |
1333 | |
1334 Applies glob.glob(...) to each path in the sequence (if needed) and | |
1335 pre-pends the local_path if needed. Because this is called on all | |
1336 source lists, this allows wildcard characters to be specified in lists | |
1337 of sources for extension modules and libraries and scripts and allows | |
1338 path-names to be relative to the source directory. | |
1339 | |
1340 """ | |
1341 include_non_existing = kws.get('include_non_existing', True) | |
1342 return gpaths(paths, | |
1343 local_path = self.local_path, | |
1344 include_non_existing=include_non_existing) | |
1345 | |
1346 def _fix_paths_dict(self, kw): | |
1347 for k in kw.keys(): | |
1348 v = kw[k] | |
1349 if k in ['sources', 'depends', 'include_dirs', 'library_dirs', | |
1350 'module_dirs', 'extra_objects']: | |
1351 new_v = self.paths(v) | |
1352 kw[k] = new_v | |
1353 | |
1354 def add_extension(self,name,sources,**kw): | |
1355 """Add extension to configuration. | |
1356 | |
1357 Create and add an Extension instance to the ext_modules list. This | |
1358 method also takes the following optional keyword arguments that are | |
1359 passed on to the Extension constructor. | |
1360 | |
1361 Parameters | |
1362 ---------- | |
1363 name : str | |
1364 name of the extension | |
1365 sources : seq | |
1366 list of the sources. The list of sources may contain functions | |
1367 (called source generators) which must take an extension instance | |
1368 and a build directory as inputs and return a source file or list of | |
1369 source files or None. If None is returned then no sources are | |
1370 generated. If the Extension instance has no sources after | |
1371 processing all source generators, then no extension module is | |
1372 built. | |
1373 include_dirs : | |
1374 define_macros : | |
1375 undef_macros : | |
1376 library_dirs : | |
1377 libraries : | |
1378 runtime_library_dirs : | |
1379 extra_objects : | |
1380 extra_compile_args : | |
1381 extra_link_args : | |
1382 extra_f77_compile_args : | |
1383 extra_f90_compile_args : | |
1384 export_symbols : | |
1385 swig_opts : | |
1386 depends : | |
1387 The depends list contains paths to files or directories that the | |
1388 sources of the extension module depend on. If any path in the | |
1389 depends list is newer than the extension module, then the module | |
1390 will be rebuilt. | |
1391 language : | |
1392 f2py_options : | |
1393 module_dirs : | |
1394 extra_info : dict or list | |
1395 dict or list of dict of keywords to be appended to keywords. | |
1396 | |
1397 Notes | |
1398 ----- | |
1399 The self.paths(...) method is applied to all lists that may contain | |
1400 paths. | |
1401 """ | |
1402 ext_args = copy.copy(kw) | |
1403 ext_args['name'] = dot_join(self.name, name) | |
1404 ext_args['sources'] = sources | |
1405 | |
1406 if 'extra_info' in ext_args: | |
1407 extra_info = ext_args['extra_info'] | |
1408 del ext_args['extra_info'] | |
1409 if isinstance(extra_info, dict): | |
1410 extra_info = [extra_info] | |
1411 for info in extra_info: | |
1412 assert isinstance(info, dict), repr(info) | |
1413 dict_append(ext_args,**info) | |
1414 | |
1415 self._fix_paths_dict(ext_args) | |
1416 | |
1417 # Resolve out-of-tree dependencies | |
1418 libraries = ext_args.get('libraries', []) | |
1419 libnames = [] | |
1420 ext_args['libraries'] = [] | |
1421 for libname in libraries: | |
1422 if isinstance(libname, tuple): | |
1423 self._fix_paths_dict(libname[1]) | |
1424 | |
1425 # Handle library names of the form libname@relative/path/to/library | |
1426 if '@' in libname: | |
1427 lname, lpath = libname.split('@', 1) | |
1428 lpath = os.path.abspath(njoin(self.local_path, lpath)) | |
1429 if os.path.isdir(lpath): | |
1430 c = self.get_subpackage(None, lpath, | |
1431 caller_level = 2) | |
1432 if isinstance(c, Configuration): | |
1433 c = c.todict() | |
1434 for l in [l[0] for l in c.get('libraries', [])]: | |
1435 llname = l.split('__OF__', 1)[0] | |
1436 if llname == lname: | |
1437 c.pop('name', None) | |
1438 dict_append(ext_args,**c) | |
1439 break | |
1440 continue | |
1441 libnames.append(libname) | |
1442 | |
1443 ext_args['libraries'] = libnames + ext_args['libraries'] | |
1444 ext_args['define_macros'] = \ | |
1445 self.define_macros + ext_args.get('define_macros', []) | |
1446 | |
1447 from numpy.distutils.core import Extension | |
1448 ext = Extension(**ext_args) | |
1449 self.ext_modules.append(ext) | |
1450 | |
1451 dist = self.get_distribution() | |
1452 if dist is not None: | |
1453 self.warn('distutils distribution has been initialized,'\ | |
1454 ' it may be too late to add an extension '+name) | |
1455 return ext | |
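
# Illustrative usage sketch (the extension name, source file and macro are
# hypothetical):
#
#     config.add_extension('_cext',
#                          sources=['_cextmodule.c'],
#                          libraries=['m'],
#                          define_macros=[('MY_MACRO', None)])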
1456 | |
1457 def add_library(self,name,sources,**build_info): | |
1458 """ | |
1459 Add library to configuration. | |
1460 | |
1461 Parameters | |
1462 ---------- | |
1463 name : str | |
1464 Name of the extension. | |
1465 sources : sequence | |
1466 List of the sources. The list of sources may contain functions | |
1467 (called source generators) which must take an extension instance | |
1468 and a build directory as inputs and return a source file or list of | |
1469 source files or None. If None is returned then no sources are | |
1470 generated. If the Extension instance has no sources after | |
1471 processing all source generators, then no extension module is | |
1472 built. | |
1473 build_info : dict, optional | |
1474 The following keys are allowed: | |
1475 | |
1476 * depends | |
1477 * macros | |
1478 * include_dirs | |
1479 * extra_compiler_args | |
1480 * extra_f77_compiler_args | |
1481 * extra_f90_compiler_args | |
1482 * f2py_options | |
1483 * language | |
1484 | |
1485 """ | |
1486 self._add_library(name, sources, None, build_info) | |
1487 | |
1488 dist = self.get_distribution() | |
1489 if dist is not None: | |
1490 self.warn('distutils distribution has been initialized,'\ | |
1491 ' it may be too late to add a library '+ name) | |
1492 | |
1493 def _add_library(self, name, sources, install_dir, build_info): | |
1494 """Common implementation for add_library and add_installed_library. Do | |
1495 not use directly""" | |
1496 build_info = copy.copy(build_info) | |
1497 name = name #+ '__OF__' + self.name | |
1498 build_info['sources'] = sources | |
1499 | |
1500 # Sometimes, depends is not set up to an empty list by default, and if | |
1501 # depends is not given to add_library, distutils barfs (#1134) | |
1502 if not 'depends' in build_info: | |
1503 build_info['depends'] = [] | |
1504 | |
1505 self._fix_paths_dict(build_info) | |
1506 | |
1507 # Add to libraries list so that it is built with build_clib | |
1508 self.libraries.append((name, build_info)) | |
1509 | |
1510 def add_installed_library(self, name, sources, install_dir, build_info=None): | |
1511 """ | |
1512 Similar to add_library, but the specified library is installed. | |
1513 | |
1514 Most C libraries used with `distutils` are only used to build python | |
1515 extensions, but libraries built through this method will be installed | |
1516 so that they can be reused by third-party packages. | |
1517 | |
1518 Parameters | |
1519 ---------- | |
1520 name : str | |
1521 Name of the installed library. | |
1522 sources : sequence | |
1523 List of the library's source files. See `add_library` for details. | |
1524 install_dir : str | |
1525 Path to install the library, relative to the current sub-package. | |
1526 build_info : dict, optional | |
1527 The following keys are allowed: | |
1528 | |
1529 * depends | |
1530 * macros | |
1531 * include_dirs | |
1532 * extra_compiler_args | |
1533 * extra_f77_compiler_args | |
1534 * extra_f90_compiler_args | |
1535 * f2py_options | |
1536 * language | |
1537 | |
1538 Returns | |
1539 ------- | |
1540 None | |
1541 | |
1542 See Also | |
1543 -------- | |
1544 add_library, add_npy_pkg_config, get_info | |
1545 | |
1546 Notes | |
1547 ----- | |
1548 The best way to encode the options required to link against the specified | |
1549 C libraries is to use a "libname.ini" file, and use `get_info` to | |
1550 retrieve the required options (see `add_npy_pkg_config` for more | |
1551 information). | |
1552 | |
1553 """ | |
1554 if not build_info: | |
1555 build_info = {} | |
1556 | |
1557 install_dir = os.path.join(self.package_path, install_dir) | |
1558 self._add_library(name, sources, install_dir, build_info) | |
1559 self.installed_libraries.append(InstallableLib(name, build_info, install_dir)) | |
1560 | |
1561 def add_npy_pkg_config(self, template, install_dir, subst_dict=None): | |
1562 """ | |
1563 Generate and install a npy-pkg config file from a template. | |
1564 | |
1565 The config file generated from `template` is installed in the | |
1566 given install directory, using `subst_dict` for variable substitution. | |
1567 | |
1568 Parameters | |
1569 ---------- | |
1570 template : str | |
1571 The path of the template, relative to the current package path. | |
1572 install_dir : str | |
1573 Where to install the npy-pkg config file, relative to the current | |
1574 package path. | |
1575 subst_dict : dict, optional | |
1576 If given, any string of the form ``@key@`` will be replaced by | |
1577 ``subst_dict[key]`` in the template file when installed. The install | |
1578 prefix is always available through the variable ``@prefix@``, since the | |
1579 install prefix is not easy to get reliably from setup.py. | |
1580 | |
1581 See also | |
1582 -------- | |
1583 add_installed_library, get_info | |
1584 | |
1585 Notes | |
1586 ----- | |
1587 This works for both standard installs and in-place builds, i.e. the | |
1588 ``@prefix@`` refers to the source directory for in-place builds. | |
1589 | |
1590 Examples | |
1591 -------- | |
1592 :: | |
1593 | |
1594 config.add_npy_pkg_config('foo.ini.in', 'lib', {'foo': bar}) | |
1595 | |
1596 Assuming the foo.ini.in file has the following content:: | |
1597 | |
1598 [meta] | |
1599 Name=@foo@ | |
1600 Version=1.0 | |
1601 Description=dummy description | |
1602 | |
1603 [default] | |
1604 Cflags=-I@prefix@/include | |
1605 Libs= | |
1606 | |
1607 The generated file will have the following content:: | |
1608 | |
1609 [meta] | |
1610 Name=bar | |
1611 Version=1.0 | |
1612 Description=dummy description | |
1613 | |
1614 [default] | |
1615 Cflags=-Iprefix_dir/include | |
1616 Libs= | |
1617 | |
1618 and will be installed as foo.ini in the 'lib' subpath. | |
1619 | |
1620 """ | |
1621 if subst_dict is None: | |
1622 subst_dict = {} | |
1623 basename = os.path.splitext(template)[0] | |
1624 template = os.path.join(self.package_path, template) | |
1625 | |
1626 if self.name in self.installed_pkg_config: | |
1627 self.installed_pkg_config[self.name].append((template, install_dir, | |
1628 subst_dict)) | |
1629 else: | |
1630 self.installed_pkg_config[self.name] = [(template, install_dir, | |
1631 subst_dict)] | |
1632 | |
1633 | |
1634 def add_scripts(self,*files): | |
1635 """Add scripts to configuration. | |
1636 | |
1637 Add the sequence of files to the scripts list. | |
1638 Scripts will be installed under the <prefix>/bin/ directory. | |
1639 | |
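For example, with a hypothetical script shipped in the package's
``scripts`` directory::

    config.add_scripts('scripts/do_convert.py')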
1640 """ | |
1641 scripts = self.paths(files) | |
1642 dist = self.get_distribution() | |
1643 if dist is not None: | |
1644 if dist.scripts is None: | |
1645 dist.scripts = [] | |
1646 dist.scripts.extend(scripts) | |
1647 else: | |
1648 self.scripts.extend(scripts) | |
1649 | |
1650 def dict_append(self,**dict): | |
1651 for key in self.list_keys: | |
1652 a = getattr(self, key) | |
1653 a.extend(dict.get(key, [])) | |
1654 for key in self.dict_keys: | |
1655 a = getattr(self, key) | |
1656 a.update(dict.get(key, {})) | |
1657 known_keys = self.list_keys + self.dict_keys + self.extra_keys | |
1658 for key in dict.keys(): | |
1659 if key not in known_keys: | |
1660 a = getattr(self, key, None) | |
1661 if a and a==dict[key]: continue | |
1662 self.warn('Inheriting attribute %r=%r from %r' \ | |
1663 % (key, dict[key], dict.get('name', '?'))) | |
1664 setattr(self, key, dict[key]) | |
1665 self.extra_keys.append(key) | |
1666 elif key in self.extra_keys: | |
1667 self.info('Ignoring attempt to set %r (from %r to %r)' \ | |
1668 % (key, getattr(self, key), dict[key])) | |
1669 elif key in known_keys: | |
1670 # key is already processed above | |
1671 pass | |
1672 else: | |
1673 raise ValueError("Don't know about key=%r" % (key)) | |
1674 | |
1675 def __str__(self): | |
1676 from pprint import pformat | |
1677 known_keys = self.list_keys + self.dict_keys + self.extra_keys | |
1678 s = '<'+5*'-' + '\n' | |
1679 s += 'Configuration of '+self.name+':\n' | |
1680 known_keys.sort() | |
1681 for k in known_keys: | |
1682 a = getattr(self, k, None) | |
1683 if a: | |
1684 s += '%s = %s\n' % (k, pformat(a)) | |
1685 s += 5*'-' + '>' | |
1686 return s | |
1687 | |
1688 def get_config_cmd(self): | |
1689 """ | |
1690 Returns the numpy.distutils config command instance. | |
1691 """ | |
1692 cmd = get_cmd('config') | |
1693 cmd.ensure_finalized() | |
1694 cmd.dump_source = 0 | |
1695 cmd.noisy = 0 | |
1696 old_path = os.environ.get('PATH') | |
1697 if old_path: | |
1698 path = os.pathsep.join(['.', old_path]) | |
1699 os.environ['PATH'] = path | |
1700 return cmd | |
1701 | |
1702 def get_build_temp_dir(self): | |
1703 """ | |
1704 Return a path to a temporary directory where temporary files should be | |
1705 placed. | |
1706 """ | |
1707 cmd = get_cmd('build') | |
1708 cmd.ensure_finalized() | |
1709 return cmd.build_temp | |
1710 | |
1711 def have_f77c(self): | |
1712 """Check for availability of Fortran 77 compiler. | |
1713 | |
1714 Use it inside a source-generating function to ensure that the | |
1715 setup distribution instance has been initialized. | |
1716 | |
1717 Notes | |
1718 ----- | |
1719 Returns True if a Fortran 77 compiler is available (i.e. a simple | |
1720 Fortran 77 program could be compiled successfully). | |
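
A minimal sketch of the intended use, inside a hypothetical
source-generating function::

    def generate_sources(ext, build_dir):
        if config.have_f77c():
            return ['foo_f77_backend.f']   # build the Fortran 77 backend
        return []                          # no Fortran 77 compiler available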
1721 """ | |
1722 simple_fortran_subroutine = ''' | |
1723 subroutine simple | |
1724 end | |
1725 ''' | |
1726 config_cmd = self.get_config_cmd() | |
1727 flag = config_cmd.try_compile(simple_fortran_subroutine, lang='f77') | |
1728 return flag | |
1729 | |
1730 def have_f90c(self): | |
1731 """Check for availability of Fortran 90 compiler. | |
1732 | |
1733 Use it inside a source-generating function to ensure that the | |
1734 setup distribution instance has been initialized. | |
1735 | |
1736 Notes | |
1737 ----- | |
1738 Returns True if a Fortran 90 compiler is available (i.e. a simple | |
1739 Fortran 90 program could be compiled successfully). | |
1740 """ | |
1741 simple_fortran_subroutine = ''' | |
1742 subroutine simple | |
1743 end | |
1744 ''' | |
1745 config_cmd = self.get_config_cmd() | |
1746 flag = config_cmd.try_compile(simple_fortran_subroutine, lang='f90') | |
1747 return flag | |
1748 | |
1749 def append_to(self, extlib): | |
1750 """Append the configuration's libraries and include_dirs to an extension or library item. | |
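
For example, if ``ext`` is a distutils Extension instance (such as one
returned by `add_extension`), then::

    config.append_to(ext)

extends ``ext.libraries`` and ``ext.include_dirs`` with the values held
by this configuration.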
1751 """ | |
1752 if is_sequence(extlib): | |
1753 lib_name, build_info = extlib | |
1754 dict_append(build_info, | |
1755 libraries=self.libraries, | |
1756 include_dirs=self.include_dirs) | |
1757 else: | |
1758 from numpy.distutils.core import Extension | |
1759 assert isinstance(extlib, Extension), repr(extlib) | |
1760 extlib.libraries.extend(self.libraries) | |
1761 extlib.include_dirs.extend(self.include_dirs) | |
1762 | |
1763 def _get_svn_revision(self, path): | |
1764 """Return path's SVN revision number. | |
1765 """ | |
1766 revision = None | |
1767 m = None | |
1768 cwd = os.getcwd() | |
1769 try: | |
1770 os.chdir(path or '.') | |
1771 p = subprocess.Popen(['svnversion'], shell=True, | |
1772 stdout=subprocess.PIPE, stderr=None, | |
1773 close_fds=True) | |
1774 sout = p.stdout | |
1775 m = re.match(r'(?P<revision>\d+)', sout.read()) | |
1776 except: | |
1777 pass | |
1778 os.chdir(cwd) | |
1779 if m: | |
1780 revision = int(m.group('revision')) | |
1781 return revision | |
1782 if sys.platform=='win32' and os.environ.get('SVN_ASP_DOT_NET_HACK', None): | |
1783 entries = njoin(path, '_svn', 'entries') | |
1784 else: | |
1785 entries = njoin(path, '.svn', 'entries') | |
1786 if os.path.isfile(entries): | |
1787 f = open(entries) | |
1788 fstr = f.read() | |
1789 f.close() | |
1790 if fstr[:5] == '<?xml': # pre 1.4 | |
1791 m = re.search(r'revision="(?P<revision>\d+)"', fstr) | |
1792 if m: | |
1793 revision = int(m.group('revision')) | |
1794 else: # non-xml entries file (svn >= 1.4 format) | |
1795 m = re.search(r'dir[\n\r]+(?P<revision>\d+)', fstr) | |
1796 if m: | |
1797 revision = int(m.group('revision')) | |
1798 return revision | |
1799 | |
1800 def _get_hg_revision(self, path): | |
1801 """Return path's Mercurial revision number. | |
1802 """ | |
1803 revision = None | |
1804 m = None | |
1805 cwd = os.getcwd() | |
1806 try: | |
1807 os.chdir(path or '.') | |
1808 p = subprocess.Popen(['hg identify --num'], shell=True, | |
1809 stdout=subprocess.PIPE, stderr=None, | |
1810 close_fds=True) | |
1811 sout = p.stdout | |
1812 m = re.match(r'(?P<revision>\d+)', sout.read()) | |
1813 except: | |
1814 pass | |
1815 os.chdir(cwd) | |
1816 if m: | |
1817 revision = int(m.group('revision')) | |
1818 return revision | |
1819 branch_fn = njoin(path, '.hg', 'branch') | |
1820 branch_cache_fn = njoin(path, '.hg', 'branch.cache') | |
1821 | |
1822 if os.path.isfile(branch_fn): | |
1823 branch0 = None | |
1824 f = open(branch_fn) | |
1825 revision0 = f.read().strip() | |
1826 f.close() | |
1827 | |
1828 branch_map = {} | |
1829 for line in file(branch_cache_fn, 'r'): | |
1830 branch1, revision1 = line.split()[:2] | |
1831 if revision1==revision0: | |
1832 branch0 = branch1 | |
1833 try: | |
1834 revision1 = int(revision1) | |
1835 except ValueError: | |
1836 continue | |
1837 branch_map[branch1] = revision1 | |
1838 | |
1839 revision = branch_map.get(branch0) | |
1840 return revision | |
1841 | |
1842 | |
1843 def get_version(self, version_file=None, version_variable=None): | |
1844 """Try to get version string of a package. | |
1845 | |
1846 Return a version string of the current package or None if the version | |
1847 information could not be detected. | |
1848 | |
1849 Notes | |
1850 ----- | |
1851 This method scans files named __version__.py, | |
1852 <packagename>_version.py, version.py, __svn_version__.py, and | |
1853 __hg_version__.py for string variables version, __version\__, and | |
1854 <packagename>_version, until a version number is found. | |
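
For example, a hypothetical version.py in the package directory
containing::

    version = '1.2.3'

would make ``config.get_version()`` return ``'1.2.3'``.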
1855 """ | |
1856 version = getattr(self, 'version', None) | |
1857 if version is not None: | |
1858 return version | |
1859 | |
1860 # Get version from version file. | |
1861 if version_file is None: | |
1862 files = ['__version__.py', | |
1863 self.name.split('.')[-1]+'_version.py', | |
1864 'version.py', | |
1865 '__svn_version__.py', | |
1866 '__hg_version__.py'] | |
1867 else: | |
1868 files = [version_file] | |
1869 if version_variable is None: | |
1870 version_vars = ['version', | |
1871 '__version__', | |
1872 self.name.split('.')[-1]+'_version'] | |
1873 else: | |
1874 version_vars = [version_variable] | |
1875 for f in files: | |
1876 fn = njoin(self.local_path, f) | |
1877 if os.path.isfile(fn): | |
1878 info = (open(fn), fn, ('.py', 'U', 1)) | |
1879 name = os.path.splitext(os.path.basename(fn))[0] | |
1880 n = dot_join(self.name, name) | |
1881 try: | |
1882 version_module = imp.load_module('_'.join(n.split('.')),*info) | |
1883 except ImportError: | |
1884 msg = get_exception() | |
1885 self.warn(str(msg)) | |
1886 version_module = None | |
1887 if version_module is None: | |
1888 continue | |
1889 | |
1890 for a in version_vars: | |
1891 version = getattr(version_module, a, None) | |
1892 if version is not None: | |
1893 break | |
1894 if version is not None: | |
1895 break | |
1896 | |
1897 if version is not None: | |
1898 self.version = version | |
1899 return version | |
1900 | |
1901 # Get version as SVN or Mercurial revision number | |
1902 revision = self._get_svn_revision(self.local_path) | |
1903 if revision is None: | |
1904 revision = self._get_hg_revision(self.local_path) | |
1905 | |
1906 if revision is not None: | |
1907 version = str(revision) | |
1908 self.version = version | |
1909 | |
1910 return version | |
1911 | |
1912 def make_svn_version_py(self, delete=True): | |
1913 """Append a data function to the data_files list that will generate the | |
1914 __svn_version__.py file in the current package directory. | |
1915 | |
1916 The file is generated from the SVN revision number; it will be removed | |
1917 after Python exits but will be available while sdist and similar | |
1918 commands are executed. | |
1919 | |
1920 Notes | |
1921 ----- | |
1922 If __svn_version__.py already exists, nothing is done. | |
1923 | |
1924 This is intended for working with source directories that are in an | |
1925 SVN repository. | |
1926 | |
1927 """ | |
1928 target = njoin(self.local_path, '__svn_version__.py') | |
1929 revision = self._get_svn_revision(self.local_path) | |
1930 if os.path.isfile(target) or revision is None: | |
1931 return | |
1932 else: | |
1933 def generate_svn_version_py(): | |
1934 if not os.path.isfile(target): | |
1935 version = str(revision) | |
1936 self.info('Creating %s (version=%r)' % (target, version)) | |
1937 f = open(target, 'w') | |
1938 f.write('version = %r\n' % (version)) | |
1939 f.close() | |
1940 | |
1941 import atexit | |
1942 def rm_file(f=target,p=self.info): | |
1943 if delete: | |
1944 try: os.remove(f); p('removed '+f) | |
1945 except OSError: pass | |
1946 try: os.remove(f+'c'); p('removed '+f+'c') | |
1947 except OSError: pass | |
1948 | |
1949 atexit.register(rm_file) | |
1950 | |
1951 return target | |
1952 | |
1953 self.add_data_files(('', generate_svn_version_py())) | |
1954 | |
1955 def make_hg_version_py(self, delete=True): | |
1956 """Append a data function to the data_files list that will generate the | |
1957 __hg_version__.py file in the current package directory. | |
1958 | |
1959 The file is generated from the Mercurial revision; it will be removed | |
1960 after Python exits but will be available while sdist and similar | |
1961 commands are executed. | |
1962 | |
1963 Notes | |
1964 ----- | |
1965 If __hg_version__.py already exists, nothing is done. | |
1966 | |
1967 This is intended for working with source directories that are | |
1968 in a Mercurial repository. | |
1969 """ | |
1970 target = njoin(self.local_path, '__hg_version__.py') | |
1971 revision = self._get_hg_revision(self.local_path) | |
1972 if os.path.isfile(target) or revision is None: | |
1973 return | |
1974 else: | |
1975 def generate_hg_version_py(): | |
1976 if not os.path.isfile(target): | |
1977 version = str(revision) | |
1978 self.info('Creating %s (version=%r)' % (target, version)) | |
1979 f = open(target, 'w') | |
1980 f.write('version = %r\n' % (version)) | |
1981 f.close() | |
1982 | |
1983 import atexit | |
1984 def rm_file(f=target,p=self.info): | |
1985 if delete: | |
1986 try: os.remove(f); p('removed '+f) | |
1987 except OSError: pass | |
1988 try: os.remove(f+'c'); p('removed '+f+'c') | |
1989 except OSError: pass | |
1990 | |
1991 atexit.register(rm_file) | |
1992 | |
1993 return target | |
1994 | |
1995 self.add_data_files(('', generate_hg_version_py())) | |
1996 | |
1997 def make_config_py(self,name='__config__'): | |
1998 """Generate package __config__.py file containing system_info | |
1999 information used during building the package. | |
2000 | |
2001 This file is installed to the | |
2002 package installation directory. | |
2003 | |
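For example, after installation the recorded information can be shown
with (assuming the package is importable as ``mypkg``)::

    from mypkg.__config__ import show
    show()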
2004 """ | |
2005 self.py_modules.append((self.name, name, generate_config_py)) | |
2006 | |
2007 | |
2008 def get_info(self,*names): | |
2009 """Get resources information. | |
2010 | |
2011 Return information (from system_info.get_info) for all of the names in | |
2012 the argument list in a single dictionary. | |
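
For example (the extension is hypothetical; assumes system_info can
locate an optimized LAPACK)::

    config.add_extension('solvers', sources=['solvers.c'],
                         extra_info=config.get_info('lapack_opt'))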
2013 """ | |
2014 from .system_info import get_info, dict_append | |
2015 info_dict = {} | |
2016 for a in names: | |
2017 dict_append(info_dict,**get_info(a)) | |
2018 return info_dict | |
2019 | |
2020 | |
2021 def get_cmd(cmdname, _cache={}): | |
2022 if cmdname not in _cache: | |
2023 import distutils.core | |
2024 dist = distutils.core._setup_distribution | |
2025 if dist is None: | |
2026 from distutils.errors import DistutilsInternalError | |
2027 raise DistutilsInternalError( | |
2028 'setup distribution instance not initialized') | |
2029 cmd = dist.get_command_obj(cmdname) | |
2030 _cache[cmdname] = cmd | |
2031 return _cache[cmdname] | |
2032 | |
2033 def get_numpy_include_dirs(): | |
2034 # numpy_include_dirs are set by numpy/core/setup.py, otherwise [] | |
2035 include_dirs = Configuration.numpy_include_dirs[:] | |
2036 if not include_dirs: | |
2037 import numpy | |
2038 include_dirs = [ numpy.get_include() ] | |
2039 # else running numpy/core/setup.py | |
2040 return include_dirs | |
2041 | |
2042 def get_npy_pkg_dir(): | |
2043 """Return the path of the npy-pkg-config directory.""" | |
2044 # XXX: import here for bootstrapping reasons | |
2045 import numpy | |
2046 d = os.path.join(os.path.dirname(numpy.__file__), | |
2047 'core', 'lib', 'npy-pkg-config') | |
2048 return d | |
2049 | |
2050 def get_pkg_info(pkgname, dirs=None): | |
2051 """ | |
2052 Return library info for the given package. | |
2053 | |
2054 Parameters | |
2055 ---------- | |
2056 pkgname : str | |
2057 Name of the package (should match the name of the .ini file, without | |
2058 the extension, e.g. foo for the file foo.ini). | |
2059 dirs : sequence, optional | |
2060 If given, should be a sequence of additional directories in which to | |
2061 look for npy-pkg-config files. Those directories are searched before | |
2062 the NumPy directory. | |
2063 | |
2064 Returns | |
2065 ------- | |
2066 pkginfo : class instance | |
2067 The `LibraryInfo` instance containing the build information. | |
2068 | |
2069 Raises | |
2070 ------ | |
2071 PkgNotFound | |
2072 If the package is not found. | |
2073 | |
2074 See Also | |
2075 -------- | |
2076 Configuration.add_npy_pkg_config, Configuration.add_installed_library, | |
2077 get_info | |
2078 | |
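Examples
--------
The ``npymath`` library shipped with NumPy, for instance (paths
abbreviated):

>>> info = np.distutils.misc_util.get_pkg_info('npymath')
>>> info.cflags()  #doctest: +SKIP
'-I.../numpy/core/include'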
2079 """ | |
2080 from numpy.distutils.npy_pkg_config import read_config | |
2081 | |
2082 if dirs: | |
2083 dirs.append(get_npy_pkg_dir()) | |
2084 else: | |
2085 dirs = [get_npy_pkg_dir()] | |
2086 return read_config(pkgname, dirs) | |
2087 | |
2088 def get_info(pkgname, dirs=None): | |
2089 """ | |
2090 Return an info dict for a given C library. | |
2091 | |
2092 The info dict contains the necessary options to use the C library. | |
2093 | |
2094 Parameters | |
2095 ---------- | |
2096 pkgname : str | |
2097 Name of the package (should match the name of the .ini file, without | |
2098 the extension, e.g. foo for the file foo.ini). | |
2099 dirs : sequence, optional | |
2100 If given, should be a sequence of additional directories in which to | |
2101 look for npy-pkg-config files. Those directories are searched before | |
2102 the NumPy directory. | |
2103 | |
2104 Returns | |
2105 ------- | |
2106 info : dict | |
2107 The dictionary with build information. | |
2108 | |
2109 Raises | |
2110 ------ | |
2111 PkgNotFound | |
2112 If the package is not found. | |
2113 | |
2114 See Also | |
2115 -------- | |
2116 Configuration.add_npy_pkg_config, Configuration.add_installed_library, | |
2117 get_pkg_info | |
2118 | |
2119 Examples | |
2120 -------- | |
2121 To get the necessary information for the npymath library from NumPy: | |
2122 | |
2123 >>> npymath_info = np.distutils.misc_util.get_info('npymath') | |
2124 >>> npymath_info #doctest: +SKIP | |
2125 {'define_macros': [], 'libraries': ['npymath'], 'library_dirs': | |
2126 ['.../numpy/core/lib'], 'include_dirs': ['.../numpy/core/include']} | |
2127 | |
2128 This info dict can then be used as input to a `Configuration` instance:: | |
2129 | |
2130 config.add_extension('foo', sources=['foo.c'], extra_info=npymath_info) | |
2131 | |
2132 """ | |
2133 from numpy.distutils.npy_pkg_config import parse_flags | |
2134 pkg_info = get_pkg_info(pkgname, dirs) | |
2135 | |
2136 # Translate LibraryInfo instance into a build_info dict | |
2137 info = parse_flags(pkg_info.cflags()) | |
2138 for k, v in parse_flags(pkg_info.libs()).items(): | |
2139 info[k].extend(v) | |
2140 | |
2141 # add_extension's extra_info argument expects 'define_macros' rather than 'macros' | |
2142 info['define_macros'] = info['macros'] | |
2143 del info['macros'] | |
2144 del info['ignored'] | |
2145 | |
2146 return info | |
2147 | |
2148 def is_bootstrapping(): | |
2149 if sys.version_info[0] >= 3: | |
2150 import builtins | |
2151 else: | |
2152 import __builtin__ as builtins | |
2153 | |
2154 try: | |
2155 builtins.__NUMPY_SETUP__ | |
2156 return True | |
2157 except AttributeError: | |
2158 return False | |
2159 __NUMPY_SETUP__ = False | |
2160 | |
2161 | |
2162 ######################### | |
2163 | |
2164 def default_config_dict(name = None, parent_name = None, local_path=None): | |
2165 """Return a configuration dictionary for usage in | |
2166 configuration() function defined in file setup_<name>.py. | |
2167 """ | |
2168 import warnings | |
2169 warnings.warn('Use Configuration(%r,%r,top_path=%r) instead of '\ | |
2170 'deprecated default_config_dict(%r,%r,%r)' | |
2171 % (name, parent_name, local_path, | |
2172 name, parent_name, local_path, | |
2173 )) | |
2174 c = Configuration(name, parent_name, local_path) | |
2175 return c.todict() | |
2176 | |
2177 | |
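# dict_append merges keyword arguments into d: values for existing keys are
# extended in place when they already hold a list, replaced when they hold a
# string, and new keys are simply added.  For example,
# dict_append(info, libraries=['m']) extends info['libraries'] if it is a list.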
2178 def dict_append(d, **kws): | |
2179 for k, v in kws.items(): | |
2180 if k in d: | |
2181 ov = d[k] | |
2182 if isinstance(ov, str): | |
2183 d[k] = v | |
2184 else: | |
2185 d[k].extend(v) | |
2186 else: | |
2187 d[k] = v | |
2188 | |
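# appendpath joins path beneath prefix, e.g. appendpath('config', 'include/a.h')
# gives 'config/include/a.h' (using the platform's path separator).  When path
# is absolute, only the portion below its common prefix with the absolute form
# of prefix is re-rooted under prefix.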
2189 def appendpath(prefix, path): | |
2190 if os.path.sep != '/': | |
2191 prefix = prefix.replace('/', os.path.sep) | |
2192 path = path.replace('/', os.path.sep) | |
2193 drive = '' | |
2194 if os.path.isabs(path): | |
2195 drive = os.path.splitdrive(prefix)[0] | |
2196 absprefix = os.path.splitdrive(os.path.abspath(prefix))[1] | |
2197 pathdrive, path = os.path.splitdrive(path) | |
2198 d = os.path.commonprefix([absprefix, path]) | |
2199 if os.path.join(absprefix[:len(d)], absprefix[len(d):]) != absprefix \ | |
2200 or os.path.join(path[:len(d)], path[len(d):]) != path: | |
2201 # Handle invalid paths | |
2202 d = os.path.dirname(d) | |
2203 subpath = path[len(d):] | |
2204 if os.path.isabs(subpath): | |
2205 subpath = subpath[1:] | |
2206 else: | |
2207 subpath = path | |
2208 return os.path.normpath(njoin(drive + prefix, subpath)) | |
2209 | |
2210 def generate_config_py(target): | |
2211 """Generate config.py file containing system_info information | |
2212 used during building the package. | |
2213 | |
2214 Usage: | |
2215 config['py_modules'].append((packagename, '__config__', generate_config_py)) | |
2216 """ | |
2217 from numpy.distutils.system_info import system_info | |
2218 from distutils.dir_util import mkpath | |
2219 mkpath(os.path.dirname(target)) | |
2220 f = open(target, 'w') | |
2221 f.write('# This file is generated by %s\n' % (os.path.abspath(sys.argv[0]))) | |
2222 f.write('# It contains system_info results at the time of building this package.\n') | |
2223 f.write('__all__ = ["get_info","show"]\n\n') | |
2224 for k, i in system_info.saved_results.items(): | |
2225 f.write('%s=%r\n' % (k, i)) | |
2226 f.write(r''' | |
2227 def get_info(name): | |
2228 g = globals() | |
2229 return g.get(name, g.get(name + "_info", {})) | |
2230 | |
2231 def show(): | |
2232 for name,info_dict in globals().items(): | |
2233 if name[0] == "_" or type(info_dict) is not type({}): continue | |
2234 print(name + ":") | |
2235 if not info_dict: | |
2236 print(" NOT AVAILABLE") | |
2237 for k,v in info_dict.items(): | |
2238 v = str(v) | |
2239 if k == "sources" and len(v) > 200: | |
2240 v = v[:60] + " ...\n... " + v[-60:] | |
2241 print(" %s = %s" % (k,v)) | |
2242 ''') | |
2243 | |
2244 f.close() | |
2245 return target | |
2246 | |
2247 """Return the major and minor version of the compiler instance if it | |
2248 is MSVC; raise an exception otherwise.""" | |
2249 MSVC, raise an exception otherwise.""" | |
2250 if not compiler.compiler_type == "msvc": | |
2251 raise ValueError("Compiler instance is not msvc (%s)"\ | |
2252 % compiler.compiler_type) | |
2253 return compiler._MSVCCompiler__version | |
2254 | |
2255 if sys.version[:3] >= '2.5': | |
2256 def get_build_architecture(): | |
2257 from distutils.msvccompiler import get_build_architecture | |
2258 return get_build_architecture() | |
2259 else: | |
2260 #copied from python 2.5.1 distutils/msvccompiler.py | |
2261 def get_build_architecture(): | |
2262 """Return the processor architecture. | |
2263 | |
2264 Possible results are "Intel", "Itanium", or "AMD64". | |
2265 """ | |
2266 prefix = " bit (" | |
2267 i = sys.version.find(prefix) | |
2268 if i == -1: | |
2269 return "Intel" | |
2270 j = sys.version.find(")", i) | |
2271 return sys.version[i+len(prefix):j] |