1 | # a waf tool to add autoconf-like macros to the configure section
|
---|
2 | # and for SAMBA_ macros for building libraries, binaries etc
|
---|
3 |
|
---|
4 | import os, sys, re, fnmatch, shlex
|
---|
5 | import Build, Options, Utils, Task, Logs, Configure
|
---|
6 | from TaskGen import feature, before, after
|
---|
7 | from Configure import conf, ConfigurationContext
|
---|
8 | from Logs import debug
|
---|
9 |
|
---|
# TODO: make this a --option
# subdirectory of the build dir where shared libraries are staged
LIB_PATH="shared"


# sigh, python octal constants are a mess
# (0644 is a syntax error on Python 3 and 0o644 on old Python 2, so
# build the values portably from base-8 strings)
MODE_644 = int('644', 8)
MODE_755 = int('755', 8)
|
---|
17 |
|
---|
@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''record the type of a build target, refusing to redefine it

    A target may be re-declared only if it was previously 'EMPTY';
    any other redefinition is a fatal error.
    '''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    previous = cache.get(target)
    if previous is not None and previous != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, previous))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True
|
---|
28 |
|
---|
29 |
|
---|
def GET_TARGET_TYPE(ctx, target):
    '''look up the recorded type of a target; None if it was never declared'''
    return LOCAL_CACHE(ctx, 'TARGET_TYPE').get(target)
|
---|
36 |
|
---|
37 |
|
---|
def ADD_LD_LIBRARY_PATH(path):
    '''add a directory to LD_LIBRARY_PATH in os.environ, avoiding duplicates

    Fix: when LD_LIBRARY_PATH was previously empty, splitting produced an
    empty component and the result was ':path' — an empty component is
    interpreted by the dynamic loader as the current directory.  Empty
    components are now filtered out.
    '''
    oldpath = os.environ.get('LD_LIBRARY_PATH', '')
    # drop empty components that splitting an empty string produces
    newpath = [p for p in oldpath.split(':') if p]
    if path not in newpath:
        newpath.append(path)
    os.environ['LD_LIBRARY_PATH'] = ':'.join(newpath)
|
---|
48 |
|
---|
49 |
|
---|
def needs_private_lib(bld, target):
    '''return True if any library this target links against is private'''
    for libname in getattr(target, "final_libs", []):
        tg = bld.get_tgen_by_name(libname)
        if tg is not None and getattr(tg, 'private_library', False):
            return True
    return False
|
---|
57 |
|
---|
58 |
|
---|
def install_rpath(target):
    '''compute the rpath list used when installing this target

    Clears the build-time RPATH and returns the install-time entries,
    depending on the RPATH_ON_INSTALL / RPATH_ON_INSTALL_PRIVATE settings.
    '''
    bld = target.bld
    bld.env['RPATH'] = []
    env = bld.env
    paths = set()
    if env.RPATH_ON_INSTALL:
        paths.add(bld.EXPAND_VARIABLES(env.LIBDIR))
    # private libraries live in their own directory; only add it when needed
    if env.RPATH_ON_INSTALL_PRIVATE and needs_private_lib(bld, target):
        paths.add(bld.EXPAND_VARIABLES(env.PRIVATELIBDIR))
    return list(paths)
|
---|
69 |
|
---|
70 |
|
---|
def build_rpath(bld):
    '''compute the rpath list used during the build

    When RPATH_ON_BUILD is disabled, fall back to extending
    LD_LIBRARY_PATH so in-tree binaries can still find their libraries.
    '''
    bld.env['RPATH'] = []
    candidates = [os.path.normpath('%s/%s' % (bld.env.BUILD_DIRECTORY, sub))
                  for sub in ("shared", "shared/private")]
    if bld.env.RPATH_ON_BUILD:
        return candidates
    for candidate in candidates:
        ADD_LD_LIBRARY_PATH(candidate)
    return []
|
---|
80 |
|
---|
81 |
|
---|
@conf
def LOCAL_CACHE(ctx, name):
    '''return a named build cache dictionary, used to store
    state inside other functions'''
    # create the per-name dictionary lazily on first access
    if name not in ctx.env:
        ctx.env[name] = {}
    return ctx.env[name]
|
---|
90 |
|
---|
91 |
|
---|
@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''store a single key/value pair in a named local cache'''
    LOCAL_CACHE(ctx, cachename)[key] = value
|
---|
97 |
|
---|
98 |
|
---|
@conf
def ASSERT(ctx, expression, msg):
    '''a build assert call: abort the build with msg when expression is false'''
    if expression:
        return
    raise Utils.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT
|
---|
105 |
|
---|
106 |
|
---|
def SUBDIR(bld, subdir, list):
    '''create a list of files by pre-pending each with a subdir name

    Returns a single space-terminated string (each entry is followed by
    a space, matching the historical format callers expect).
    '''
    parts = [os.path.normpath(os.path.join(subdir, f)) + ' '
             for f in TO_LIST(list)]
    return ''.join(parts)
Build.BuildContext.SUBDIR = SUBDIR
|
---|
114 |
|
---|
115 |
|
---|
def dict_concat(d1, d2):
    '''merge d2 into d1 without overwriting keys already in d1 (d1 += d2)'''
    for key, value in d2.items():
        if key not in d1:
            d1[key] = value
|
---|
121 |
|
---|
def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf

    Registers *function* under *name* in the user's top-level wscript
    module so that 'waf <name>' can invoke it.
    '''
    # expose the function as a command in the top-level wscript module
    Utils.g_module.__dict__[name] = function
    # NOTE(review): this assigns to the literal attribute 'name', not to the
    # attribute named by the 'name' parameter — setattr(opt, name, function)
    # may have been intended; confirm against callers before changing
    opt.name = function
# make ADD_COMMAND available as a method on the options handler
Options.Handler.ADD_COMMAND = ADD_COMMAND
|
---|
127 |
|
---|
128 |
|
---|
@feature('c', 'cc', 'cshlib', 'cprogram')
@before('apply_core','exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
    allow us to specify a dependency on output from
    a source generation rule'''
    # nothing to do unless this task generator declared depends_on
    if getattr(self , 'depends_on', None):
        lst = self.to_list(self.depends_on)
        for x in lst:
            y = self.bld.get_tgen_by_name(x)
            self.bld.ASSERT(y is not None, "Failed to find dependency %s of %s" % (x, self.name))
            # post the dependency now so its tasks (and outputs) exist
            # before this generator's own tasks are created
            y.post()
            # a dependency may export additional include paths to its users
            if getattr(y, 'more_includes', None):
                self.includes += " " + y.more_includes
|
---|
143 |
|
---|
144 |
|
---|
# use the stdlib os.path.relpath when available (Python >= 2.6)
os_path_relpath = getattr(os.path, 'relpath', None)
if os_path_relpath is None:
    # Python < 2.6 does not have os.path.relpath, provide a replacement
    # (imported from Python2.6.5~rc2)
    def os_path_relpath(path, start):
        """Return a relative version of a path"""
        # NOTE(review): splits on '/' rather than os.sep, so this fallback
        # assumes POSIX-style paths — confirm if Windows support matters
        start_list = os.path.abspath(start).split("/")
        path_list = os.path.abspath(path).split("/")

        # Work out how much of the filepath is shared by start and path.
        i = len(os.path.commonprefix([start_list, path_list]))

        rel_list = ['..'] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            # path == start: nothing relative to return
            return start
        return os.path.join(*rel_list)
|
---|
161 |
|
---|
162 |
|
---|
def unique_list(seq):
    '''return a uniquified list in the same order as the existing list'''
    seen = set()
    out = []
    for element in seq:
        if element not in seen:
            seen.add(element)
            out.append(element)
    return out
|
---|
172 |
|
---|
173 |
|
---|
def TO_LIST(str, delimiter=None):
    '''Split a list, preserving quoted strings and existing lists

    Accepts None (returns []), an existing list (returns a shallow copy)
    or a string.  A string containing double quotes is re-split with
    shlex so quoted substrings stay together.

    Fix: the quote check used e[0], which raised IndexError when an
    explicit delimiter produced empty elements (e.g. TO_LIST('a,,b', ','));
    startswith() is safe on empty strings.
    '''
    if str is None:
        return []
    if isinstance(str, list):
        # we need to return a new independent list...
        return list(str)
    if len(str) == 0:
        return []
    lst = str.split(delimiter)
    # the string may have had quotes in it, now we
    # check if we did have quotes, and use the slower shlex
    # if we need to
    for e in lst:
        if e.startswith('"'):
            return shlex.split(str)
    return lst
|
---|
191 |
|
---|
192 |
|
---|
def subst_vars_error(string, env):
    '''substitute ${VAR} style variables, raising KeyError for any
    variable not present in env

    Fix: the regex patterns are now raw strings; the originals relied on
    '\\$' etc. surviving as literal characters, which emits
    DeprecationWarning/SyntaxWarning for invalid escape sequences on
    modern Python.
    '''
    out = []
    # split the input into alternating literal text and ${NAME} tokens
    for tok in re.split(r'(\$\{\w+\})', string):
        if re.match(r'\$\{\w+\}', tok):
            vname = tok[2:-1]
            if vname not in env:
                raise KeyError("Failed to find variable %s in %s" % (vname, string))
            tok = env[vname]
        out.append(tok)
    return ''.join(out)
|
---|
205 |
|
---|
206 |
|
---|
@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Substitute an environment variable for any embedded variables'''
    value = ctx.env[varname]
    return subst_vars_error(value, ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR
|
---|
212 |
|
---|
213 |
|
---|
def ENFORCE_GROUP_ORDERING(bld):
    '''enforce group ordering for the project. This
    makes the group ordering apply only when you specify
    a target with --target'''
    if Options.options.compile_targets:
        # install a taskgen feature that, the first time any generator is
        # processed, force-posts every build group before the one that
        # contains the requested target
        @feature('*')
        @before('exec_rule', 'apply_core', 'collect')
        def force_previous_groups(self):
            # run this logic only once per build
            if getattr(self.bld, 'enforced_group_ordering', False):
                return
            self.bld.enforced_group_ordering = True

            def group_name(g):
                # reverse lookup: find the name bound to group object g
                tm = self.bld.task_manager
                return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]

            my_id = id(self)
            bld = self.bld
            stop = None
            # locate the group that contains this task generator
            for g in bld.task_manager.groups:
                for t in g.tasks_gen:
                    if id(t) == my_id:
                        stop = id(g)
                        debug('group: Forcing up to group %s for target %s',
                              group_name(g), self.name or self.target)
                        break
                if stop is not None:
                    break
            if stop is None:
                return

            # post all generators in every group before the stop group,
            # marking each with forced_groups so it is only posted once
            for i in xrange(len(bld.task_manager.groups)):
                g = bld.task_manager.groups[i]
                bld.task_manager.current_group = i
                if id(g) == stop:
                    break
                debug('group: Forcing group %s', group_name(g))
                for t in g.tasks_gen:
                    if not getattr(t, 'forced_groups', False):
                        debug('group: Posting %s', t.name or t.target)
                        t.forced_groups = True
                        t.post()
Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING
|
---|
257 |
|
---|
258 |
|
---|
def recursive_dirlist(dir, relbase, pattern=None):
    '''recursive directory list, returning paths relative to relbase

    pattern, when given, is an fnmatch-style glob applied to file
    basenames.

    Fix: the recursive call previously dropped the pattern argument, so
    files in subdirectories were never filtered.
    '''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            # propagate the pattern into subdirectories (bug fix)
            ret.extend(recursive_dirlist(f2, relbase, pattern=pattern))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os_path_relpath(f2, relbase))
    return ret
|
---|
271 |
|
---|
272 |
|
---|
def mkdir_p(dir):
    '''like mkdir -p: create a directory and any missing parents'''
    # empty path or already-existing directory: nothing to do
    if not dir or os.path.isdir(dir):
        return
    # strip a trailing slash and retry
    if dir.endswith("/"):
        mkdir_p(dir[:-1])
        return
    # create parents first, then the leaf
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)
|
---|
284 |
|
---|
285 |
|
---|
def SUBST_VARS_RECURSIVE(string, env):
    '''recursively expand ${VAR} references, to at most 100 levels'''
    if string is None:
        return string
    # cap the number of passes to avoid looping on self-referencing vars
    remaining = 100
    while remaining > 0 and '${' in string:
        string = subst_vars_error(string, env)
        remaining -= 1
    return string
|
---|
295 |
|
---|
296 |
|
---|
@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary

    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''

    # lists are expanded element by element
    if isinstance(varstr, list):
        ret = []
        for s in varstr:
            ret.append(EXPAND_VARIABLES(ctx, s, vars=vars))
        return ret

    # non-string values (ints, None, ...) pass through untouched
    if not isinstance(varstr, str):
        return varstr

    import Environment
    env = Environment.Environment()
    ret = varstr
    # substitute on user supplied dict if available
    if vars is not None:
        for v in vars.keys():
            env[v] = vars[v]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # if anything left, subst on the environment as well
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)
    # make sure there is nothing left. Also check for the common
    # typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES
|
---|
333 |
|
---|
334 |
|
---|
def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run a external command, return exit code or signal

    When env is given, ${VAR} references in cmd are expanded against it
    first.  Returns the command's exit status, or the negated signal
    number if it was killed by a signal, or -1 on an unknown status.
    '''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        # negative return value signals death-by-signal to the caller
        return - os.WTERMSIG(status)
    # bug fix: the original was missing the '%' operator here, so this
    # line raised TypeError instead of logging the failure
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1
|
---|
349 |
|
---|
350 |
|
---|
def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):
    '''run each test script under every configured python interpreter

    Returns 0 when everything passed, otherwise the exit code of the
    last failing test.
    '''
    build_env = LOAD_ENVIRONMENT()
    if pythonpath is None:
        pythonpath = os.path.join(Utils.g_module.blddir, 'python')
    failed = 0
    for interp in build_env.python_interpreters:
        for script in testfiles:
            cmdline = "PYTHONPATH=%s %s %s" % (pythonpath, interp, script)
            if extra_env:
                # prepend KEY=VALUE assignments to the shell command line
                for key, value in extra_env.items():
                    cmdline = "%s=%s %s" % (key, value, cmdline)
            print('Running Python test with %s: %s' % (interp, script))
            status = RUN_COMMAND(cmdline)
            if status:
                print('Python test failed: %s' % cmdline)
                failed = status
    return failed
|
---|
368 |
|
---|
369 |
|
---|
# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
    # Even if hashlib.md5 exists, it may be unusable.
    # Try to use MD5 function. In FIPS mode this will cause an exception
    # and we'll get to the replacement code
    foo = md5('abcd')
except:
    try:
        import md5
        # repeat the same check here, mere success of import is not enough.
        # Try to use MD5 function. In FIPS mode this will cause an exception
        foo = md5.md5('abcd')
    except:
        import Constants
        # replace waf's nil signature with one our hash() replacement produces
        Constants.SIG_NIL = hash('abcd')
        class replace_md5(object):
            """minimal md5-like object built on Python's hash(), used when
            MD5 is unavailable (e.g. FIPS mode)"""
            def __init__(self):
                # running hash value; starts as None
                self.val = None
            def update(self, val):
                # fold each chunk into the running value
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                return self.digest().encode('hex')
        def replace_h_file(filename):
            """hash a file's contents with replace_md5 (replaces Utils.h_file)"""
            f = open(filename, 'rb')
            m = replace_md5()
            # note: 'filename' is reused as the read buffer here; the
            # loop ends when read() returns an empty string
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
        # install the replacements into waf
        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file
|
---|
406 |
|
---|
407 |
|
---|
def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
    from new commands'''
    import Environment
    env = Environment.Environment()
    try:
        # .lock-wscript records blddir; the c4che file holds the
        # results of the last configure run
        env.load('.lock-wscript')
        env.load(env.blddir + '/c4che/default.cache.py')
    except:
        # best effort: return an empty environment when not yet configured
        pass
    return env
|
---|
419 |
|
---|
420 |
|
---|
def IS_NEWER(bld, file1, file2):
    '''return True if file1 has a more recent mtime than file2'''
    def _mtime(name):
        # both paths are interpreted relative to the current build directory
        return os.stat(os.path.join(bld.curdir, name)).st_mtime
    return _mtime(file1) > _mtime(file2)
|
---|
426 | Build.BuildContext.IS_NEWER = IS_NEWER
|
---|
427 |
|
---|
428 |
|
---|
@conf
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level'''
    try:
        visited_dirs = ctx.visited_dirs
    except:
        # first call on this context: create the visited set lazily
        visited_dirs = ctx.visited_dirs = set()
    d = os.path.join(ctx.curdir, directory)
    if os.path.exists(d):
        abspath = os.path.abspath(d)
    else:
        # fall back to a path relative to the top-level source directory
        abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))
    ctxclass = ctx.__class__.__name__
    # key on both context class and path so each phase
    # (options/configure/build) can visit the same directory once
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)
    relpath = os_path_relpath(abspath, ctx.curdir)
    # dispatch to the recursion method matching the waf context type
    if ctxclass == 'Handler':
        return ctx.sub_options(relpath)
    if ctxclass == 'ConfigurationContext':
        return ctx.sub_config(relpath)
    if ctxclass == 'BuildContext':
        return ctx.add_subdirs(relpath)
    Logs.error('Unknown RECURSE context class', ctxclass)
    # NOTE(review): bare 'raise' with no active exception raises a
    # RuntimeError/TypeError rather than re-raising anything — possibly
    # a deliberate hard stop, but confirm the intent
    raise
Options.Handler.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE
|
---|
458 |
|
---|
459 |
|
---|
def CHECK_MAKEFLAGS(bld):
    '''check for MAKEFLAGS environment variable in case we are being
    called from a Makefile try to honor a few make command line flags'''
    # only act when invoked via the generated Makefile wrapper
    if not 'WAF_MAKE' in os.environ:
        return
    makeflags = os.environ.get('MAKEFLAGS')
    if makeflags is None:
        return
    jobs_set = False
    # we need to use shlex.split to cope with the escaping of spaces
    # in makeflags
    for opt in shlex.split(makeflags):
        # options can come either as -x or as x
        if opt[0:2] == 'V=':
            # V=1 enables verbose logging; V>2 enables all debug zones
            Options.options.verbose = Logs.verbose = int(opt[2:])
            if Logs.verbose > 0:
                Logs.zones = ['runner']
            if Logs.verbose > 2:
                Logs.zones = ['*']
        elif opt[0].isupper() and opt.find('=') != -1:
            # this allows us to set waf options on the make command line
            # for example, if you do "make FOO=blah", then we set the
            # option 'FOO' in Options.options, to blah. If you look in wafsamba/wscript
            # you will see that the command line accessible options have their dest=
            # set to uppercase, to allow for passing of options from make in this way
            # this is also how "make test TESTS=testpattern" works, and
            # "make VERBOSE=1" as well as things like "make SYMBOLCHECK=1"
            loc = opt.find('=')
            setattr(Options.options, opt[0:loc], opt[loc+1:])
        elif opt[0] != '-':
            # make passes bundled single-letter flags without a dash
            for v in opt:
                if v == 'j':
                    jobs_set = True
                elif v == 'k':
                    Options.options.keep = True
        elif opt == '-j':
            jobs_set = True
        elif opt == '-k':
            Options.options.keep = True
    if not jobs_set:
        # default to one job
        Options.options.jobs = 1

Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS
|
---|
504 |
|
---|
# registry of option groups already created, keyed by group name
option_groups = {}

def option_group(opt, name):
    '''find or create an option group'''
    global option_groups
    try:
        return option_groups[name]
    except KeyError:
        group = opt.add_option_group(name)
        option_groups[name] = group
        return group
|
---|
515 | Options.Handler.option_group = option_group
|
---|
516 |
|
---|
517 |
|
---|
def save_file(filename, contents, create_dir=False):
    '''save data to a file

    Returns True on success, False on any error.  When create_dir is
    True, missing parent directories are created first.

    Fix: uses a 'with' block so the file handle is closed even when
    write() raises; the original leaked the handle on a failed write.
    '''
    if create_dir:
        mkdir_p(os.path.dirname(filename))
    try:
        with open(filename, 'w') as f:
            f.write(contents)
    except:
        # deliberately best-effort: any failure is reported as False
        return False
    return True
|
---|
529 |
|
---|
530 |
|
---|
def load_file(filename):
    '''return the contents of a file, or None if it cannot be read

    Fix: uses a 'with' block so the file handle is closed even when
    read() raises; the original leaked the handle in that case.
    '''
    try:
        with open(filename, 'r') as f:
            return f.read()
    except:
        # deliberately best-effort: any failure is reported as None
        return None
|
---|
540 |
|
---|
541 |
|
---|
def reconfigure(ctx):
    '''rerun configure if necessary'''
    import Configure, samba_wildcard, Scripting
    if not os.path.exists(".lock-wscript"):
        raise Utils.WafError('configure has not been run')
    # build a minimal fake environment so waf's autoconfig machinery can run
    bld = samba_wildcard.fake_build_environment()
    Configure.autoconfig = True
    # triggers a re-run of configure when the wscript files have changed
    Scripting.check_configured(bld)
550 |
|
---|
551 |
|
---|
def map_shlib_extension(ctx, name, python=False):
    '''map a filename with a shared library extension of .so to the real shlib name'''
    if name is None:
        return None
    if name[-1:].isdigit():
        # versioned library names (ending in a digit) are left untouched
        return name
    root = os.path.splitext(name)[0]
    if python:
        return ctx.env.pyext_PATTERN % root
    # take the real extension from the platform's shlib pattern
    real_ext = os.path.splitext(ctx.env.shlib_PATTERN)[1]
    return root + real_ext
|
---|
565 | Build.BuildContext.map_shlib_extension = map_shlib_extension
|
---|
566 |
|
---|
def apply_pattern(filename, pattern):
    '''apply a filename pattern to a filename that may have a directory component'''
    head, tail = os.path.split(filename)
    if head:
        # apply the pattern to the basename only, keeping the directory
        return os.path.join(head, pattern % tail)
    return pattern % filename
|
---|
574 |
|
---|
def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
    """make a library filename
    Options:
         nolibprefix: don't include the lib prefix
         version    : add a version number
         python     : if we should use python module name conventions"""

    pattern = ctx.env.pyext_PATTERN if python else ctx.env.shlib_PATTERN
    libname = apply_pattern(name, pattern)
    if nolibprefix and libname.startswith('lib'):
        libname = libname[3:]
    if version:
        # tolerate a version given with a leading dot
        if version.startswith('.'):
            version = version[1:]
        root, ext = os.path.splitext(libname)
        if ext == ".dylib":
            # special case - version goes before the prefix
            libname = "%s.%s%s" % (root, version, ext)
        else:
            libname = "%s%s.%s" % (root, ext, version)
    return libname
Build.BuildContext.make_libname = make_libname
|
---|
599 |
|
---|
600 |
|
---|
def get_tgt_list(bld):
    '''return a list of build objects for samba'''

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # only these target types correspond to real task generators
    wanted = ('SUBSYSTEM', 'MODULE', 'BINARY', 'LIBRARY', 'ASN1', 'PYTHON')

    tgt_list = []
    for name, ttype in targets.items():
        if ttype not in wanted:
            continue
        tg = bld.get_tgen_by_name(name)
        if tg is None:
            Logs.error("Target %s of type %s has no task generator" % (name, ttype))
            sys.exit(1)
        tgt_list.append(tg)
    return tgt_list
|
---|
618 |
|
---|
619 | from Constants import WSCRIPT_FILE
|
---|
def PROCESS_SEPARATE_RULE(self, rule):
    ''' cause waf to process additional script based on `rule'.
    You should have file named wscript_<stage>_rule in the current directory
    where stage is either 'configure' or 'build'
    '''
    # derive the stage name from the type of the running context
    stage = ''
    if isinstance(self, Configure.ConfigurationContext):
        stage = 'configure'
    elif isinstance(self, Build.BuildContext):
        stage = 'build'
    file_path = os.path.join(self.curdir, WSCRIPT_FILE+'_'+stage+'_'+rule)
    # missing or unreadable rule files are silently skipped (load_file
    # returns None on failure)
    txt = load_file(file_path)
    if txt:
        dc = {'ctx': self}
        if getattr(self.__class__, 'pre_recurse', None):
            dc = self.pre_recurse(txt, file_path, self.curdir)
        # execute the wscript fragment in the prepared namespace
        exec(compile(txt, file_path, 'exec'), dc)
        if getattr(self.__class__, 'post_recurse', None):
            dc = self.post_recurse(txt, file_path, self.curdir)

Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
|
---|
642 |
|
---|
def AD_DC_BUILD_IS_ENABLED(self):
    '''return True when the AD DC build was enabled at configure time'''
    return bool(self.CONFIG_SET('AD_DC_BUILD_IS_ENABLED'))
|
---|
647 |
|
---|
648 | Build.BuildContext.AD_DC_BUILD_IS_ENABLED = AD_DC_BUILD_IS_ENABLED
|
---|
649 |
|
---|
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
@before('apply_obj_vars')
def samba_before_apply_obj_vars(self):
    """before apply_obj_vars for uselib, this removes the standard paths

    Fix: iterate over copies of the lists.  Removing an element from a
    list while iterating it skips the element that follows each removal,
    so some standard paths could survive the filtering.
    """

    def is_standard_libpath(env, path):
        # compare against the configured standard library search paths
        for std in env.STANDARD_LIBPATH:
            if std == os.path.normpath(path):
                return True
        return False

    v = self.env

    for i in v['RPATH'][:]:
        if is_standard_libpath(v, i):
            v['RPATH'].remove(i)

    for i in v['LIBPATH'][:]:
        if is_standard_libpath(v, i):
            v['LIBPATH'].remove(i)
671 |
|
---|