| 1 | # a waf tool to add autoconf-like macros to the configure section
|
|---|
| 2 | # and for SAMBA_ macros for building libraries, binaries etc
|
|---|
| 3 |
|
|---|
| 4 | import Build, os, sys, Options, Utils, Task, re, fnmatch, Logs
|
|---|
| 5 | from TaskGen import feature, before
|
|---|
| 6 | from Configure import conf
|
|---|
| 7 | from Logs import debug
|
|---|
| 8 | import shlex
|
|---|
| 9 |
|
|---|
# TODO: make this a --option
LIB_PATH = "shared"


# octal literals differ between Python versions (0644 vs 0o644), so build
# the permission constants portably via int() with an explicit base
MODE_644 = int('644', 8)
MODE_755 = int('755', 8)
|
|---|
| 17 |
|
|---|
@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''record the build type of a named target in the TARGET_TYPE cache

    Redefining an existing target (other than an EMPTY placeholder) is a
    fatal configuration error.
    '''
    type_cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    previous = type_cache.get(target)
    if previous is not None and previous != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, type_cache[target]))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True
|
|---|
| 28 |
|
|---|
| 29 |
|
|---|
def GET_TARGET_TYPE(ctx, target):
    '''look up the recorded type of a target, or None if it is unknown'''
    type_cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    return type_cache.get(target)
|
|---|
| 36 |
|
|---|
| 37 |
|
|---|
######################################################
# this is used as a decorator to make functions only
# run once. Based on the idea from
# http://stackoverflow.com/questions/815110/is-there-a-decorator-to-simply-cache-function-return-values
runonce_ret = {}

def runonce(function):
    '''decorator: evaluate the wrapped function at most once per argument
    tuple, returning the cached result on subsequent calls'''
    def runonce_wrapper(*args):
        try:
            return runonce_ret[args]
        except KeyError:
            result = function(*args)
            runonce_ret[args] = result
            return result
    return runonce_wrapper
|
|---|
| 52 |
|
|---|
| 53 |
|
|---|
def ADD_LD_LIBRARY_PATH(path):
    '''append *path* to the LD_LIBRARY_PATH environment variable
    unless it is already one of its components'''
    current = os.environ.get('LD_LIBRARY_PATH', '')
    components = current.split(':')
    if path not in components:
        components.append(path)
        os.environ['LD_LIBRARY_PATH'] = ':'.join(components)
|
|---|
| 64 |
|
|---|
| 65 |
|
|---|
def needs_private_lib(bld, target):
    '''return True when *target* links against at least one library
    marked with the private_library attribute'''
    for dep in getattr(target, "uselib_local", []):
        dep_obj = bld.name_to_obj(dep, bld.env)
        if dep_obj and getattr(dep_obj, 'private_library', False):
            return True
    return False
|
|---|
| 73 |
|
|---|
| 74 |
|
|---|
def install_rpath(target):
    '''compute the rpath list used when installing *target*

    Clears any build-time RPATH first, then adds the (expanded) public
    and/or private library directories depending on configuration.
    '''
    bld = target.bld
    bld.env['RPATH'] = []
    paths = set()
    if bld.env.RPATH_ON_INSTALL:
        paths.add(bld.EXPAND_VARIABLES(bld.env.LIBDIR))
    if bld.env.RPATH_ON_INSTALL_PRIVATE and needs_private_lib(bld, target):
        paths.add(bld.EXPAND_VARIABLES(bld.env.PRIVATELIBDIR))
    return list(paths)
|
|---|
| 85 |
|
|---|
| 86 |
|
|---|
def build_rpath(bld):
    '''compute the rpath list used during the build

    When rpath is disabled for the build, fall back to extending
    LD_LIBRARY_PATH so the freshly built libraries are still found.
    '''
    rpaths = [os.path.normpath('%s/%s' % (bld.env.BUILD_DIRECTORY, d))
              for d in ("shared", "shared/private")]
    bld.env['RPATH'] = []
    if bld.env.RPATH_ON_BUILD:
        return rpaths
    for rpath in rpaths:
        ADD_LD_LIBRARY_PATH(rpath)
    return []
|
|---|
| 96 |
|
|---|
| 97 |
|
|---|
@conf
def LOCAL_CACHE(ctx, name):
    '''return a named build cache dictionary, used to store
    state inside other functions; created on first use'''
    if name not in ctx.env:
        ctx.env[name] = {}
    return ctx.env[name]
|
|---|
| 106 |
|
|---|
| 107 |
|
|---|
@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''store *value* under *key* in the named local cache'''
    LOCAL_CACHE(ctx, cachename)[key] = value
|
|---|
| 113 |
|
|---|
| 114 |
|
|---|
@conf
def ASSERT(ctx, expression, msg):
    '''a build assert call: abort the build with *msg* when
    *expression* is false'''
    if expression:
        return
    raise Utils.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT
|
|---|
| 121 |
|
|---|
| 122 |
|
|---|
def SUBDIR(bld, subdir, list):
    '''return a space separated string of files, each prefixed with
    *subdir* and normalized (a trailing space is kept, matching callers)'''
    pieces = [os.path.normpath(os.path.join(subdir, f)) + ' '
              for f in TO_LIST(list)]
    return ''.join(pieces)
Build.BuildContext.SUBDIR = SUBDIR
|
|---|
| 130 |
|
|---|
| 131 |
|
|---|
def dict_concat(d1, d2):
    '''merge d2 into d1 in place; keys already present in d1 win'''
    for key, value in d2.items():
        d1.setdefault(key, value)
|
|---|
| 137 |
|
|---|
| 138 |
|
|---|
def exec_command(self, cmd, **kw):
    '''this overrides the 'waf -v' debug output to be in a nice
    unix like format instead of a python list.
    Thanks to ita on #waf for this'''
    import Utils, Logs
    if isinstance(cmd, list):
        printable = ' '.join(cmd)
    else:
        printable = cmd
    debug('runner: %s' % printable)
    if self.log:
        self.log.write('%s\n' % cmd)
        kw['log'] = self.log
    try:
        # default the working directory to the context's cwd if unset
        if not kw.get('cwd', None):
            kw['cwd'] = self.cwd
    except AttributeError:
        # no cwd on this context yet: use (and remember) the build dir
        self.cwd = kw['cwd'] = self.bldnode.abspath()
    return Utils.exec_command(cmd, **kw)
Build.BuildContext.exec_command = exec_command
|
|---|
| 158 |
|
|---|
| 159 |
|
|---|
def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf'''
    setattr(Utils.g_module, name, function)
    # NOTE(review): this assigns the literal attribute 'name', not the
    # attribute named by the 'name' parameter; looks like it may have been
    # intended as setattr(opt, name, function) — confirm before changing
    opt.name = function
Options.Handler.ADD_COMMAND = ADD_COMMAND
|
|---|
| 165 |
|
|---|
| 166 |
|
|---|
@feature('cc', 'cshlib', 'cprogram')
@before('apply_core', 'exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
    allow us to specify a dependency on output from
    a source generation rule'''
    dependencies = getattr(self, 'depends_on', None)
    if not dependencies:
        return
    for dep in self.to_list(dependencies):
        dep_obj = self.bld.name_to_obj(dep, self.env)
        self.bld.ASSERT(dep_obj is not None, "Failed to find dependency %s of %s" % (dep, self.name))
        dep_obj.post()
        extra_includes = getattr(dep_obj, 'more_includes', None)
        if extra_includes:
            self.includes += " " + extra_includes
|
|---|
| 181 |
|
|---|
| 182 |
|
|---|
os_path_relpath = getattr(os.path, 'relpath', None)
if os_path_relpath is None:
    # Python < 2.6 lacks os.path.relpath; ship the implementation
    # imported from Python 2.6.5~rc2 as a fallback
    def os_path_relpath(path, start):
        """Return a relative version of a path"""
        start_list = os.path.abspath(start).split("/")
        path_list = os.path.abspath(path).split("/")

        # number of leading components shared by both paths
        common = len(os.path.commonprefix([start_list, path_list]))

        rel_list = ['..'] * (len(start_list) - common) + path_list[common:]
        if not rel_list:
            return start
        return os.path.join(*rel_list)
|
|---|
| 199 |
|
|---|
| 200 |
|
|---|
def unique_list(seq):
    '''return a uniquified list in the same order as the existing list'''
    seen = set()
    result = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
|
|---|
| 210 |
|
|---|
| 211 |
|
|---|
def TO_LIST(str, delimiter=None):
    '''Split a list, preserving quoted strings and existing lists

    None yields []; an existing list is returned unchanged; otherwise the
    string is split on *delimiter* (whitespace by default), falling back to
    shlex when a quoted element is detected.
    '''
    if str is None:
        return []
    if isinstance(str, list):
        return str
    lst = str.split(delimiter)
    # the string may have had quotes in it, now we
    # check if we did have quotes, and use the slower shlex
    # if we need to
    for e in lst:
        # bugfix: use startswith() instead of e[0] so that empty elements
        # (e.g. from a doubled delimiter) no longer raise IndexError
        if e.startswith('"'):
            return shlex.split(str)
    return lst
|
|---|
| 226 |
|
|---|
| 227 |
|
|---|
def subst_vars_error(string, env):
    '''substitute ${VAR} style variables from *env*, exiting with an
    error if any referenced variable is not defined'''
    # bugfix: raw strings for the regexes — the unescaped '\$' sequences
    # are invalid escapes and warn (or break) on modern Python
    lst = re.split(r'(\$\{\w+\})', string)
    out = []
    for v in lst:
        if re.match(r'\$\{\w+\}', v):
            vname = v[2:-1]
            if not vname in env:
                Logs.error("Failed to find variable %s in %s" % (vname, string))
                sys.exit(1)
            v = env[vname]
        out.append(v)
    return ''.join(out)
|
|---|
| 241 |
|
|---|
| 242 |
|
|---|
@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Substitute an environment variable for any embedded variables'''
    raw = ctx.env[varname]
    return subst_vars_error(raw, ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR
|
|---|
| 248 |
|
|---|
| 249 |
|
|---|
def ENFORCE_GROUP_ORDERING(bld):
    '''enforce group ordering for the project. This
    makes the group ordering apply only when you specify
    a target with --target'''
    if Options.options.compile_targets:
        @feature('*')
        @before('exec_rule', 'apply_core', 'collect')
        def force_previous_groups(self):
            # only run the enforcement pass once per build context
            if getattr(self.bld, 'enforced_group_ordering', False):
                return
            self.bld.enforced_group_ordering = True

            def group_name(g):
                # reverse-map a group object back to its registered name
                tm = self.bld.task_manager
                return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]

            my_id = id(self)
            bld = self.bld
            stop = None
            # find the group containing this task generator
            for g in bld.task_manager.groups:
                for tgen in g.tasks_gen:
                    if id(tgen) == my_id:
                        stop = id(g)
                        debug('group: Forcing up to group %s for target %s',
                              group_name(g), self.name or self.target)
                        break
                if stop is not None:
                    break
            if stop is None:
                return

            # post every generator in every group that precedes ours
            for i, g in enumerate(bld.task_manager.groups):
                bld.task_manager.current_group = i
                if id(g) == stop:
                    break
                debug('group: Forcing group %s', group_name(g))
                for tgen in g.tasks_gen:
                    if not getattr(tgen, 'forced_groups', False):
                        debug('group: Posting %s', tgen.name or tgen.target)
                        tgen.forced_groups = True
                        tgen.post()
Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING
|
|---|
| 293 |
|
|---|
| 294 |
|
|---|
def recursive_dirlist(dir, relbase, pattern=None):
    '''recursive directory list, returning file paths relative to *relbase*

    pattern, if given, is an fnmatch glob that file basenames must match
    '''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            # bugfix: propagate pattern into subdirectories; previously the
            # filter was silently dropped below the top level
            ret.extend(recursive_dirlist(f2, relbase, pattern=pattern))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os_path_relpath(f2, relbase))
    return ret
|
|---|
| 307 |
|
|---|
| 308 |
|
|---|
def mkdir_p(dir):
    '''recursively create directory *dir*, like mkdir -p;
    existing directories and empty paths are silently accepted'''
    while dir.endswith("/"):
        dir = dir[:-1]
    if not dir:
        return
    if os.path.isdir(dir):
        return
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)
|
|---|
| 320 |
|
|---|
| 321 |
|
|---|
def SUBST_VARS_RECURSIVE(string, env):
    '''expand ${VAR} references repeatedly until none remain,
    bounded at 100 passes to avoid infinite recursion'''
    if string is None:
        return string
    remaining = 100
    while '${' in string and remaining > 0:
        string = subst_vars_error(string, env)
        remaining -= 1
    return string
|
|---|
| 331 |
|
|---|
| 332 |
|
|---|
@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary

    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''
    if isinstance(varstr, list):
        return [EXPAND_VARIABLES(ctx, s, vars=vars) for s in varstr]

    if not isinstance(varstr, str):
        return varstr

    import Environment
    env = Environment.Environment()
    ret = varstr
    # substitute on the user supplied dict first, if available
    if vars is not None:
        for key in vars.keys():
            env[key] = vars[key]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # if anything is left, substitute from the build environment as well
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)
    # make sure there is nothing left. Also check for the common
    # typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES
|
|---|
| 369 |
|
|---|
| 370 |
|
|---|
def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run an external command via the shell

    Returns the command's exit status, the negated signal number if it
    was killed by a signal, or -1 if the exit reason is unknown.
    '''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return - os.WTERMSIG(status)
    # bugfix: the '%' operator was missing here, so this raised
    # "TypeError: 'str' object is not callable" instead of logging
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1
|
|---|
| 385 |
|
|---|
| 386 |
|
|---|
# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
except:
    try:
        import md5
    except:
        # no md5 at all: fall back to Python's hash() so waf's signature
        # machinery still works (weaker hashing, but functional)
        import Constants
        Constants.SIG_NIL = hash('abcd')

        class replace_md5(object):
            '''minimal md5-like object built on hash()'''
            def __init__(self):
                self.val = None
            def update(self, val):
                # fold each chunk into the running value
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                return self.digest().encode('hex')

        def replace_h_file(filename):
            '''hash a file in 100k chunks using replace_md5'''
            f = open(filename, 'rb')
            m = replace_md5()
            while True:
                data = f.read(100000)
                m.update(data)
                if not data:
                    break
            f.close()
            return m.digest()

        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file
|
|---|
| 416 |
|
|---|
| 417 |
|
|---|
def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
    from new commands'''
    import Environment
    env = Environment.Environment()
    try:
        env.load('.lock-wscript')
        env.load(env.blddir + '/c4che/default.cache.py')
    except:
        # deliberately best-effort: a missing or partial cache
        # simply yields an empty environment
        pass
    return env
|
|---|
| 429 |
|
|---|
| 430 |
|
|---|
def IS_NEWER(bld, file1, file2):
    '''return True if file1 is newer than file2 (by mtime, relative to
    the current build directory)'''
    path1 = os.path.join(bld.curdir, file1)
    path2 = os.path.join(bld.curdir, file2)
    return os.stat(path1).st_mtime > os.stat(path2).st_mtime
Build.BuildContext.IS_NEWER = IS_NEWER
|
|---|
| 437 |
|
|---|
| 438 |
|
|---|
@conf
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level

    Each (context class, absolute path) pair is visited at most once.
    '''
    try:
        visited_dirs = ctx.visited_dirs
    except AttributeError:
        # narrowed from a bare except: only a missing attribute is expected
        visited_dirs = ctx.visited_dirs = set()
    d = os.path.join(ctx.curdir, directory)
    if os.path.exists(d):
        abspath = os.path.abspath(d)
    else:
        abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))
    ctxclass = ctx.__class__.__name__
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)
    relpath = os_path_relpath(abspath, ctx.curdir)
    if ctxclass == 'Handler':
        return ctx.sub_options(relpath)
    if ctxclass == 'ConfigurationContext':
        return ctx.sub_config(relpath)
    if ctxclass == 'BuildContext':
        return ctx.add_subdirs(relpath)
    Logs.error('Unknown RECURSE context class', ctxclass)
    # bugfix: a bare 'raise' with no active exception is itself an error
    # (RuntimeError: no active exception); raise a real build error instead
    raise Utils.WafError('Unknown RECURSE context class %s' % ctxclass)
Options.Handler.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE
|
|---|
| 468 |
|
|---|
| 469 |
|
|---|
def CHECK_MAKEFLAGS(bld):
    '''check for MAKEFLAGS environment variable in case we are being
    called from a Makefile try to honor a few make command line flags'''
    if 'WAF_MAKE' not in os.environ:
        return
    makeflags = os.environ.get('MAKEFLAGS')
    if makeflags is None:
        return
    jobs_set = False
    # shlex.split copes with the escaping of spaces in makeflags
    for flag in shlex.split(makeflags):
        # options can come either as -x or as x
        if flag[0:2] == 'V=':
            Options.options.verbose = Logs.verbose = int(flag[2:])
            if Logs.verbose > 0:
                Logs.zones = ['runner']
            if Logs.verbose > 2:
                Logs.zones = ['*']
        elif flag[0].isupper() and '=' in flag:
            # NAME=value assignments become option attributes
            eq = flag.find('=')
            setattr(Options.options, flag[0:eq], flag[eq + 1:])
        elif flag[0] != '-':
            # bundled single-letter flags, e.g. 'kj'
            for ch in flag:
                if ch == 'j':
                    jobs_set = True
                elif ch == 'k':
                    Options.options.keep = True
        elif flag == '-j':
            jobs_set = True
        elif flag == '-k':
            Options.options.keep = True
    if not jobs_set:
        # default to one job
        Options.options.jobs = 1

Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS
|
|---|
| 507 |
|
|---|
option_groups = {}

def option_group(opt, name):
    '''find or create an option group by name, caching it globally'''
    global option_groups
    try:
        return option_groups[name]
    except KeyError:
        gr = opt.add_option_group(name)
        option_groups[name] = gr
        return gr
Options.Handler.option_group = option_group
|
|---|
| 519 |
|
|---|
| 520 |
|
|---|
def save_file(filename, contents, create_dir=False):
    '''save data to a file

    Returns True on success, False when the file could not be written.
    '''
    if create_dir:
        mkdir_p(os.path.dirname(filename))
    try:
        f = open(filename, 'w')
        try:
            f.write(contents)
        finally:
            # close even if the write fails partway
            f.close()
    except (IOError, OSError):
        # narrowed from a bare except: only I/O failures mean "could not save"
        return False
    return True
|
|---|
| 532 |
|
|---|
| 533 |
|
|---|
def load_file(filename):
    '''return the contents of a file, or None if it cannot be read'''
    try:
        f = open(filename, 'r')
        try:
            r = f.read()
        finally:
            f.close()
    except (IOError, OSError):
        # narrowed from a bare except: only I/O failures mean "unreadable"
        return None
    return r
|
|---|
| 543 |
|
|---|
| 544 |
|
|---|
def reconfigure(ctx):
    '''rerun configure if necessary'''
    import Configure
    import samba_wildcard
    import Scripting
    if not os.path.exists(".lock-wscript"):
        raise Utils.WafError('configure has not been run')
    fake_bld = samba_wildcard.fake_build_environment()
    Configure.autoconfig = True
    Scripting.check_configured(fake_bld)
|
|---|
| 553 |
|
|---|
| 554 |
|
|---|
def map_shlib_extension(ctx, name, python=False):
    '''map a filename with a shared library extension of .so to the real shlib name'''
    if name is None:
        return None
    if name[-1:].isdigit():
        # some libraries have specified versions in the wscript rule
        return name
    (root, _unused_ext) = os.path.splitext(name)
    if python:
        pattern = ctx.env.pyext_PATTERN
    else:
        pattern = ctx.env.shlib_PATTERN
    (_unused_root, real_ext) = os.path.splitext(pattern)
    return root + real_ext
Build.BuildContext.map_shlib_extension = map_shlib_extension
|
|---|
| 569 |
|
|---|
def apply_pattern(filename, pattern):
    '''apply a filename pattern to a filename that may have a directory component'''
    dirname, basename = os.path.split(filename)
    if not dirname:
        return pattern % filename
    return os.path.join(dirname, pattern % basename)
|
|---|
| 577 |
|
|---|
def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
    """make a library filename
    Options:
         nolibprefix: don't include the lib prefix
         version    : add a version number
         python     : if we should use python module name conventions"""
    if python:
        libname = apply_pattern(name, ctx.env.pyext_PATTERN)
    else:
        libname = apply_pattern(name, ctx.env.shlib_PATTERN)
    if nolibprefix and libname.startswith('lib'):
        libname = libname[3:]
    if version:
        # normalize a single leading dot off the version string
        if libname is not None and version.startswith('.'):
            version = version[1:]
        (root, ext) = os.path.splitext(libname)
        if ext == ".dylib":
            # special case - version goes before the extension
            libname = "%s.%s%s" % (root, version, ext)
        else:
            libname = "%s%s.%s" % (root, ext, version)
    return libname
Build.BuildContext.make_libname = make_libname
|
|---|
| 602 |
|
|---|
| 603 |
|
|---|
def get_tgt_list(bld):
    '''return a list of build objects for samba'''

    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')

    # the task generator types we are interested in
    wanted = ('SUBSYSTEM', 'MODULE', 'BINARY', 'LIBRARY', 'ASN1', 'PYTHON')

    tgt_list = []
    for tgt in targets:
        ttype = targets[tgt]
        if ttype not in wanted:
            continue
        t = bld.name_to_obj(tgt, bld.env)
        if t is None:
            Logs.error("Target %s of type %s has no task generator" % (tgt, ttype))
            sys.exit(1)
        tgt_list.append(t)
    return tgt_list
|
|---|