瀏覽代碼

sdk: waf: update to 2.1.4

Signed-off-by: Gerard Marull-Paretas <gerard@teslabs.com>
Gerard Marull-Paretas 6 月之前
父節點
當前提交
38c25a6acf
共有 100 個文件被更改,包括 13088 次插入5478 次删除
  1. 3 0
      .gitignore
  2. 2 2
      sdk/update-waf.sh
  3. 25 16
      sdk/waf/waf-light
  4. 513 300
      sdk/waf/waflib/Build.py
  5. 56 37
      sdk/waf/waflib/ConfigSet.py
  6. 303 221
      sdk/waf/waflib/Configure.py
  7. 306 169
      sdk/waf/waflib/Context.py
  8. 9 11
      sdk/waf/waflib/Errors.py
  9. 199 124
      sdk/waf/waflib/Logs.py
  10. 413 280
      sdk/waf/waflib/Node.py
  11. 194 103
      sdk/waf/waflib/Options.py
  12. 433 185
      sdk/waf/waflib/Runner.py
  13. 253 195
      sdk/waf/waflib/Scripting.py
  14. 424 309
      sdk/waf/waflib/Task.py
  15. 257 161
      sdk/waf/waflib/TaskGen.py
  16. 1 1
      sdk/waf/waflib/Tools/__init__.py
  17. 5 3
      sdk/waf/waflib/Tools/ar.py
  18. 44 12
      sdk/waf/waflib/Tools/asm.py
  19. 6 6
      sdk/waf/waflib/Tools/bison.py
  20. 11 9
      sdk/waf/waflib/Tools/c.py
  21. 42 24
      sdk/waf/waflib/Tools/c_aliases.py
  22. 313 363
      sdk/waf/waflib/Tools/c_config.py
  23. 42 37
      sdk/waf/waflib/Tools/c_osx.py
  24. 280 218
      sdk/waf/waflib/Tools/c_preproc.py
  25. 27 16
      sdk/waf/waflib/Tools/c_tests.py
  26. 239 87
      sdk/waf/waflib/Tools/ccroot.py
  27. 29 0
      sdk/waf/waflib/Tools/clang.py
  28. 30 0
      sdk/waf/waflib/Tools/clangxx.py
  29. 46 33
      sdk/waf/waflib/Tools/compiler_c.py
  30. 43 33
      sdk/waf/waflib/Tools/compiler_cxx.py
  31. 41 15
      sdk/waf/waflib/Tools/compiler_d.py
  32. 34 27
      sdk/waf/waflib/Tools/compiler_fc.py
  33. 42 53
      sdk/waf/waflib/Tools/cs.py
  34. 9 9
      sdk/waf/waflib/Tools/cxx.py
  35. 2 2
      sdk/waf/waflib/Tools/d.py
  36. 13 12
      sdk/waf/waflib/Tools/d_config.py
  37. 16 14
      sdk/waf/waflib/Tools/d_scan.py
  38. 7 7
      sdk/waf/waflib/Tools/dbus.py
  39. 24 32
      sdk/waf/waflib/Tools/dmd.py
  40. 56 39
      sdk/waf/waflib/Tools/errcheck.py
  41. 55 51
      sdk/waf/waflib/Tools/fc.py
  42. 161 141
      sdk/waf/waflib/Tools/fc_config.py
  43. 18 19
      sdk/waf/waflib/Tools/fc_scan.py
  44. 18 8
      sdk/waf/waflib/Tools/flex.py
  45. 6 7
      sdk/waf/waflib/Tools/g95.py
  46. 2 1
      sdk/waf/waflib/Tools/gas.py
  47. 63 59
      sdk/waf/waflib/Tools/gcc.py
  48. 17 22
      sdk/waf/waflib/Tools/gdc.py
  49. 12 10
      sdk/waf/waflib/Tools/gfortran.py
  50. 182 74
      sdk/waf/waflib/Tools/glib2.py
  51. 53 48
      sdk/waf/waflib/Tools/gnu_dirs.py
  52. 64 59
      sdk/waf/waflib/Tools/gxx.py
  53. 6 19
      sdk/waf/waflib/Tools/icc.py
  54. 5 17
      sdk/waf/waflib/Tools/icpc.py
  55. 387 26
      sdk/waf/waflib/Tools/ifort.py
  56. 107 54
      sdk/waf/waflib/Tools/intltool.py
  57. 22 31
      sdk/waf/waflib/Tools/irixcc.py
  58. 235 113
      sdk/waf/waflib/Tools/javaw.py
  59. 19 22
      sdk/waf/waflib/Tools/ldc2.py
  60. 3 3
      sdk/waf/waflib/Tools/lua.py
  61. 41 0
      sdk/waf/waflib/Tools/md5_tstamp.py
  62. 363 330
      sdk/waf/waflib/Tools/msvc.py
  63. 9 2
      sdk/waf/waflib/Tools/nasm.py
  64. 24 0
      sdk/waf/waflib/Tools/nobuild.py
  65. 33 34
      sdk/waf/waflib/Tools/perl.py
  66. 368 250
      sdk/waf/waflib/Tools/python.py
  67. 894 0
      sdk/waf/waflib/Tools/qt5.py
  68. 29 36
      sdk/waf/waflib/Tools/ruby.py
  69. 29 39
      sdk/waf/waflib/Tools/suncc.py
  70. 29 40
      sdk/waf/waflib/Tools/suncxx.py
  71. 255 127
      sdk/waf/waflib/Tools/tex.py
  72. 104 81
      sdk/waf/waflib/Tools/vala.py
  73. 220 90
      sdk/waf/waflib/Tools/waf_unit_test.py
  74. 688 0
      sdk/waf/waflib/Tools/wafcache.py
  75. 74 61
      sdk/waf/waflib/Tools/winres.py
  76. 26 30
      sdk/waf/waflib/Tools/xlc.py
  77. 26 30
      sdk/waf/waflib/Tools/xlcxx.py
  78. 436 298
      sdk/waf/waflib/Utils.py
  79. 1 1
      sdk/waf/waflib/__init__.py
  80. 177 80
      sdk/waf/waflib/ansiterm.py
  81. 3 0
      sdk/waf/waflib/extras/__init__.py
  82. 173 0
      sdk/waf/waflib/extras/batched_cc.py
  83. 58 0
      sdk/waf/waflib/extras/biber.py
  84. 128 0
      sdk/waf/waflib/extras/bjam.py
  85. 108 0
      sdk/waf/waflib/extras/blender.py
  86. 526 0
      sdk/waf/waflib/extras/boost.py
  87. 24 0
      sdk/waf/waflib/extras/build_file_timestamp.py
  88. 110 0
      sdk/waf/waflib/extras/build_logs.py
  89. 85 0
      sdk/waf/waflib/extras/buildcopy.py
  90. 32 0
      sdk/waf/waflib/extras/c_bgxlc.py
  91. 72 0
      sdk/waf/waflib/extras/c_dumbpreproc.py
  92. 82 0
      sdk/waf/waflib/extras/c_emscripten.py
  93. 77 0
      sdk/waf/waflib/extras/c_nec.py
  94. 110 0
      sdk/waf/waflib/extras/cfg_altoptions.py
  95. 124 0
      sdk/waf/waflib/extras/clang_cl.py
  96. 137 0
      sdk/waf/waflib/extras/clang_compilation_database.py
  97. 92 0
      sdk/waf/waflib/extras/clang_cross.py
  98. 113 0
      sdk/waf/waflib/extras/clang_cross_common.py
  99. 106 0
      sdk/waf/waflib/extras/clangxx_cross.py
  100. 875 0
      sdk/waf/waflib/extras/codelite.py

+ 3 - 0
.gitignore

@@ -57,3 +57,6 @@ analyze_mcu_flash_usage_treemap.jsonp
 .vscode
 .venv
 *.egg-info
+
+waflib.zip
+*~

+ 2 - 2
sdk/update-waf.sh

@@ -19,8 +19,8 @@
 
 set -x
 
-VERSION=1.7.11
-DOWNLOAD="http://waf.googlecode.com/files/waf-$VERSION.tar.bz2"
+VERSION=2.1.4
+DOWNLOAD="https://waf.io/waf-$VERSION.tar.bz2"
 
 TMPFILE=`mktemp -t waf-tar-bz`
 

+ 25 - 16
sdk/waf/waf-light

@@ -1,7 +1,7 @@
-#!/usr/bin/env python
-# encoding: ISO8859-1
-# Thomas Nagy, 2005-2012
-
+#!/usr/bin/env python3
+# encoding: latin-1
+# Thomas Nagy, 2005-2018
+#
 """
 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions
@@ -30,13 +30,15 @@ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 POSSIBILITY OF SUCH DAMAGE.
 """
 
-import os, sys
+import os, sys, inspect
 
-VERSION="1.7.11"
+VERSION="2.1.4"
 REVISION="x"
+GIT="x"
 INSTALL="x"
 C1='x'
 C2='x'
+C3='x'
 cwd = os.getcwd()
 join = os.path.join
 
@@ -55,8 +57,8 @@ def err(m):
 	print(('\033[91mError: %s\033[0m' % m))
 	sys.exit(1)
 
-def unpack_wafdir(dir):
-	f = open(sys.argv[0],'rb')
+def unpack_wafdir(dir, src):
+	f = open(src,'rb')
 	c = 'corrupt archive (%d)'
 	while 1:
 		line = f.readline()
@@ -67,13 +69,13 @@ def unpack_wafdir(dir):
 			if f.readline() != b('#<==\n'): err(c % 2)
 			break
 	if not txt: err(c % 3)
-	txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r'))
+	txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00'))
 
 	import shutil, tarfile
 	try: shutil.rmtree(dir)
 	except OSError: pass
 	try:
-		for x in ['Tools', 'extras']:
+		for x in ('Tools', 'extras'):
 			os.makedirs(join(dir, 'waflib', x))
 	except OSError:
 		err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)
@@ -98,11 +100,15 @@ def unpack_wafdir(dir):
 			err("Waf cannot be unpacked, check that bzip2 support is present")
 
 	try:
-		for x in t: t.extract(x)
+		for x in t:
+			if hasattr(tarfile, 'data_filter'):
+				t.extract(x, filter='data')
+			else:
+				t.extract(x)
 	finally:
 		t.close()
 
-	for x in ['Tools', 'extras']:
+	for x in ('Tools', 'extras'):
 		os.chmod(join('waflib',x), 493)
 
 	if sys.hexversion<0x300000f:
@@ -129,8 +135,8 @@ def test(dir):
 		pass
 
 def find_lib():
-	name = sys.argv[0]
-	base = os.path.dirname(os.path.abspath(name))
+	src = os.path.abspath(inspect.getfile(inspect.getmodule(err)))
+	base, name = os.path.split(src)
 
 	#devs use $WAFDIR
 	w=test(os.environ.get('WAFDIR', ''))
@@ -140,10 +146,13 @@ def find_lib():
 	if name.endswith('waf-light'):
 		w = test(base)
 		if w: return w
+		for dir in sys.path:
+			if test(dir):
+				return dir
 		err('waf-light requires waflib -> export WAFDIR=/folder')
 
 	dirname = '%s-%s-%s' % (WAF, VERSION, REVISION)
-	for i in [INSTALL,'/usr','/usr/local','/opt']:
+	for i in (INSTALL,'/usr','/usr/local','/opt'):
 		w = test(i + '/lib/' + dirname)
 		if w: return w
 
@@ -153,7 +162,7 @@ def find_lib():
 	if w: return w
 
 	#unpack
-	unpack_wafdir(dir)
+	unpack_wafdir(dir, src)
 	return dir
 
 wafdir = find_lib()

File diff suppressed because it is too large
+ 513 - 300
sdk/waf/waflib/Build.py


+ 56 - 37
sdk/waf/waflib/ConfigSet.py

@@ -1,21 +1,21 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)
 
 """
 
 ConfigSet: a special dict
 
-The values put in :py:class:`ConfigSet` must be lists
+The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings)
 """
 
 import copy, re, os
 from waflib import Logs, Utils
-re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
+re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
 
 class ConfigSet(object):
 	"""
-	A dict that honor serialization and parent relationships. The serialization format
+	A copy-on-write dict with human-readable serialized format. The serialization format
 	is human-readable (python-like) and performed by using eval() and repr().
 	For high performance prefer pickle. Do not store functions as they are not serializable.
 
@@ -39,17 +39,20 @@ class ConfigSet(object):
 
 	def __contains__(self, key):
 		"""
-		Enable the *in* syntax::
+		Enables the *in* syntax::
 
 			if 'foo' in env:
 				print(env['foo'])
 		"""
-		if key in self.table: return True
-		try: return self.parent.__contains__(key)
-		except AttributeError: return False # parent may not exist
+		if key in self.table:
+			return True
+		try:
+			return self.parent.__contains__(key)
+		except AttributeError:
+			return False # parent may not exist
 
 	def keys(self):
-		"""Dict interface (unknown purpose)"""
+		"""Dict interface"""
 		keys = set()
 		cur = self
 		while cur:
@@ -59,6 +62,9 @@ class ConfigSet(object):
 		keys.sort()
 		return keys
 
+	def __iter__(self):
+		return iter(self.keys())
+
 	def __str__(self):
 		"""Text representation of the ConfigSet (for debugging purposes)"""
 		return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
@@ -73,7 +79,7 @@ class ConfigSet(object):
 		"""
 		try:
 			while 1:
-				x = self.table.get(key, None)
+				x = self.table.get(key)
 				if not x is None:
 					return x
 				self = self.parent
@@ -82,13 +88,13 @@ class ConfigSet(object):
 
 	def __setitem__(self, key, value):
 		"""
-		Dictionary interface: get value from key
+		Dictionary interface: set value from key
 		"""
 		self.table[key] = value
 
 	def __delitem__(self, key):
 		"""
-		Dictionary interface: get value from key
+		Dictionary interface: mark the value as missing
 		"""
 		self[key] = []
 
@@ -101,7 +107,7 @@ class ConfigSet(object):
 				conf.env['value']
 		"""
 		if name in self.__slots__:
-			return object.__getattr__(self, name)
+			return object.__getattribute__(self, name)
 		else:
 			return self[name]
 
@@ -152,7 +158,7 @@ class ConfigSet(object):
 
 	def detach(self):
 		"""
-		Detach self from its parent (if existing)
+		Detaches this instance from its parent (if present)
 
 		Modifying the parent :py:class:`ConfigSet` will not change the current object
 		Modifying this :py:class:`ConfigSet` will not modify the parent one.
@@ -167,21 +173,23 @@ class ConfigSet(object):
 			for x in keys:
 				tbl[x] = copy.deepcopy(tbl[x])
 			self.table = tbl
+		return self
 
 	def get_flat(self, key):
 		"""
-		Return a value as a string. If the input is a list, the value returned is space-separated.
+		Returns a value as a string. If the input is a list, the value returned is space-separated.
 
 		:param key: key to use
 		:type key: string
 		"""
 		s = self[key]
-		if isinstance(s, str): return s
+		if isinstance(s, str):
+			return s
 		return ' '.join(s)
 
 	def _get_list_value_for_modification(self, key):
 		"""
-		Return a list value for further modification.
+		Returns a list value for further modification.
 
 		The list may be modified inplace and there is no need to do this afterwards::
 
@@ -190,16 +198,20 @@ class ConfigSet(object):
 		try:
 			value = self.table[key]
 		except KeyError:
-			try: value = self.parent[key]
-			except AttributeError: value = []
-			if isinstance(value, list):
-				value = value[:]
+			try:
+				value = self.parent[key]
+			except AttributeError:
+				value = []
 			else:
-				value = [value]
+				if isinstance(value, list):
+					# force a copy
+					value = value[:]
+				else:
+					value = [value]
+			self.table[key] = value
 		else:
 			if not isinstance(value, list):
-				value = [value]
-		self.table[key] = value
+				self.table[key] = value = [value]
 		return value
 
 	def append_value(self, var, val):
@@ -211,9 +223,9 @@ class ConfigSet(object):
 
 		The value must be a list or a tuple
 		"""
-		current_value = self._get_list_value_for_modification(var)
 		if isinstance(val, str): # if there were string everywhere we could optimize this
 			val = [val]
+		current_value = self._get_list_value_for_modification(var)
 		current_value.extend(val)
 
 	def prepend_value(self, var, val):
@@ -231,7 +243,7 @@ class ConfigSet(object):
 
 	def append_unique(self, var, val):
 		"""
-		Append a value to the specified item only if it's not already present::
+		Appends a value to the specified item only if it's not already present::
 
 			def build(bld):
 				bld.env.append_unique('CFLAGS', ['-O2', '-g'])
@@ -248,7 +260,7 @@ class ConfigSet(object):
 
 	def get_merged_dict(self):
 		"""
-		Compute the merged dictionary from the fusion of self and all its parent
+		Computes the merged dictionary from the fusion of self and all its parent
 
 		:rtype: a ConfigSet object
 		"""
@@ -256,8 +268,10 @@ class ConfigSet(object):
 		env = self
 		while 1:
 			table_list.insert(0, env.table)
-			try: env = env.parent
-			except AttributeError: break
+			try:
+				env = env.parent
+			except AttributeError:
+				break
 		merged_table = {}
 		for table in table_list:
 			merged_table.update(table)
@@ -265,7 +279,7 @@ class ConfigSet(object):
 
 	def store(self, filename):
 		"""
-		Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
+		Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load` for reading such files.
 
 		:param filename: file to use
 		:type filename: string
@@ -292,31 +306,30 @@ class ConfigSet(object):
 
 	def load(self, filename):
 		"""
-		Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
+		Restores contents from a file (current values are not cleared). Files are written using :py:meth:`ConfigSet.store`.
 
 		:param filename: file to use
 		:type filename: string
 		"""
 		tbl = self.table
-		code = Utils.readf(filename, m='rU')
+		code = Utils.readf(filename, m='r')
 		for m in re_imp.finditer(code):
 			g = m.group
 			tbl[g(2)] = eval(g(3))
-		Logs.debug('env: %s' % str(self.table))
+		Logs.debug('env: %s', self.table)
 
 	def update(self, d):
 		"""
-		Dictionary interface: replace values from another dict
+		Dictionary interface: replace values with the ones from another dict
 
 		:param d: object to use the value from
 		:type d: dict-like object
 		"""
-		for k, v in d.items():
-			self[k] = v
+		self.table.update(d)
 
 	def stash(self):
 		"""
-		Store the object state, to provide a kind of transaction support::
+		Stores the object state to provide transactionality semantics::
 
 			env = ConfigSet()
 			env.stash()
@@ -334,6 +347,12 @@ class ConfigSet(object):
 			tbl[x] = copy.deepcopy(tbl[x])
 		self.undo_stack = self.undo_stack + [orig]
 
+	def commit(self):
+		"""
+		Commits transactional changes. See :py:meth:`ConfigSet.stash`
+		"""
+		self.undo_stack.pop(-1)
+
 	def revert(self):
 		"""
 		Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`

+ 303 - 221
sdk/waf/waflib/Configure.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)
 
 """
 Configuration system
@@ -12,22 +12,9 @@ A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``w
 * hold configuration routines such as ``find_program``, etc
 """
 
-import os, shlex, sys, time
+import os, re, shlex, shutil, sys, time, traceback
 from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors
 
-try:
-	from urllib import request
-except ImportError:
-	from urllib import urlopen
-else:
-	urlopen = request.urlopen
-
-BREAK    = 'break'
-"""In case of a configuration error, break"""
-
-CONTINUE = 'continue'
-"""In case of a configuration error, continue"""
-
 WAF_CONFIG_LOG = 'config.log'
 """Name of the configuration log file"""
 
@@ -39,59 +26,11 @@ conf_template = '''# project %(app)s configured on %(now)s by
 # using %(args)s
 #'''
 
-def download_check(node):
-	"""
-	Hook to check for the tools which are downloaded. Replace with your function if necessary.
-	"""
-	pass
-
-def download_tool(tool, force=False, ctx=None):
-	"""
-	Download a Waf tool from the remote repository defined in :py:const:`waflib.Context.remote_repo`::
-
-		$ waf configure --download
-	"""
-	for x in Utils.to_list(Context.remote_repo):
-		for sub in Utils.to_list(Context.remote_locs):
-			url = '/'.join((x, sub, tool + '.py'))
-			try:
-				web = urlopen(url)
-				try:
-					if web.getcode() != 200:
-						continue
-				except AttributeError:
-					pass
-			except Exception:
-				# on python3 urlopen throws an exception
-				# python 2.3 does not have getcode and throws an exception to fail
-				continue
-			else:
-				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
-				tmp.write(web.read(), 'wb')
-				Logs.warn('Downloaded %s from %s' % (tool, url))
-				download_check(tmp)
-				try:
-					module = Context.load_tool(tool)
-				except Exception:
-					Logs.warn('The tool %s from %s is unusable' % (tool, url))
-					try:
-						tmp.delete()
-					except Exception:
-						pass
-					continue
-				return module
-	raise Errors.WafError('Could not load the Waf tool')
-
 class ConfigurationContext(Context.Context):
 	'''configures the project'''
 
 	cmd = 'configure'
 
-	error_handlers = []
-	"""
-	Additional functions to handle configuration errors
-	"""
-
 	def __init__(self, **kw):
 		super(ConfigurationContext, self).__init__(**kw)
 		self.environ = dict(os.environ)
@@ -156,7 +95,7 @@ class ConfigurationContext(Context.Context):
 
 		top = self.top_dir
 		if not top:
-			top = Options.options.top
+			top = getattr(Options.options, 'top', None)
 		if not top:
 			top = getattr(Context.g_module, Context.TOP, None)
 		if not top:
@@ -168,17 +107,20 @@ class ConfigurationContext(Context.Context):
 
 		out = self.out_dir
 		if not out:
-			out = Options.options.out
+			out = getattr(Options.options, 'out', None)
 		if not out:
 			out = getattr(Context.g_module, Context.OUT, None)
 		if not out:
 			out = Options.lockfile.replace('.lock-waf_%s_' % sys.platform, '').replace('.lock-waf', '')
 
+		# someone can be messing with symlinks
+		out = os.path.realpath(out)
+
 		self.bldnode = (os.path.isabs(out) and self.root or self.path).make_node(out)
 		self.bldnode.mkdir()
 
 		if not os.path.isdir(self.bldnode.abspath()):
-			conf.fatal('Could not create the build directory %s' % self.bldnode.abspath())
+			self.fatal('Could not create the build directory %s' % self.bldnode.abspath())
 
 	def execute(self):
 		"""
@@ -187,6 +129,8 @@ class ConfigurationContext(Context.Context):
 		self.init_dirs()
 
 		self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
+		if os.path.exists(self.cachedir.abspath()):
+			shutil.rmtree(self.cachedir.abspath())
 		self.cachedir.mkdir()
 
 		path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
@@ -198,19 +142,13 @@ class ConfigurationContext(Context.Context):
 			if ver:
 				app = "%s (%s)" % (app, ver)
 
-		now = time.ctime()
-		pyver = sys.hexversion
-		systype = sys.platform
-		args = " ".join(sys.argv)
-		wafver = Context.WAFVERSION
-		abi = Context.ABI
-		self.to_log(conf_template % vars())
-
+		params = {'now': time.ctime(), 'pyver': sys.hexversion, 'systype': sys.platform, 'args': " ".join(sys.argv), 'wafver': Context.WAFVERSION, 'abi': Context.ABI, 'app': app}
+		self.to_log(conf_template % params)
 		self.msg('Setting top to', self.srcnode.abspath())
 		self.msg('Setting out to', self.bldnode.abspath())
 
 		if id(self.srcnode) == id(self.bldnode):
-			Logs.warn('Setting top == out (remember to use "update_outputs")')
+			Logs.warn('Setting top == out')
 		elif id(self.path) != id(self.srcnode):
 			if self.srcnode.is_child_of(self.path):
 				Logs.warn('Are you certain that you do not want to set top="." ?')
@@ -226,8 +164,9 @@ class ConfigurationContext(Context.Context):
 		# consider the current path as the root directory (see prepare_impl).
 		# to remove: use 'waf distclean'
 		env = ConfigSet.ConfigSet()
-		env['argv'] = sys.argv
-		env['options'] = Options.options.__dict__
+		env.argv = sys.argv
+		env.options = Options.options.__dict__
+		env.config_cmd = self.cmd
 
 		env.run_dir = Context.run_dir
 		env.top_dir = Context.top_dir
@@ -235,16 +174,17 @@ class ConfigurationContext(Context.Context):
 
 		# conf.hash & conf.files hold wscript files paths and hash
 		# (used only by Configure.autoconfig)
-		env['hash'] = self.hash
-		env['files'] = self.files
-		env['environ'] = dict(self.environ)
-
-		if not self.env.NO_LOCK_IN_RUN:
-			env.store(Context.run_dir + os.sep + Options.lockfile)
-		if not self.env.NO_LOCK_IN_TOP:
-			env.store(Context.top_dir + os.sep + Options.lockfile)
-		if not self.env.NO_LOCK_IN_OUT:
-			env.store(Context.out_dir + os.sep + Options.lockfile)
+		env.hash = self.hash
+		env.files = self.files
+		env.environ = dict(self.environ)
+		env.launch_dir = Context.launch_dir
+
+		if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')):
+			env.store(os.path.join(Context.run_dir, Options.lockfile))
+		if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')):
+			env.store(os.path.join(Context.top_dir, Options.lockfile))
+		if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')):
+			env.store(os.path.join(Context.out_dir, Options.lockfile))
 
 	def prepare_env(self, env):
 		"""
@@ -254,14 +194,20 @@ class ConfigurationContext(Context.Context):
 		:param env: a ConfigSet, usually ``conf.env``
 		"""
 		if not env.PREFIX:
-			if Options.options.prefix or Utils.is_win32:
-				env.PREFIX = os.path.abspath(os.path.expanduser(Options.options.prefix))
+			if getattr(Options.options, 'prefix', None):
+				env.PREFIX = Options.options.prefix
 			else:
-				env.PREFIX = ''
+				env.PREFIX = '/'
 		if not env.BINDIR:
-			env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
+			if getattr(Options.options, 'bindir', None):
+				env.BINDIR = Options.options.bindir
+			else:
+				env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
 		if not env.LIBDIR:
-			env.LIBDIR = Utils.subst_vars('${PREFIX}/lib', env)
+			if getattr(Options.options, 'libdir', None):
+				env.LIBDIR = Options.options.libdir
+			else:
+				env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)
 
 	def store(self):
 		"""Save the config results into the cache file"""
@@ -275,45 +221,43 @@ class ConfigurationContext(Context.Context):
 			tmpenv = self.all_envs[key]
 			tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
 
-	def load(self, input, tooldir=None, funs=None, download=True):
+	def load(self, tool_list, **kw):
 		"""
 		Load Waf tools, which will be imported whenever a build is started.
 
-		:param input: waf tools to import
-		:type input: list of string
+		:param tool_list: waf tools to import
+		:type tool_list: list of string
 		:param tooldir: paths for the imports
 		:type tooldir: list of string
 		:param funs: functions to execute from the waf tools
 		:type funs: list of string
-		:param download: whether to download the tool from the waf repository
-		:type download: bool
+		:param cache: whether to prevent the tool from running twice (false by default)
+		:type cache: bool
 		"""
 
-		tools = Utils.to_list(input)
-		if tooldir: tooldir = Utils.to_list(tooldir)
+		tools = Utils.to_list(tool_list)
+		tooldir = Utils.to_list(kw.get('tooldir', ''))
+		with_sys_path = kw.get('with_sys_path', True)
+		funs = kw.get('funs')
 		for tool in tools:
 			# avoid loading the same tool more than once with the same functions
 			# used by composite projects
 
-			mag = (tool, id(self.env), funs)
-			if mag in self.tool_cache:
-				self.to_log('(tool %s is already loaded, skipping)' % tool)
-				continue
-			self.tool_cache.append(mag)
+			if kw.get('cache'):
+				mag = (tool, id(self.env), tuple(tooldir), funs)
+				if mag in self.tool_cache:
+					self.to_log('(tool %s is already loaded, skipping)' % tool)
+					continue
+				self.tool_cache.append(mag)
 
 			module = None
 			try:
-				module = Context.load_tool(tool, tooldir)
+				module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
 			except ImportError as e:
-				if Options.options.download:
-					module = download_tool(tool, ctx=self)
-					if not module:
-						self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
-				else:
-					self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
+				self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e))
 			except Exception as e:
 				self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
-				self.to_log(Utils.ex_stack())
+				self.to_log(traceback.format_exc())
 				raise
 
 			if funs is not None:
@@ -321,8 +265,10 @@ class ConfigurationContext(Context.Context):
 			else:
 				func = getattr(module, 'configure', None)
 				if func:
-					if type(func) is type(Utils.readf): func(self)
-					else: self.eval_rules(func)
+					if type(func) is type(Utils.readf):
+						func(self)
+					else:
+						self.eval_rules(func)
 
 			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
 
@@ -334,13 +280,12 @@ class ConfigurationContext(Context.Context):
 		:type node: :py:class:`waflib.Node.Node`
 		"""
 		super(ConfigurationContext, self).post_recurse(node)
-		self.hash = hash((self.hash, node.read('rb')))
+		self.hash = Utils.h_list((self.hash, node.read('rb')))
 		self.files.append(node.abspath())
 
 	def eval_rules(self, rules):
 		"""
-		Execute the configuration tests. The method :py:meth:`waflib.Configure.ConfigurationContext.err_handler`
-		is used to process the eventual exceptions
+		Execute configuration tests provided as list of functions to run
 
 		:param rules: list of configuration method names
 		:type rules: list of string
@@ -348,28 +293,9 @@ class ConfigurationContext(Context.Context):
 		self.rules = Utils.to_list(rules)
 		for x in self.rules:
 			f = getattr(self, x)
-			if not f: self.fatal("No such method '%s'." % x)
-			try:
-				f()
-			except Exception as e:
-				ret = self.err_handler(x, e)
-				if ret == BREAK:
-					break
-				elif ret == CONTINUE:
-					continue
-				else:
-					raise
-
-	def err_handler(self, fun, error):
-		"""
-		Error handler for the configuration tests, the default is to let the exception raise
-
-		:param fun: configuration test
-		:type fun: method
-		:param error: exception
-		:type error: exception
-		"""
-		pass
+			if not f:
+				self.fatal('No such configuration function %r' % x)
+			f()
 
 def conf(f):
 	"""
@@ -384,23 +310,20 @@ def conf(f):
 	:type f: function
 	"""
 	def fun(*k, **kw):
-		mandatory = True
-		if 'mandatory' in kw:
-			mandatory = kw['mandatory']
-			del kw['mandatory']
-
+		mandatory = kw.pop('mandatory', True)
 		try:
 			return f(*k, **kw)
 		except Errors.ConfigurationError:
 			if mandatory:
 				raise
 
+	fun.__name__ = f.__name__
 	setattr(ConfigurationContext, f.__name__, fun)
 	setattr(Build.BuildContext, f.__name__, fun)
 	return f
 
 @conf
-def add_os_flags(self, var, dest=None):
+def add_os_flags(self, var, dest=None, dup=False):
 	"""
 	Import operating system environment values into ``conf.env`` dict::
 
@@ -411,10 +334,15 @@ def add_os_flags(self, var, dest=None):
 	:type var: string
 	:param dest: destination variable, by default the same as var
 	:type dest: string
+	:param dup: add the same set of flags again
+	:type dup: bool
 	"""
-	# do not use 'get' to make certain the variable is not defined
-	try: self.env.append_value(dest or var, shlex.split(self.environ[var]))
-	except KeyError: pass
+	try:
+		flags = shlex.split(self.environ[var])
+	except KeyError:
+		return
+	if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])):
+		self.env.append_value(dest or var, flags)
 
 @conf
 def cmd_to_list(self, cmd):
@@ -424,35 +352,39 @@ def cmd_to_list(self, cmd):
 	:param cmd: command
 	:type cmd: a string or a list of string
 	"""
-	if isinstance(cmd, str) and cmd.find(' '):
-		try:
-			os.stat(cmd)
-		except OSError:
+	if isinstance(cmd, str):
+		if os.path.isfile(cmd):
+			# do not take any risk
+			return [cmd]
+		if os.sep == '/':
 			return shlex.split(cmd)
 		else:
-			return [cmd]
+			try:
+				return shlex.split(cmd, posix=False)
+			except TypeError:
+				# Python 2.5 on windows?
+				return shlex.split(cmd)
 	return cmd
 
 @conf
-def check_waf_version(self, mini='1.6.99', maxi='1.8.0'):
+def check_waf_version(self, mini='1.9.99', maxi='2.2.0', **kw):
 	"""
 	Raise a Configuration error if the Waf version does not strictly match the given bounds::
 
-		conf.check_waf_version(mini='1.7.0', maxi='1.8.0')
+		conf.check_waf_version(mini='1.9.99', maxi='2.2.0')
 
 	:type  mini: number, tuple or string
 	:param mini: Minimum required version
 	:type  maxi: number, tuple or string
 	:param maxi: Maximum allowed version
 	"""
-	self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)))
+	self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)), **kw)
 	ver = Context.HEXVERSION
 	if Utils.num2ver(mini) > ver:
 		self.fatal('waf version should be at least %r (%r found)' % (Utils.num2ver(mini), ver))
-
 	if Utils.num2ver(maxi) < ver:
 		self.fatal('waf version should be at most %r (%r found)' % (Utils.num2ver(maxi), ver))
-	self.end_msg('ok')
+	self.end_msg('ok', **kw)
 
 @conf
 def find_file(self, filename, path_list=[]):
@@ -461,11 +393,11 @@ def find_file(self, filename, path_list=[]):
 
 	:param filename: name of the file to search for
 	:param path_list: list of directories to search
-	:return: the first occurrence filename or '' if filename could not be found
+	:return: the first matching filename; else a configuration exception is raised
 	"""
 	for n in Utils.to_list(filename):
 		for d in Utils.to_list(path_list):
-			p = os.path.join(d, n)
+			p = os.path.expanduser(os.path.join(d, n))
 			if os.path.exists(p):
 				return p
 	self.fatal('Could not find %r' % filename)
@@ -477,96 +409,246 @@ def find_program(self, filename, **kw):
 
 	When var is used, you may set os.environ[var] to help find a specific program version, for example::
 
-		$ VALAC=/usr/bin/valac_test waf configure
+		$ CC='ccache gcc' waf configure
 
 	:param path_list: paths to use for searching
 	:type param_list: list of string
-	:param var: store the result to conf.env[var], by default use filename.upper()
+	:param var: store the result to conf.env[var] where var defaults to filename.upper() if not provided; the result is stored as a list of strings
 	:type var: string
-	:param ext: list of extensions for the binary (do not add an extension for portability)
-	:type ext: list of string
+	:param value: obtain the program from the value passed exclusively
+	:type value: list or string (list is preferred)
+	:param exts: list of extensions for the binary (do not add an extension for portability)
+	:type exts: list of string
+	:param msg: name to display in the log, by default filename is used
+	:type msg: string
+	:param interpreter: interpreter for the program
+	:type interpreter: ConfigSet variable key
+	:raises: :py:class:`waflib.Errors.ConfigurationError`
 	"""
 
 	exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
 
-	environ = kw.get('environ', os.environ)
+	environ = kw.get('environ', getattr(self, 'environ', os.environ))
 
 	ret = ''
+
 	filename = Utils.to_list(filename)
+	msg = kw.get('msg', ', '.join(filename))
 
 	var = kw.get('var', '')
 	if not var:
-		var = filename[0].upper()
-
-	if self.env[var]:
-		ret = self.env[var]
-	elif var in environ:
-		ret = environ[var]
+		var = re.sub(r'\W', '_', filename[0].upper())
 
 	path_list = kw.get('path_list', '')
-	if not ret:
-		if path_list:
-			path_list = Utils.to_list(path_list)
+	if path_list:
+		path_list = Utils.to_list(path_list)
+	else:
+		path_list = environ.get('PATH', '').split(os.pathsep)
+
+	if kw.get('value'):
+		# user-provided in command-line options and passed to find_program
+		ret = self.cmd_to_list(kw['value'])
+	elif environ.get(var):
+		# user-provided in the os environment
+		ret = self.cmd_to_list(environ[var])
+	elif self.env[var]:
+		# a default option in the wscript file
+		ret = self.cmd_to_list(self.env[var])
+	else:
+		if not ret:
+			ret = self.find_binary(filename, exts.split(','), path_list)
+		if not ret and Utils.winreg:
+			ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename)
+		if not ret and Utils.winreg:
+			ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
+		ret = self.cmd_to_list(ret)
+
+	if ret:
+		if len(ret) == 1:
+			retmsg = ret[0]
 		else:
-			path_list = environ.get('PATH', '').split(os.pathsep)
-
-		if not isinstance(filename, list):
-			filename = [filename]
-
-		for a in exts.split(','):
-			if ret:
-				break
-			for b in filename:
-				if ret:
-					break
-				for c in path_list:
-					if ret:
-						break
-					x = os.path.expanduser(os.path.join(c, b + a))
-					if os.path.isfile(x):
-						ret = x
-
-	if not ret and Utils.winreg:
-		ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename)
-	if not ret and Utils.winreg:
-		ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
-
-	self.msg('Checking for program ' + ','.join(filename), ret or False)
-	self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))
+			retmsg = ret
+	else:
+		retmsg = False
+
+	self.msg('Checking for program %r' % msg, retmsg, **kw)
+	if not kw.get('quiet'):
+		self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))
 
 	if not ret:
-		self.fatal(kw.get('errmsg', '') or 'Could not find the program %s' % ','.join(filename))
+		self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename)
 
-	if var:
+	interpreter = kw.get('interpreter')
+	if interpreter is None:
+		if not Utils.check_exe(ret[0], env=environ):
+			self.fatal('Program %r is not executable' % ret)
 		self.env[var] = ret
+	else:
+		self.env[var] = self.env[interpreter] + ret
+
 	return ret
 
+@conf
+def find_binary(self, filenames, exts, paths):
+	for f in filenames:
+		for ext in exts:
+			exe_name = f + ext
+			if os.path.isabs(exe_name):
+				if os.path.isfile(exe_name) and os.access(exe_name, os.X_OK):
+					return exe_name
+			else:
+				for path in paths:
+					x = os.path.expanduser(os.path.join(path, exe_name))
+					if os.path.isfile(x) and os.access(x, os.X_OK):
+						return x
+	return None
 
 @conf
-def find_perl_program(self, filename, path_list=[], var=None, environ=None, exts=''):
+def run_build(self, *k, **kw):
 	"""
-	Search for a perl program on the operating system
+	Create a temporary build context to execute a build. A temporary reference to that build
+	context is kept on self.test_bld for debugging purposes.
+	The arguments to this function are passed to a single task generator for that build.
+	Only three parameters are mandatory:
+
+	:param features: features to pass to a task generator created in the build
+	:type features: list of string
+	:param compile_filename: file to create for the compilation (default: *test.c*)
+	:type compile_filename: string
+	:param code: input file contents
+	:type code: string
+
+	Though this function returns *0* by default, the build may bind attribute named *retval* on the
+	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.
+
+	The temporary builds creates a temporary folder; the name of that folder is calculated
+	by hashing input arguments to this function, with the exception of :py:class:`waflib.ConfigSet.ConfigSet`
+	objects which are used for both reading and writing values.
+
+	This function also features a cache which is disabled by default; that cache relies
+	on the hash value calculated as indicated above::
+
+		def options(opt):
+			opt.add_option('--confcache', dest='confcache', default=0,
+				action='count', help='Use a configuration cache')
+
+	And execute the configuration with the following command-line::
+
+		$ waf configure --confcache
 
-	:param filename: file to search for
-	:type filename: string
-	:param path_list: list of paths to look into
-	:type path_list: list of string
-	:param var: store the results into *conf.env.var*
-	:type var: string
-	:param environ: operating system environment to pass to :py:func:`waflib.Configure.find_program`
-	:type environ: dict
-	:param exts: extensions given to :py:func:`waflib.Configure.find_program`
-	:type exts: list
 	"""
+	buf = []
+	for key in sorted(kw.keys()):
+		v = kw[key]
+		if isinstance(v, ConfigSet.ConfigSet):
+			# values are being written to, so they are excluded from contributing to the hash
+			continue
+		elif hasattr(v, '__call__'):
+			buf.append(Utils.h_fun(v))
+		else:
+			buf.append(str(v))
+	h = Utils.h_list(buf)
+	dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
+
+	cachemode = kw.get('confcache', getattr(Options.options, 'confcache', None))
+
+	if not cachemode and os.path.exists(dir):
+		shutil.rmtree(dir)
+
+	try:
+		os.makedirs(dir)
+	except OSError:
+		pass
+
+	try:
+		os.stat(dir)
+	except OSError:
+		self.fatal('cannot use the configuration test folder %r' % dir)
 
+	if cachemode == 1:
+		try:
+			proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
+		except EnvironmentError:
+			pass
+		else:
+			ret = proj['cache_run_build']
+			if isinstance(ret, str) and ret.startswith('Test does not build'):
+				self.fatal(ret)
+			return ret
+
+	bdir = os.path.join(dir, 'testbuild')
+
+	if not os.path.exists(bdir):
+		os.makedirs(bdir)
+
+	cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build')
+	self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir)
+	bld.init_dirs()
+	bld.progress_bar = 0
+	bld.targets = '*'
+
+	bld.logger = self.logger
+	bld.all_envs.update(self.all_envs) # not really necessary
+	bld.env = kw['env']
+
+	bld.kw = kw
+	bld.conf = self
+	kw['build_fun'](bld)
+	ret = -1
 	try:
-		app = self.find_program(filename, path_list=path_list, var=var, environ=environ, exts=exts)
-	except Exception:
-		self.find_program('perl', var='PERL')
-		app = self.find_file(filename, os.environ['PATH'].split(os.pathsep))
-		if not app:
+		try:
+			bld.compile()
+		except Errors.WafError:
+			ret = 'Test does not build: %s' % traceback.format_exc()
+			self.fatal(ret)
+		else:
+			ret = getattr(bld, 'retval', 0)
+	finally:
+		if cachemode:
+			# cache the results each time
+			proj = ConfigSet.ConfigSet()
+			proj['cache_run_build'] = ret
+			proj.store(os.path.join(dir, 'cache_run_build'))
+		else:
+			shutil.rmtree(dir)
+	return ret
+
+@conf
+def ret_msg(self, msg, args):
+	if isinstance(msg, str):
+		return msg
+	return msg(args)
+
+@conf
+def test(self, *k, **kw):
+
+	if not 'env' in kw:
+		kw['env'] = self.env.derive()
+
+	# validate_c for example
+	if kw.get('validate'):
+		kw['validate'](kw)
+
+	self.start_msg(kw['msg'], **kw)
+	ret = None
+	try:
+		ret = self.run_build(*k, **kw)
+	except self.errors.ConfigurationError:
+		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+		if Logs.verbose > 1:
 			raise
-		if var:
-			self.env[var] = Utils.to_list(self.env['PERL']) + [app]
-	self.msg('Checking for %r' % filename, app)
+		else:
+			self.fatal('The configuration failed')
+	else:
+		kw['success'] = ret
+
+	if kw.get('post_check'):
+		ret = kw['post_check'](kw)
+
+	if ret:
+		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
+		self.fatal('The configuration failed %r' % ret)
+	else:
+		self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
+	return ret
 

+ 306 - 169
sdk/waf/waflib/Context.py

@@ -1,26 +1,36 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2010 (ita)
+# Thomas Nagy, 2010-2018 (ita)
 
 """
-Classes and functions required for waf commands
+Classes and functions enabling the command system
 """
 
-import os, imp, sys
+import os, re, sys
 from waflib import Utils, Errors, Logs
 import waflib.Node
 
+if sys.hexversion > 0x3040000:
+	import types
+	class imp(object):
+		new_module = lambda x: types.ModuleType(x)
+else:
+	import imp
+
 # the following 3 constants are updated on each new release (do not touch)
-HEXVERSION=0x1070b00
+HEXVERSION=0x2010400
 """Constant updated on new releases"""
 
-WAFVERSION="1.7.11"
+WAFVERSION="2.1.4"
 """Constant updated on new releases"""
 
-WAFREVISION="50f631bc5e00bdda966c68094229b99be9a21084"
-"""Constant updated on new releases"""
+WAFREVISION="627780cbb74b86b31016c822dcf2b0bfcbb337cb"
+"""Git revision when the waf version is updated"""
 
-ABI = 98
+WAFNAME="waf"
+"""Application name displayed on --help"""
+
+ABI = 20
 """Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""
 
 DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
@@ -41,7 +51,6 @@ OUT  = 'out'
 WSCRIPT_FILE = 'wscript'
 """Name of the waf script files"""
 
-
 launch_dir = ''
 """Directory from which waf has been called"""
 run_dir = ''
@@ -53,23 +62,12 @@ out_dir = ''
 waf_dir = ''
 """Directory containing the waf modules"""
 
-local_repo = ''
-"""Local repository containing additional Waf tools (plugins)"""
-remote_repo = 'http://waf.googlecode.com/git/'
-"""
-Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
-
-	$ waf configure --download
-"""
-
-remote_locs = ['waflib/extras', 'waflib/Tools']
-"""
-Remote directories for use with :py:const:`waflib.Context.remote_repo`
-"""
+default_encoding = Utils.console_encoding()
+"""Encoding to use when reading outputs from other processes"""
 
 g_module = None
 """
-Module representing the main wscript file (see :py:const:`waflib.Context.run_dir`)
+Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`)
 """
 
 STDOUT = 1
@@ -82,20 +80,20 @@ List of :py:class:`waflib.Context.Context` subclasses that can be used as waf co
 are added automatically by a metaclass.
 """
 
-
 def create_context(cmd_name, *k, **kw):
 	"""
-	Create a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
+	Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
 	Used in particular by :py:func:`waflib.Scripting.run_command`
 
-	:param cmd_name: command
+	:param cmd_name: command name
 	:type cmd_name: string
 	:param k: arguments to give to the context class initializer
 	:type k: list
 	:param k: keyword arguments to give to the context class initializer
 	:type k: dict
+	:return: Context object
+	:rtype: :py:class:`waflib.Context.Context`
 	"""
-	global classes
 	for x in classes:
 		if x.cmd == cmd_name:
 			return x(*k, **kw)
@@ -105,14 +103,15 @@ def create_context(cmd_name, *k, **kw):
 
 class store_context(type):
 	"""
-	Metaclass for storing the command classes into the list :py:const:`waflib.Context.classes`
-	Context classes must provide an attribute 'cmd' representing the command to execute
+	Metaclass that registers command classes into the list :py:const:`waflib.Context.classes`
+	Context classes must provide an attribute 'cmd' representing the command name, and a function
+	attribute 'fun' representing the function name that the command uses.
 	"""
-	def __init__(cls, name, bases, dict):
-		super(store_context, cls).__init__(name, bases, dict)
+	def __init__(cls, name, bases, dct):
+		super(store_context, cls).__init__(name, bases, dct)
 		name = cls.__name__
 
-		if name == 'ctx' or name == 'Context':
+		if name in ('ctx', 'Context'):
 			return
 
 		try:
@@ -123,11 +122,10 @@ class store_context(type):
 		if not getattr(cls, 'fun', None):
 			cls.fun = cls.cmd
 
-		global classes
 		classes.insert(0, cls)
 
 ctx = store_context('ctx', (object,), {})
-"""Base class for the :py:class:`waflib.Context.Context` classes"""
+"""Base class for all :py:class:`waflib.Context.Context` classes"""
 
 class Context(ctx):
 	"""
@@ -138,7 +136,7 @@ class Context(ctx):
 		def foo(ctx):
 			print(ctx.__class__.__name__) # waflib.Context.Context
 
-	Subclasses must define the attribute 'cmd':
+	Subclasses must define the class attributes 'cmd' and 'fun':
 
 	:param cmd: command to execute as in ``waf cmd``
 	:type cmd: string
@@ -146,7 +144,7 @@ class Context(ctx):
 	:type fun: string
 
 	.. inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext
-
+	   :top-classes: waflib.Context.Context
 	"""
 
 	errors = Errors
@@ -156,22 +154,18 @@ class Context(ctx):
 
 	tools = {}
 	"""
-	A cache for modules (wscript files) read by :py:meth:`Context.Context.load`
+	A module cache for wscript files; see :py:meth:`Context.Context.load`
 	"""
 
 	def __init__(self, **kw):
 		try:
 			rd = kw['run_dir']
 		except KeyError:
-			global run_dir
 			rd = run_dir
 
 		# binds the context to the nodes in use to avoid a context singleton
-		class node_class(waflib.Node.Node):
-			pass
-		self.node_class = node_class
-		self.node_class.__module__ = "waflib.Node"
-		self.node_class.__name__ = "Nod3"
+		self.node_class = type('Nod3', (waflib.Node.Node,), {})
+		self.node_class.__module__ = 'waflib.Node'
 		self.node_class.ctx = self
 
 		self.root = self.node_class('', None)
@@ -182,43 +176,49 @@ class Context(ctx):
 		self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self}
 		self.logger = None
 
-	def __hash__(self):
+	def finalize(self):
 		"""
-		Return a hash value for storing context objects in dicts or sets. The value is not persistent.
-
-		:return: hash value
-		:rtype: int
+		Called to free resources such as logger files
 		"""
-		return id(self)
+		try:
+			logger = self.logger
+		except AttributeError:
+			pass
+		else:
+			Logs.free_logger(logger)
+			delattr(self, 'logger')
 
-	def load(self, tool_list, *k, **kw):
+	def load(self, tool_list, **kw):
 		"""
-		Load a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` from it.
-		A ``tooldir`` value may be provided as a list of module paths.
+		Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` from it.
 
+		:param tool_list: list of Waf tool names to load
 		:type tool_list: list of string or space-separated string
-		:param tool_list: list of Waf tools to use
+		:param tooldir: paths for the imports
+		:type tooldir: list of string
 		"""
 		tools = Utils.to_list(tool_list)
 		path = Utils.to_list(kw.get('tooldir', ''))
+		with_sys_path = kw.get('with_sys_path', True)
 
 		for t in tools:
-			module = load_tool(t, path)
+			module = load_tool(t, path, with_sys_path=with_sys_path)
 			fun = getattr(module, kw.get('name', self.fun), None)
 			if fun:
 				fun(self)
 
 	def execute(self):
 		"""
-		Execute the command. Redefine this method in subclasses.
+		Here, it calls the function name in the top-level wscript file. Most subclasses
+		redefine this method to provide additional functionality.
 		"""
-		global g_module
 		self.recurse([os.path.dirname(g_module.root_path)])
 
 	def pre_recurse(self, node):
 		"""
-		Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`. The node given is set
-		as an attribute ``self.cur_script``, and as the current path ``self.path``
+		Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`.
+		The current script is bound as a Node object on ``self.cur_script``, and the current path
+		is bound to ``self.path``
 
 		:param node: script
 		:type node: :py:class:`waflib.Node.Node`
@@ -230,7 +230,7 @@ class Context(ctx):
 
 	def post_recurse(self, node):
 		"""
-		Restore ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.
+		Restores ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.
 
 		:param node: script
 		:type node: :py:class:`waflib.Node.Node`
@@ -239,12 +239,15 @@ class Context(ctx):
 		if self.cur_script:
 			self.path = self.cur_script.parent
 
-	def recurse(self, dirs, name=None, mandatory=True, once=True):
+	def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
 		"""
-		Run user code from the supplied list of directories.
+		Runs user-provided functions from the supplied list of directories.
 		The directories can be either absolute, or relative to the directory
-		of the wscript file. The methods :py:meth:`waflib.Context.Context.pre_recurse` and :py:meth:`waflib.Context.Context.post_recurse`
-		are called immediately before and after a script has been executed.
+		of the wscript file
+
+		The methods :py:meth:`waflib.Context.Context.pre_recurse` and
+		:py:meth:`waflib.Context.Context.post_recurse` are called immediately before
+		and after a script has been executed.
 
 		:param dirs: List of directories to visit
 		:type dirs: list of string or space-separated string
@@ -274,7 +277,7 @@ class Context(ctx):
 				cache[node] = True
 				self.pre_recurse(node)
 				try:
-					function_code = node.read('rU')
+					function_code = node.read('r', encoding)
 					exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
 				finally:
 					self.post_recurse(node)
@@ -285,39 +288,57 @@ class Context(ctx):
 					cache[tup] = True
 					self.pre_recurse(node)
 					try:
-						wscript_module = load_module(node.abspath())
+						wscript_module = load_module(node.abspath(), encoding=encoding)
 						user_function = getattr(wscript_module, (name or self.fun), None)
 						if not user_function:
 							if not mandatory:
 								continue
-							raise Errors.WafError('No function %s defined in %s' % (name or self.fun, node.abspath()))
+							raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath()))
 						user_function(self)
 					finally:
 						self.post_recurse(node)
 				elif not node:
 					if not mandatory:
 						continue
+					try:
+						os.listdir(d)
+					except OSError:
+						raise Errors.WafError('Cannot read the folder %r' % d)
 					raise Errors.WafError('No wscript file in directory %s' % d)
 
+	def log_command(self, cmd, kw):
+		if Logs.verbose:
+			fmt = os.environ.get('WAF_CMD_FORMAT')
+			if fmt == 'string':
+				if not isinstance(cmd, str):
+					cmd = Utils.shell_escape(cmd)
+			Logs.debug('runner: %r', cmd)
+			Logs.debug('runner_env: kw=%s', kw)
+
 	def exec_command(self, cmd, **kw):
 		"""
-		Execute a command and return the exit status. If the context has the attribute 'log',
-		capture and log the process stderr/stdout for logging purposes::
+		Runs an external process and returns the exit status::
 
 			def run(tsk):
 				ret = tsk.generator.bld.exec_command('touch foo.txt')
 				return ret
 
-		This method captures the standard/error outputs (Issue 1101), but it does not return the values
-		unlike :py:meth:`waflib.Context.Context.cmd_and_log`
+		If the context has the attribute 'log', then captures and logs the process stderr/stdout.
+		Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
+		stdout/stderr values captured.
 
 		:param cmd: command argument for subprocess.Popen
-		:param kw: keyword arguments for subprocess.Popen
+		:type cmd: string or list
+		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
+		:type kw: dict
+		:returns: process exit status
+		:rtype: integer
+		:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
+		:raises: :py:class:`waflib.Errors.WafError` in case of execution failure
 		"""
 		subprocess = Utils.subprocess
 		kw['shell'] = isinstance(cmd, str)
-		Logs.debug('runner: %r' % cmd)
-		Logs.debug('runner_env: kw=%s' % kw)
+		self.log_command(cmd, kw)
 
 		if self.logger:
 			self.logger.info(cmd)
@@ -327,89 +348,131 @@ class Context(ctx):
 		if 'stderr' not in kw:
 			kw['stderr'] = subprocess.PIPE
 
+		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0], env=kw.get('env', os.environ)):
+			# This call isn't a shell command, and if the specified exe doesn't exist, check for a relative path being set
+			# with cwd and if so assume the caller knows what they're doing and don't pre-emptively fail
+			if not (cmd[0][0] == '.' and 'cwd' in kw):
+				raise Errors.WafError('Program %s not found!' % cmd[0])
+
+		cargs = {}
+		if 'timeout' in kw:
+			if sys.hexversion >= 0x3030000:
+				cargs['timeout'] = kw['timeout']
+				if not 'start_new_session' in kw:
+					kw['start_new_session'] = True
+			del kw['timeout']
+		if 'input' in kw:
+			if kw['input']:
+				cargs['input'] = kw['input']
+				kw['stdin'] = subprocess.PIPE
+			del kw['input']
+
+		if 'cwd' in kw:
+			if not isinstance(kw['cwd'], str):
+				kw['cwd'] = kw['cwd'].abspath()
+
+		encoding = kw.pop('decode_as', default_encoding)
+
 		try:
-			if kw['stdout'] or kw['stderr']:
-				p = subprocess.Popen(cmd, **kw)
-				(out, err) = p.communicate()
-				ret = p.returncode
-			else:
-				out, err = (None, None)
-				ret = subprocess.Popen(cmd, **kw).wait()
+			ret, out, err = Utils.run_process(cmd, kw, cargs)
 		except Exception as e:
 			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
 
 		if out:
 			if not isinstance(out, str):
-				out = out.decode(sys.stdout.encoding or 'iso8859-1')
+				out = out.decode(encoding, errors='replace')
 			if self.logger:
-				self.logger.debug('out: %s' % out)
+				self.logger.debug('out: %s', out)
 			else:
-				sys.stdout.write(out)
+				Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
 		if err:
 			if not isinstance(err, str):
-				err = err.decode(sys.stdout.encoding or 'iso8859-1')
+				err = err.decode(encoding, errors='replace')
 			if self.logger:
 				self.logger.error('err: %s' % err)
 			else:
-				sys.stderr.write(err)
+				Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
 
 		return ret
 
 	def cmd_and_log(self, cmd, **kw):
 		"""
-		Execute a command and return stdout if the execution is successful.
+		Executes a process and returns stdout/stderr if the execution is successful.
 		An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
-		will be bound to the WafError object::
+		will be bound to the WafError object (configuration tests)::
 
 			def configure(conf):
 				out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
 				(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
+				(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
 				try:
 					conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
-				except Exception as e:
+				except Errors.WafError as e:
 					print(e.stdout, e.stderr)
 
 		:param cmd: args for subprocess.Popen
-		:param kw: keyword arguments for subprocess.Popen
+		:type cmd: list or string
+		:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
+		:type kw: dict
+		:returns: a tuple containing the contents of stdout and stderr
+		:rtype: string
+		:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
+		:raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
 		"""
 		subprocess = Utils.subprocess
 		kw['shell'] = isinstance(cmd, str)
-		Logs.debug('runner: %r' % cmd)
+		self.log_command(cmd, kw)
 
-		if 'quiet' in kw:
-			quiet = kw['quiet']
-			del kw['quiet']
-		else:
-			quiet = None
+		quiet = kw.pop('quiet', None)
+		to_ret = kw.pop('output', STDOUT)
 
-		if 'output' in kw:
-			to_ret = kw['output']
-			del kw['output']
-		else:
-			to_ret = STDOUT
+		if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0], env=kw.get('env', os.environ)):
+			# This call isn't a shell command, and if the specified exe doesn't exist, check for a relative path being set
+			# with cwd and if so assume the caller knows what they're doing and don't pre-emptively fail
+			if not (cmd[0][0] == '.' and 'cwd' in kw):
+				raise Errors.WafError('Program %s not found!' % cmd[0])
 
 		kw['stdout'] = kw['stderr'] = subprocess.PIPE
 		if quiet is None:
 			self.to_log(cmd)
+
+		cargs = {}
+		if 'timeout' in kw:
+			if sys.hexversion >= 0x3030000:
+				cargs['timeout'] = kw['timeout']
+				if not 'start_new_session' in kw:
+					kw['start_new_session'] = True
+			del kw['timeout']
+		if 'input' in kw:
+			if kw['input']:
+				cargs['input'] = kw['input']
+				kw['stdin'] = subprocess.PIPE
+			del kw['input']
+
+		if 'cwd' in kw:
+			if not isinstance(kw['cwd'], str):
+				kw['cwd'] = kw['cwd'].abspath()
+
+		encoding = kw.pop('decode_as', default_encoding)
+
 		try:
-			p = subprocess.Popen(cmd, **kw)
-			(out, err) = p.communicate()
+			ret, out, err = Utils.run_process(cmd, kw, cargs)
 		except Exception as e:
 			raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
 
 		if not isinstance(out, str):
-			out = out.decode(sys.stdout.encoding or 'iso8859-1')
+			out = out.decode(encoding, errors='replace')
 		if not isinstance(err, str):
-			err = err.decode(sys.stdout.encoding or 'iso8859-1')
+			err = err.decode(encoding, errors='replace')
 
 		if out and quiet != STDOUT and quiet != BOTH:
 			self.to_log('out: %s' % out)
 		if err and quiet != STDERR and quiet != BOTH:
 			self.to_log('err: %s' % err)
 
-		if p.returncode:
-			e = Errors.WafError('Command %r returned %r' % (cmd, p.returncode))
-			e.returncode = p.returncode
+		if ret:
+			e = Errors.WafError('Command %r returned %r' % (cmd, ret))
+			e.returncode = ret
 			e.stderr = err
 			e.stdout = out
 			raise e
@@ -422,7 +485,8 @@ class Context(ctx):
 
 	def fatal(self, msg, ex=None):
 		"""
-		Raise a configuration error to interrupt the execution immediately::
+		Prints an error message in red and stops command execution; this is
+		usually used in the configuration section::
 
 			def configure(conf):
 				conf.fatal('a requirement is missing')
@@ -431,24 +495,31 @@ class Context(ctx):
 		:type msg: string
 		:param ex: optional exception object
 		:type ex: exception
+		:raises: :py:class:`waflib.Errors.ConfigurationError`
 		"""
 		if self.logger:
 			self.logger.info('from %s: %s' % (self.path.abspath(), msg))
 		try:
-			msg = '%s\n(complete log in %s)' % (msg, self.logger.handlers[0].baseFilename)
-		except Exception:
+			logfile = self.logger.handlers[0].baseFilename
+		except AttributeError:
 			pass
+		else:
+			if os.environ.get('WAF_PRINT_FAILURE_LOG'):
+				# see #1930
+				msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile))
+			else:
+				msg = '%s\n(complete log in %s)' % (msg, logfile)
 		raise self.errors.ConfigurationError(msg, ex=ex)
 
 	def to_log(self, msg):
 		"""
-		Log some information to the logger (if present), or to stderr. If the message is empty,
-		it is not printed::
+		Logs information to the logger (if present), or to stderr.
+		Empty messages are not printed::
 
 			def build(bld):
 				bld.to_log('starting the build')
 
-		When in doubt, override this method, or provide a logger on the context class.
+		Provide a logger on the context class or override this method if necessary.
 
 		:param msg: message
 		:type msg: string
@@ -462,11 +533,11 @@ class Context(ctx):
 			sys.stderr.flush()
 
 
-	def msg(self, msg, result, color=None):
+	def msg(self, *k, **kw):
 		"""
-		Print a configuration message of the form ``msg: result``.
+		Prints a configuration message of the form ``msg: result``.
 		The second part of the message will be in colors. The output
-		can be disabled easly by setting ``in_msg`` to a positive value::
+		can be disabled easily by setting ``in_msg`` to a positive value::
 
 			def configure(conf):
 				self.in_msg = 1
@@ -480,17 +551,32 @@ class Context(ctx):
 		:param color: color to use, see :py:const:`waflib.Logs.colors_lst`
 		:type color: string
 		"""
-		self.start_msg(msg)
+		try:
+			msg = kw['msg']
+		except KeyError:
+			msg = k[0]
 
+		self.start_msg(msg, **kw)
+
+		try:
+			result = kw['result']
+		except KeyError:
+			result = k[1]
+
+		color = kw.get('color')
 		if not isinstance(color, str):
 			color = result and 'GREEN' or 'YELLOW'
 
-		self.end_msg(result, color)
+		self.end_msg(result, color, **kw)
 
-	def start_msg(self, msg):
+	def start_msg(self, *k, **kw):
 		"""
-		Print the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
+		Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
 		"""
+		if kw.get('quiet'):
+			return
+
+		msg = kw.get('msg') or k[0]
 		try:
 			if self.in_msg:
 				self.in_msg += 1
@@ -507,41 +593,79 @@ class Context(ctx):
 			self.to_log(x)
 		Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
 
-	def end_msg(self, result, color=None):
-		"""Print the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
+	def end_msg(self, *k, **kw):
+		"""Prints the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
+		if kw.get('quiet'):
+			return
 		self.in_msg -= 1
 		if self.in_msg:
 			return
 
+		result = kw.get('result') or k[0]
+
 		defcolor = 'GREEN'
-		if result == True:
+		if result is True:
 			msg = 'ok'
-		elif result == False:
+		elif not result:
 			msg = 'not found'
 			defcolor = 'YELLOW'
 		else:
 			msg = str(result)
 
 		self.to_log(msg)
-		Logs.pprint(color or defcolor, msg)
-
+		try:
+			color = kw['color']
+		except KeyError:
+			if len(k) > 1 and k[1] in Logs.colors_lst:
+				# compatibility waf 1.7
+				color = k[1]
+			else:
+				color = defcolor
+		Logs.pprint(color, msg)
 
 	def load_special_tools(self, var, ban=[]):
-		global waf_dir
-		lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
-		for x in lst:
-			if not x.name in ban:
-				load_tool(x.name.replace('.py', ''))
+		"""
+		Loads third-party extensions modules for certain programming languages
+		by trying to list certain files in the extras/ directory. This method
+		is typically called once for a programming language group, see for
+		example :py:mod:`waflib.Tools.compiler_c`
+
+		:param var: glob expression, for example 'cxx\\_\\*.py'
+		:type var: string
+		:param ban: list of exact file names to exclude
+		:type ban: list of string
+		"""
+		if os.path.isdir(waf_dir):
+			lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
+			for x in lst:
+				if not x.name in ban:
+					load_tool(x.name.replace('.py', ''))
+		else:
+			from zipfile import PyZipFile
+			waflibs = PyZipFile(waf_dir)
+			lst = waflibs.namelist()
+			for x in lst:
+				if not re.match('waflib/extras/%s' % var.replace('*', '.*'), var):
+					continue
+				f = os.path.basename(x)
+				doban = False
+				for b in ban:
+					r = b.replace('*', '.*')
+					if re.match(r, f):
+						doban = True
+				if not doban:
+					f = f.replace('.py', '')
+					load_tool(f)
 
 cache_modules = {}
 """
-Dictionary holding already loaded modules, keyed by their absolute path.
+Dictionary holding already loaded modules (wscript), indexed by their absolute path.
 The modules are added automatically by :py:func:`waflib.Context.load_module`
 """
 
-def load_module(path):
+def load_module(path, encoding=None):
 	"""
-	Load a source file as a python module.
+	Loads a wscript file as a python module. This method caches results in :py:attr:`waflib.Context.cache_modules`
 
 	:param path: file path
 	:type path: string
@@ -555,63 +679,76 @@ def load_module(path):
 
 	module = imp.new_module(WSCRIPT_FILE)
 	try:
-		code = Utils.readf(path, m='rU')
-	except (IOError, OSError):
+		code = Utils.readf(path, m='r', encoding=encoding)
+	except EnvironmentError:
 		raise Errors.WafError('Could not read the file %r' % path)
 
 	module_dir = os.path.dirname(path)
 	sys.path.insert(0, module_dir)
-
-	exec(compile(code, path, 'exec'), module.__dict__)
-	sys.path.remove(module_dir)
+	try:
+		exec(compile(code, path, 'exec'), module.__dict__)
+	finally:
+		sys.path.remove(module_dir)
 
 	cache_modules[path] = module
-
 	return module
 
-def load_tool(tool, tooldir=None):
+def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
 	"""
-	Import a Waf tool (python module), and store it in the dict :py:const:`waflib.Context.Context.tools`
+	Imports a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools`
 
 	:type  tool: string
 	:param tool: Name of the tool
 	:type  tooldir: list
 	:param tooldir: List of directories to search for the tool module
+	:type  with_sys_path: boolean
+	:param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs
 	"""
 	if tool == 'java':
 		tool = 'javaw' # jython
-	elif tool == 'compiler_cc':
-		tool = 'compiler_c' # TODO remove in waf 1.8
 	else:
 		tool = tool.replace('++', 'xx')
 
-	if tooldir:
-		assert isinstance(tooldir, list)
-		sys.path = tooldir + sys.path
-		try:
-			__import__(tool)
+	if not with_sys_path:
+		back_path = sys.path
+		sys.path = []
+	try:
+		if tooldir:
+			assert isinstance(tooldir, list)
+			sys.path = tooldir + sys.path
+			try:
+				__import__(tool)
+			except ImportError as e:
+				e.waf_sys_path = list(sys.path)
+				raise
+			finally:
+				for d in tooldir:
+					sys.path.remove(d)
 			ret = sys.modules[tool]
 			Context.tools[tool] = ret
 			return ret
-		finally:
-			for d in tooldir:
-				sys.path.remove(d)
-	else:
-		global waf_dir
-		try:
-			os.stat(os.path.join(waf_dir, 'waflib', 'extras', tool + '.py'))
-		except OSError:
-			try:
-				os.stat(os.path.join(waf_dir, 'waflib', 'Tools', tool + '.py'))
-			except OSError:
-				d = tool # user has messed with sys.path
-			else:
-				d = 'waflib.Tools.%s' % tool
 		else:
-			d = 'waflib.extras.%s' % tool
-
-		__import__(d)
-		ret = sys.modules[d]
-		Context.tools[tool] = ret
-		return ret
+			if not with_sys_path:
+				sys.path.insert(0, waf_dir)
+			try:
+				for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
+					try:
+						__import__(x % tool)
+						break
+					except ImportError:
+						x = None
+				else: # raise an exception
+					__import__(tool)
+			except ImportError as e:
+				e.waf_sys_path = list(sys.path)
+				raise
+			finally:
+				if not with_sys_path:
+					sys.path.remove(waf_dir)
+			ret = sys.modules[x % tool]
+			Context.tools[tool] = ret
+			return ret
+	finally:
+		if not with_sys_path:
+			sys.path += back_path
 

+ 9 - 11
sdk/waf/waflib/Errors.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2010 (ita)
+# Thomas Nagy, 2010-2018 (ita)
 
 """
 Exceptions used in the Waf code
@@ -17,6 +17,7 @@ class WafError(Exception):
 		:param ex: exception causing this error (optional)
 		:type ex: exception
 		"""
+		Exception.__init__(self)
 		self.msg = msg
 		assert not isinstance(msg, Exception)
 
@@ -35,9 +36,7 @@ class WafError(Exception):
 		return str(self.msg)
 
 class BuildError(WafError):
-	"""
-	Errors raised during the build and install phases
-	"""
+	"""Error raised during the build and install phases"""
 	def __init__(self, error_tasks=[]):
 		"""
 		:param error_tasks: tasks that could not complete normally
@@ -47,24 +46,23 @@ class BuildError(WafError):
 		WafError.__init__(self, self.format_error())
 
 	def format_error(self):
-		"""format the error messages from the tasks that failed"""
+		"""Formats the error messages from the tasks that failed"""
 		lst = ['Build failed']
 		for tsk in self.tasks:
 			txt = tsk.format_error()
-			if txt: lst.append(txt)
+			if txt:
+				lst.append(txt)
 		return '\n'.join(lst)
 
 class ConfigurationError(WafError):
-	"""
-	Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`
-	"""
+	"""Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`"""
 	pass
 
 class TaskRescan(WafError):
-	"""task-specific exception type, trigger a signature recomputation"""
+	"""Task-specific exception type signalling required signature recalculations"""
 	pass
 
 class TaskNotReady(WafError):
-	"""task-specific exception type, raised when the task signature cannot be computed"""
+	"""Task-specific exception type signalling that task signatures cannot be computed"""
 	pass
 

+ 199 - 124
sdk/waf/waflib/Logs.py

@@ -1,61 +1,37 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)
 
 """
 logging, colors, terminal width and pretty-print
 """
 
 import os, re, traceback, sys
+from waflib import Utils, ansiterm
 
-try:
-	import threading
-except ImportError:
-	pass
-else:
-	wlock = threading.Lock()
-
-	class sync_stream(object):
-		def __init__(self, stream):
-			self.stream = stream
-			self.encoding = self.stream.encoding
-
-		def write(self, txt):
-			try:
-				wlock.acquire()
-				self.stream.write(txt)
-				self.stream.flush()
-			finally:
-				wlock.release()
-
-		def fileno(self):
-			return self.stream.fileno()
-
-		def flush(self):
-			self.stream.flush()
-
-		def isatty(self):
-			return self.stream.isatty()
-
-	_nocolor = os.environ.get('NOCOLOR', 'no') not in ('no', '0', 'false')
-	try:
-		if not _nocolor:
-			import waflib.ansiterm
-	except ImportError:
-		pass
+if not os.environ.get('NOSYNC', False):
+	# synchronized output is nearly mandatory to prevent garbled output
+	if sys.stdout.isatty() and id(sys.stdout) == id(sys.__stdout__):
+		sys.stdout = ansiterm.AnsiTerm(sys.stdout)
+	if sys.stderr.isatty() and id(sys.stderr) == id(sys.__stderr__):
+		sys.stderr = ansiterm.AnsiTerm(sys.stderr)
 
-	if not os.environ.get('NOSYNC', False):
-		if id(sys.stdout) == id(sys.__stdout__):
-			sys.stdout = sync_stream(sys.stdout)
-			sys.stderr = sync_stream(sys.stderr)
+# import the logging module after since it holds a reference on sys.stderr
+# in case someone uses the root logger
+import logging
 
-import logging # import other modules only after
+LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
+HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')
 
-LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
-HOUR_FORMAT = "%H:%M:%S"
+zones = []
+"""
+See :py:class:`waflib.Logs.log_filter`
+"""
 
-zones = ''
 verbose = 0
+"""
+Global verbosity level, see :py:func:`waflib.Logs.debug` and :py:func:`waflib.Logs.error`
+"""
 
 colors_lst = {
 'USE' : True,
@@ -66,59 +42,71 @@ colors_lst = {
 'PINK'  :'\x1b[35m',
 'BLUE'  :'\x1b[01;34m',
 'CYAN'  :'\x1b[36m',
+'GREY'  :'\x1b[37m',
 'NORMAL':'\x1b[0m',
 'cursor_on'  :'\x1b[?25h',
 'cursor_off' :'\x1b[?25l',
 }
 
-got_tty = not os.environ.get('TERM', 'dumb') in ['dumb', 'emacs']
-if got_tty:
-	try:
-		got_tty = sys.stderr.isatty() and sys.stdout.isatty()
-	except AttributeError:
-		got_tty = False
+indicator = '\r\x1b[K%s%s%s'
 
-if (not got_tty and os.environ.get('TERM', 'dumb') != 'msys') or _nocolor:
-	colors_lst['USE'] = False
+try:
+	unicode
+except NameError:
+	unicode = None
 
-def get_term_cols():
-	return 80
+def enable_colors(use):
+	"""
+	If *1* is given, then the system will perform a few verifications
+	before enabling colors, such as checking whether the interpreter
+	is running in a terminal. A value of zero will disable colors,
+	and a value above *1* will force colors.
+
+	:param use: whether to enable colors or not
+	:type use: integer
+	"""
+	if use == 1:
+		if not (sys.stderr.isatty() or sys.stdout.isatty()):
+			use = 0
+		if Utils.is_win32 and os.name != 'java':
+			term = os.environ.get('TERM', '') # has ansiterm
+		else:
+			term = os.environ.get('TERM', 'dumb')
+
+		if term in ('dumb', 'emacs'):
+			use = 0
+
+	if use >= 1:
+		os.environ['TERM'] = 'vt100'
+
+	colors_lst['USE'] = use
 
 # If console packages are available, replace the dummy function with a real
 # implementation
 try:
-	import struct, fcntl, termios
-except ImportError:
-	pass
-else:
-	if got_tty:
-		def get_term_cols_real():
-			"""
-			Private use only.
-			"""
-
-			dummy_lines, cols = struct.unpack("HHHH", \
-			fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ , \
-			struct.pack("HHHH", 0, 0, 0, 0)))[:2]
-			return cols
-		# try the function once to see if it really works
-		try:
-			get_term_cols_real()
-		except Exception:
-			pass
-		else:
-			get_term_cols = get_term_cols_real
+	get_term_cols = ansiterm.get_term_cols
+except AttributeError:
+	def get_term_cols():
+		return 80
 
 get_term_cols.__doc__ = """
-	Get the console width in characters.
+	Returns the console width in characters.
 
 	:return: the number of characters per line
 	:rtype: int
 	"""
 
 def get_color(cl):
-	if not colors_lst['USE']: return ''
-	return colors_lst.get(cl, '')
+	"""
+	Returns the ansi sequence corresponding to the given color name.
+	An empty string is returned when coloring is globally disabled.
+
+	:param cl: color name in capital letters
+	:type cl: string
+	"""
+	if colors_lst['USE']:
+		return colors_lst.get(cl, '')
+	return ''
 
 class color_dict(object):
 	"""attribute-based color access, eg: colors.PINK"""
@@ -132,7 +120,7 @@ colors = color_dict()
 re_log = re.compile(r'(\w+): (.*)', re.M)
 class log_filter(logging.Filter):
 	"""
-	The waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
+	Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
 	For example, the following::
 
 		from waflib import Logs
@@ -142,29 +130,17 @@ class log_filter(logging.Filter):
 
 		$ waf --zones=test
 	"""
-	def __init__(self, name=None):
-		pass
+	def __init__(self, name=''):
+		logging.Filter.__init__(self, name)
 
 	def filter(self, rec):
 		"""
-		filter a record, adding the colors automatically
+		Filters log records by zone and by logging level
 
-		* error: red
-		* warning: yellow
-
-		:param rec: message to record
+		:param rec: log entry
 		"""
-
-		rec.c1 = colors.PINK
-		rec.c2 = colors.NORMAL
 		rec.zone = rec.module
 		if rec.levelno >= logging.INFO:
-			if rec.levelno >= logging.ERROR:
-				rec.c1 = colors.RED
-			elif rec.levelno >= logging.WARNING:
-				rec.c1 = colors.YELLOW
-			else:
-				rec.c1 = colors.GREEN
 			return True
 
 		m = re_log.match(rec.msg)
@@ -178,19 +154,98 @@ class log_filter(logging.Filter):
 			return False
 		return True
 
+class log_handler(logging.StreamHandler):
+	"""Dispatches messages to stderr/stdout depending on the severity level"""
+	def emit(self, record):
+		"""
+		Delegates the functionality to :py:meth:`waflib.Log.log_handler.emit_override`
+		"""
+		# default implementation
+		try:
+			try:
+				self.stream = record.stream
+			except AttributeError:
+				if record.levelno >= logging.WARNING:
+					record.stream = self.stream = sys.stderr
+				else:
+					record.stream = self.stream = sys.stdout
+			self.emit_override(record)
+			self.flush()
+		except (KeyboardInterrupt, SystemExit):
+			raise
+		except: # from the python library -_-
+			self.handleError(record)
+
+	def emit_override(self, record, **kw):
+		"""
+		Writes the log record to the desired stream (stderr/stdout)
+		"""
+		self.terminator = getattr(record, 'terminator', '\n')
+		stream = self.stream
+		if unicode:
+			# python2
+			msg = self.formatter.format(record)
+			fs = '%s' + self.terminator
+			try:
+				if (isinstance(msg, unicode) and getattr(stream, 'encoding', None)):
+					fs = fs.decode(stream.encoding)
+					try:
+						stream.write(fs % msg)
+					except UnicodeEncodeError:
+						stream.write((fs % msg).encode(stream.encoding))
+				else:
+					stream.write(fs % msg)
+			except UnicodeError:
+				stream.write((fs % msg).encode('utf-8'))
+		else:
+			logging.StreamHandler.emit(self, record)
+
 class formatter(logging.Formatter):
 	"""Simple log formatter which handles colors"""
 	def __init__(self):
 		logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
 
 	def format(self, rec):
-		"""Messages in warning, error or info mode are displayed in color by default"""
-		if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
-			try:
-				msg = rec.msg.decode('utf-8')
-			except Exception:
-				msg = rec.msg
-			return '%s%s%s' % (rec.c1, msg, rec.c2)
+		"""
+		Formats records and adds colors as needed. The records do not get
+		a leading hour format if the logging level is above *INFO*.
+		"""
+		try:
+			msg = rec.msg.decode('utf-8')
+		except Exception:
+			msg = rec.msg
+
+		use = colors_lst['USE']
+		if (use == 1 and rec.stream.isatty()) or use == 2:
+
+			c1 = getattr(rec, 'c1', None)
+			if c1 is None:
+				c1 = ''
+				if rec.levelno >= logging.ERROR:
+					c1 = colors.RED
+				elif rec.levelno >= logging.WARNING:
+					c1 = colors.YELLOW
+				elif rec.levelno >= logging.INFO:
+					c1 = colors.GREEN
+			c2 = getattr(rec, 'c2', colors.NORMAL)
+			msg = '%s%s%s' % (c1, msg, c2)
+		else:
+			# remove single \r that make long lines in text files
+			# and other terminal commands
+			msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg)
+
+		if rec.levelno >= logging.INFO:
+			# the goal of this is to format without the leading "Logs, hour" prefix
+			if rec.args:
+				try:
+					return msg % rec.args
+				except UnicodeDecodeError:
+					return msg.encode('utf-8') % rec.args
+			return msg
+
+		rec.msg = msg
+		rec.c1 = colors.PINK
+		rec.c2 = colors.NORMAL
 		return logging.Formatter.format(self, rec)
 
 log = None
@@ -198,19 +253,17 @@ log = None
 
 def debug(*k, **kw):
 	"""
-	Wrap logging.debug, the output is filtered for performance reasons
+	Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0
 	"""
 	if verbose:
 		k = list(k)
 		k[0] = k[0].replace('\n', ' ')
-		global log
 		log.debug(*k, **kw)
 
 def error(*k, **kw):
 	"""
-	Wrap logging.errors, display the origin of the message when '-vv' is set
+	Wrap logging.errors, adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` ≥ 2
 	"""
-	global log
 	log.error(*k, **kw)
 	if verbose > 2:
 		st = traceback.extract_stack()
@@ -218,34 +271,33 @@ def error(*k, **kw):
 			st = st[:-1]
 			buf = []
 			for filename, lineno, name, line in st:
-				buf.append('  File "%s", line %d, in %s' % (filename, lineno, name))
+				buf.append('  File %r, line %d, in %s' % (filename, lineno, name))
 				if line:
 					buf.append('	%s' % line.strip())
-			if buf: log.error("\n".join(buf))
+			if buf:
+				log.error('\n'.join(buf))
 
 def warn(*k, **kw):
 	"""
-	Wrap logging.warn
+	Wraps logging.warning
 	"""
-	global log
-	log.warn(*k, **kw)
+	log.warning(*k, **kw)
 
 def info(*k, **kw):
 	"""
-	Wrap logging.info
+	Wraps logging.info
 	"""
-	global log
 	log.info(*k, **kw)
 
 def init_log():
 	"""
-	Initialize the loggers globally
+	Initializes the logger :py:attr:`waflib.Logs.log`
 	"""
 	global log
 	log = logging.getLogger('waflib')
 	log.handlers = []
 	log.filters = []
-	hdlr = logging.StreamHandler()
+	hdlr = log_handler()
 	hdlr.setFormatter(formatter())
 	log.addHandler(hdlr)
 	log.addFilter(log_filter())
@@ -253,29 +305,40 @@ def init_log():
 
 def make_logger(path, name):
 	"""
-	Create a simple logger, which is often used to redirect the context command output::
+	Creates a simple logger, which is often used to redirect the context command output::
 
 		from waflib import Logs
 		bld.logger = Logs.make_logger('test.log', 'build')
 		bld.check(header_name='sadlib.h', features='cxx cprogram', mandatory=False)
+
+		# have the file closed immediately
+		Logs.free_logger(bld.logger)
+
+		# stop logging
 		bld.logger = None
 
+	The method finalize() of the command will try to free the logger, if any
+
 	:param path: file name to write the log output to
 	:type path: string
 	:param name: logger name (loggers are reused)
 	:type name: string
 	"""
 	logger = logging.getLogger(name)
-	hdlr = logging.FileHandler(path, 'w')
+	if sys.hexversion > 0x3000000:
+		encoding = sys.stdout.encoding
+	else:
+		encoding = None
+	hdlr = logging.FileHandler(path, 'w', encoding=encoding)
 	formatter = logging.Formatter('%(message)s')
 	hdlr.setFormatter(formatter)
 	logger.addHandler(hdlr)
 	logger.setLevel(logging.DEBUG)
 	return logger
 
-def make_mem_logger(name, to_log, size=10000):
+def make_mem_logger(name, to_log, size=8192):
 	"""
-	Create a memory logger to avoid writing concurrently to the main logger
+	Creates a memory logger to avoid writing concurrently to the main logger
 	"""
 	from logging.handlers import MemoryHandler
 	logger = logging.getLogger(name)
@@ -287,21 +350,33 @@ def make_mem_logger(name, to_log, size=10000):
 	logger.setLevel(logging.DEBUG)
 	return logger
 
-def pprint(col, str, label='', sep='\n'):
+def free_logger(logger):
+	"""
+	Frees the resources held by the loggers created through make_logger or make_mem_logger.
+	This is used for file cleanup and for handler removal (logger objects are re-used).
+	"""
+	try:
+		for x in logger.handlers:
+			x.close()
+			logger.removeHandler(x)
+	except Exception:
+		pass
+
+def pprint(col, msg, label='', sep='\n'):
 	"""
-	Print messages in color immediately on stderr::
+	Prints messages in color immediately on stderr::
 
 		from waflib import Logs
 		Logs.pprint('RED', 'Something bad just happened')
 
 	:param col: color name to use in :py:const:`Logs.colors_lst`
 	:type col: string
-	:param str: message to display
-	:type str: string or a value that can be printed by %s
+	:param msg: message to display
+	:type msg: string or a value that can be printed by %s
 	:param label: a message to add after the colored output
 	:type label: string
 	:param sep: a string to append at the end (line separator)
 	:type sep: string
 	"""
-	sys.stderr.write("%s%s%s %s%s" % (colors(col), str, colors.NORMAL, label, sep))
+	info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep})
 

File diff suppressed because it is too large
+ 413 - 280
sdk/waf/waflib/Node.py


+ 194 - 103
sdk/waf/waflib/Options.py

@@ -1,107 +1,85 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # Scott Newton, 2005 (scottn)
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 
 """
 Support for waf command-line options
 
-Provides default command-line options,
-as well as custom ones, used by the ``options`` wscript function.
-
+Provides default and command-line options, as well the command
+that reads the ``options`` wscript function.
 """
 
-import os, tempfile, optparse, sys, re
-from waflib import Logs, Utils, Context
+import os, tempfile, argparse, sys, re
+from waflib import Logs, Utils, Context, Errors
 
-cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()
-"""
-Constant representing the default waf commands displayed in::
 
-	$ waf --help
+class OptionValues:
+	def __str__(self):
+		return str(self.__dict__)
 
+options = OptionValues()
 """
-
-options = {}
-"""
-A dictionary representing the command-line options::
+A global dictionary representing user-provided command-line options::
 
 	$ waf --foo=bar
-
 """
 
 commands = []
 """
-List of commands to execute extracted from the command-line. This list is consumed during the execution, see :py:func:`waflib.Scripting.run_commands`.
+List of commands to execute extracted from the command-line. This list
+is consumed during the execution by :py:func:`waflib.Scripting.run_commands`.
 """
 
 lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
-try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
-except KeyError: cache_global = ''
-platform = Utils.unversioned_sys_platform()
-
+"""
+Name of the lock file that marks a project as configured
+"""
 
-class opt_parser(optparse.OptionParser):
+class ArgParser(argparse.ArgumentParser):
 	"""
 	Command-line options parser.
 	"""
 	def __init__(self, ctx):
-		optparse.OptionParser.__init__(self, conflict_handler="resolve", version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
-
-		self.formatter.width = Logs.get_term_cols()
-		p = self.add_option
+		argparse.ArgumentParser.__init__(self, add_help=False, conflict_handler='resolve')
 		self.ctx = ctx
 
-		jobs = ctx.jobs()
-		p('-j', '--jobs',     dest='jobs',    default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs)
-		p('-k', '--keep',     dest='keep',    default=0,     action='count', help='keep running happily even if errors are found')
-		p('-v', '--verbose',  dest='verbose', default=0,     action='count', help='verbosity level -v -vv or -vvv [default: 0]')
-		p('--nocache',        dest='nocache', default=False, action='store_true', help='ignore the WAFCACHE (if set)')
-		p('--zones',          dest='zones',   default='',    action='store', help='debugging zones (task_gen, deps, tasks, etc)')
-
-		gr = optparse.OptionGroup(self, 'configure options')
-		self.add_option_group(gr)
-
-		gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
-		gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
-
-		default_prefix = os.environ.get('PREFIX')
-		if not default_prefix:
-			if platform == 'win32':
-				d = tempfile.gettempdir()
-				default_prefix = d[0].upper() + d[1:]
-				# win32 preserves the case, but gettempdir does not
-			else:
-				default_prefix = '/usr/local/'
-		gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix)
-		gr.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
-
-
-		gr = optparse.OptionGroup(self, 'build and install options')
-		self.add_option_group(gr)
-
-		gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output')
-		gr.add_option('--targets',        dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"')
-
-		gr = optparse.OptionGroup(self, 'step options')
-		self.add_option_group(gr)
-		gr.add_option('--files',          dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
-
-		default_destdir = os.environ.get('DESTDIR', '')
-		gr = optparse.OptionGroup(self, 'install/uninstall options')
-		self.add_option_group(gr)
-		gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir')
-		gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation')
-
-		gr.add_option('--distcheck-args', help='arguments to pass to distcheck', default=None, action='store')
+	def format_help(self):
+		self.usage = self.get_usage()
+		return super(ArgParser, self).format_help()
+
+	def format_usage(self):
+		return self.format_help()
+
+	def _get_formatter(self):
+		"""Initialize the argument parser to the adequate terminal width"""
+		return self.formatter_class(prog=self.prog, width=Logs.get_term_cols())
+
+	def get_option(self, name):
+		if name in self._option_string_actions:
+			return self._option_string_actions[name]
+
+	def remove_option(self, name):
+		if name in self._option_string_actions:
+			action = self._option_string_actions[name]
+			self._remove_action(action)
+			action.option_strings.remove(name)
+			self._option_string_actions.pop(name, None)
+			for group in self._action_groups:
+				try:
+					group._group_actions.remove(action)
+				except ValueError:
+					pass
 
 	def get_usage(self):
 		"""
-		Return the message to print on ``waf --help``
+		Builds the message to print on ``waf --help``
+
+		:rtype: string
 		"""
 		cmds_str = {}
 		for cls in Context.classes:
-			if not cls.cmd or cls.cmd == 'options':
+			if not cls.cmd or cls.cmd == 'options' or cls.cmd.startswith( '_' ):
 				continue
 
 			s = cls.__doc__ or ''
@@ -109,11 +87,11 @@ class opt_parser(optparse.OptionParser):
 
 		if Context.g_module:
 			for (k, v) in Context.g_module.__dict__.items():
-				if k in ['options', 'init', 'shutdown']:
+				if k in ('options', 'init', 'shutdown'):
 					continue
 
 				if type(v) is type(Context.create_context):
-					if v.__doc__ and not k.startswith('_'):
+					if v.__doc__ and len(v.__doc__.splitlines()) < 3 and not k.startswith('_'):
 						cmds_str[k] = v.__doc__
 
 		just = 0
@@ -124,34 +102,85 @@ class opt_parser(optparse.OptionParser):
 		lst.sort()
 		ret = '\n'.join(lst)
 
-		return '''waf [commands] [options]
+		return '''%s [commands] [options]
 
-Main commands (example: ./waf build -j4)
+Main commands (example: ./%s build -j4)
 %s
-''' % ret
+''' % (Context.WAFNAME, Context.WAFNAME, ret)
 
 
 class OptionsContext(Context.Context):
 	"""
-	Collect custom options from wscript files and parses the command line.
-	Set the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
+	Collects custom options from wscript files and parses the command line.
+	Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
 	"""
-
 	cmd = 'options'
 	fun = 'options'
 
 	def __init__(self, **kw):
 		super(OptionsContext, self).__init__(**kw)
 
-		self.parser = opt_parser(self)
-		"""Instance of :py:class:`waflib.Options.opt_parser`"""
+		self.parser = ArgParser(self)
+		"""Instance of :py:class:`waflib.Options.ArgParser`"""
 
 		self.option_groups = {}
 
+		jobs = self.jobs()
+		p = self.add_option
+		color = os.environ.get('NOCOLOR', '') and 'no' or 'auto'
+		if os.environ.get('CLICOLOR', '') == '0':
+			color = 'no'
+		elif os.environ.get('CLICOLOR_FORCE', '') == '1':
+			color = 'yes'
+		p('-c', '--color',    dest='colors',  default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto'))
+		p('-j', '--jobs',     dest='jobs',    default=jobs,  type=int, help='amount of parallel jobs (%r)' % jobs)
+		p('-k', '--keep',     dest='keep',    default=0,     action='count', help='continue despite errors (-kk to try harder)')
+		p('-v', '--verbose',  dest='verbose', default=0,     action='count', help='verbosity level -v -vv or -vvv [default: 0]')
+		p('--zones',          dest='zones',   default='',    action='store', help='debugging zones (task_gen, deps, tasks, etc)')
+		p('--profile',        dest='profile', default=0,     action='store_true', help=argparse.SUPPRESS)
+		p('--pdb',            dest='pdb',     default=0,     action='store_true', help=argparse.SUPPRESS)
+		p('-h', '--help',     dest='whelp',   default=0,     action='store_true', help='show this help message and exit')
+		p('--version',        dest='version', default=False, action='store_true', help='show the Waf version and exit')
+
+		gr = self.add_option_group('Configuration options')
+
+		gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
+		gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
+
+		gr.add_option('--no-lock-in-run', action='store_true', default=os.environ.get('NO_LOCK_IN_RUN', ''), help=argparse.SUPPRESS, dest='no_lock_in_run')
+		gr.add_option('--no-lock-in-out', action='store_true', default=os.environ.get('NO_LOCK_IN_OUT', ''), help=argparse.SUPPRESS, dest='no_lock_in_out')
+		gr.add_option('--no-lock-in-top', action='store_true', default=os.environ.get('NO_LOCK_IN_TOP', ''), help=argparse.SUPPRESS, dest='no_lock_in_top')
+
+		default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
+		if not default_prefix:
+			if Utils.unversioned_sys_platform() == 'win32':
+				d = tempfile.gettempdir()
+				default_prefix = d[0].upper() + d[1:]
+				# win32 preserves the case, but gettempdir does not
+			else:
+				default_prefix = '/usr/local/'
+		gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix)
+		gr.add_option('--bindir', dest='bindir', help='bindir')
+		gr.add_option('--libdir', dest='libdir', help='libdir')
+
+		gr = self.add_option_group('Build and installation options')
+		gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output')
+		gr.add_option('--targets',        dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"')
+
+		gr = self.add_option_group('Step options')
+		gr.add_option('--files',          dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
+
+		default_destdir = os.environ.get('DESTDIR', '')
+
+		gr = self.add_option_group('Installation and uninstallation options')
+		gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir')
+		gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='disable file installation caching')
+		gr.add_option('--distcheck-args', metavar='ARGS', help='arguments to pass to distcheck', default=None, action='store')
+
 	def jobs(self):
 		"""
-		Find the amount of cpu cores to set the default amount of tasks executed in parallel. At
-		runtime the options can be obtained from :py:const:`waflib.Options.options` ::
+		Finds the optimal amount of cpu cores to use for parallel jobs.
+		At runtime the options can be obtained from :py:const:`waflib.Options.options` ::
 
 			from waflib.Options import options
 			njobs = options.jobs
@@ -174,7 +203,7 @@ class OptionsContext(Context.Context):
 				if not count and os.name not in ('nt', 'java'):
 					try:
 						tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0)
-					except Exception:
+					except Errors.WafError:
 						pass
 					else:
 						if re.match('^[0-9]+$', tmp):
@@ -186,64 +215,126 @@ class OptionsContext(Context.Context):
 		return count
 
 	def add_option(self, *k, **kw):
+		if 'type' in kw and type(kw['type']) == str:
+			Logs.warn('Invalid "type=str" in add_option (must be a class, not a string)')
+			if kw['type'] == 'int':
+				kw['type'] = int
+			elif kw['type'] == 'string':
+				kw['type'] = str
+		return self.add_argument(*k, **kw)
+
+	def add_argument(self, *k, **kw):
 		"""
-		Wrapper for optparse.add_option::
+		Wraps ``argparse.add_argument``::
 
 			def options(ctx):
-				ctx.add_option('-u', '--use', dest='use', default=False, action='store_true',
-					help='a boolean option')
+				ctx.add_option('-u', '--use', dest='use', default=False,
+					action='store_true', help='a boolean option')
+
+		:rtype: argparse option object
 		"""
-		return self.parser.add_option(*k, **kw)
+		return self.parser.add_argument(*k, **kw)
 
 	def add_option_group(self, *k, **kw):
 		"""
-		Wrapper for optparse.add_option_group::
+		Wraps ``optparse.add_option_group``::
 
 			def options(ctx):
 				gr = ctx.add_option_group('some options')
 				gr.add_option('-u', '--use', dest='use', default=False, action='store_true')
+
+		:rtype: optparse option group object
 		"""
-		try:
-			gr = self.option_groups[k[0]]
-		except KeyError:
-			gr = self.parser.add_option_group(*k, **kw)
-		self.option_groups[k[0]] = gr
+		gr = self.get_option_group(k[0])
+		if not gr:
+			gr = self.parser.add_argument_group(*k, **kw)
+			gr.add_option = gr.add_argument
+			self.option_groups[k[0]] = gr
 		return gr
 
 	def get_option_group(self, opt_str):
 		"""
-		Wrapper for optparse.get_option_group::
+		Wraps ``optparse.get_option_group``::
 
 			def options(ctx):
 				gr = ctx.get_option_group('configure options')
 				gr.add_option('-o', '--out', action='store', default='',
 					help='build dir for the project', dest='out')
 
+		:rtype: optparse option group object
 		"""
 		try:
 			return self.option_groups[opt_str]
 		except KeyError:
-			for group in self.parser.option_groups:
+			for group in self.parser._action_groups:
 				if group.title == opt_str:
 					return group
 			return None
 
+	def sanitize_path(self, path, cwd=None):
+		if not cwd:
+			cwd = Context.launch_dir
+		p = os.path.expanduser(path)
+		p = os.path.join(cwd, p)
+		p = os.path.normpath(p)
+		p = os.path.abspath(p)
+		return p
+
+	def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
+		"""
+		Just parses the arguments
+		"""
+		(options, leftover_args) = self.parser.parse_known_args(args=_args)
+		commands = []
+		for arg in leftover_args:
+			if not allow_unknown and arg.startswith('-'):
+				self.parser.print_help()
+				raise Errors.WafError('Unknown option: %r' % arg)
+			commands.append(arg)
+
+		if options.jobs < 1:
+			options.jobs = 1
+		for name in 'top out destdir prefix bindir libdir'.split():
+			# those paths are usually expanded from Context.launch_dir
+			if getattr(options, name, None):
+				path = self.sanitize_path(getattr(options, name), cwd)
+				setattr(options, name, path)
+		return options, commands
+
+	def init_logs(self, options, commands):
+		Logs.verbose = options.verbose
+		if options.verbose >= 1:
+			self.load('errcheck')
+
+		colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors]
+		Logs.enable_colors(colors)
+
+		if options.zones:
+			Logs.zones = options.zones.split(',')
+			if not Logs.verbose:
+				Logs.verbose = 1
+		elif Logs.verbose > 0:
+			Logs.zones = ['runner']
+		if Logs.verbose > 2:
+			Logs.zones = ['*']
+
 	def parse_args(self, _args=None):
 		"""
-		Parse arguments from a list (not bound to the command-line).
+		Parses arguments from a list which is not necessarily the command-line.
+		Initializes the module variables options and commands
+		If help is requested, prints it and exit the application
 
 		:param _args: arguments
 		:type _args: list of strings
 		"""
-		global options, commands
-		(options, leftover_args) = self.parser.parse_args(args=_args)
-		commands = leftover_args
+		arg_options, arg_commands = self.parse_cmd_args(_args)
+		self.init_logs(arg_options, commands)
 
-		if options.destdir:
-			options.destdir = os.path.abspath(os.path.expanduser(options.destdir))
+		options.__dict__.clear()
+		del commands[:]
 
-		if options.verbose >= 1:
-			self.load('errcheck')
+		options.__dict__.update(arg_options.__dict__)
+		commands.extend(arg_commands)
 
 	def execute(self):
 		"""
@@ -251,4 +342,4 @@ class OptionsContext(Context.Context):
 		"""
 		super(OptionsContext, self).execute()
 		self.parse_args()
-
+		Utils.alloc_process_pool(options.jobs)

+ 433 - 185
sdk/waf/waflib/Runner.py

@@ -1,98 +1,124 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)
 
 """
 Runner.py: Task scheduling and execution
-
 """
 
-import random, atexit
+import heapq, traceback
 try:
-	from queue import Queue
+	from queue import Queue, PriorityQueue
 except ImportError:
 	from Queue import Queue
+	try:
+		from Queue import PriorityQueue
+	except ImportError:
+		class PriorityQueue(Queue):
+			def _init(self, maxsize):
+				self.maxsize = maxsize
+				self.queue = []
+			def _put(self, item):
+				heapq.heappush(self.queue, item)
+			def _get(self):
+				return heapq.heappop(self.queue)
+
 from waflib import Utils, Task, Errors, Logs
 
-GAP = 10
+GAP = 5
 """
-Wait for free tasks if there are at least ``GAP * njobs`` in queue
+Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run
 """
 
-class TaskConsumer(Utils.threading.Thread):
-	"""
-	Task consumers belong to a pool of workers
+class PriorityTasks(object):
+	def __init__(self):
+		self.lst = []
+	def __len__(self):
+		return len(self.lst)
+	def __iter__(self):
+		return iter(self.lst)
+	def __str__(self):
+		return 'PriorityTasks: [%s]' % '\n  '.join(str(x) for x in self.lst)
+	def clear(self):
+		self.lst = []
+	def append(self, task):
+		heapq.heappush(self.lst, task)
+	def pop(self):
+		return heapq.heappop(self.lst)
+	def extend(self, lst):
+		if self.lst:
+			for x in lst:
+				self.append(x)
+		else:
+			if isinstance(lst, list):
+				self.lst = lst
+				heapq.heapify(lst)
+			else:
+				self.lst = lst.lst
 
-	They wait for tasks in the queue and then use ``task.process(...)``
+class Consumer(Utils.threading.Thread):
 	"""
-	def __init__(self):
+	Daemon thread object that executes a task. It shares a semaphore with
+	the coordinator :py:class:`waflib.Runner.Spawner`. There is one
+	instance per task to consume.
+	"""
+	def __init__(self, spawner, task):
 		Utils.threading.Thread.__init__(self)
-		self.ready = Queue()
+		self.task = task
+		"""Task to execute"""
+		self.spawner = spawner
+		"""Coordinator object"""
+		self.daemon = True
+		self.start()
+	def run(self):
 		"""
-		Obtain :py:class:`waflib.Task.TaskBase` instances from this queue.
+		Processes a single task
 		"""
-		self.setDaemon(1)
+		try:
+			if not self.spawner.master.stop:
+				self.spawner.master.process_task(self.task)
+		finally:
+			self.spawner.sem.release()
+			self.spawner.master.out.put(self.task)
+			self.task = None
+			self.spawner = None
+
+class Spawner(Utils.threading.Thread):
+	"""
+	Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
+	spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
+	:py:class:`waflib.Task.Task` instance.
+	"""
+	def __init__(self, master):
+		Utils.threading.Thread.__init__(self)
+		self.master = master
+		""":py:class:`waflib.Runner.Parallel` producer instance"""
+		self.sem = Utils.threading.Semaphore(master.numjobs)
+		"""Bounded semaphore that prevents spawning more than *n* concurrent consumers"""
+		self.daemon = True
 		self.start()
-
 	def run(self):
 		"""
-		Loop over the tasks to execute
+		Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop`
 		"""
 		try:
 			self.loop()
 		except Exception:
+			# Python 2 prints unnecessary messages when shutting down
+			# we also want to stop the thread properly
 			pass
-
 	def loop(self):
 		"""
-		Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call
-		:py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
+		Consumes task objects from the producer; ends when the producer has no more
+		task to provide.
 		"""
+		master = self.master
 		while 1:
-			tsk = self.ready.get()
-			if not isinstance(tsk, Task.TaskBase):
-				tsk(self)
-			else:
-				tsk.process()
-
-pool = Queue()
-"""
-Pool of task consumer objects
-"""
-
-def get_pool():
-	"""
-	Obtain a task consumer from :py:attr:`waflib.Runner.pool`.
-	Do not forget to put it back by using :py:func:`waflib.Runner.put_pool`
-	and reset properly (original waiting queue).
-
-	:rtype: :py:class:`waflib.Runner.TaskConsumer`
-	"""
-	try:
-		return pool.get(False)
-	except Exception:
-		return TaskConsumer()
-
-def put_pool(x):
-	"""
-	Return a task consumer to the thread pool :py:attr:`waflib.Runner.pool`
-
-	:param x: task consumer object
-	:type x: :py:class:`waflib.Runner.TaskConsumer`
-	"""
-	pool.put(x)
-
-def _free_resources():
-	global pool
-	lst = []
-	while pool.qsize():
-		lst.append(pool.get())
-	for x in lst:
-		x.ready.put(None)
-	for x in lst:
-		x.join()
-	pool = None
-atexit.register(_free_resources)
+			task = master.ready.get()
+			self.sem.acquire()
+			if not master.stop:
+				task.log_display(task.generator.bld)
+			Consumer(self, task)
 
 class Parallel(object):
 	"""
@@ -106,7 +132,7 @@ class Parallel(object):
 
 		self.numjobs = j
 		"""
-		Number of consumers in the pool
+		Amount of parallel consumers to use
 		"""
 
 		self.bld = bld
@@ -114,19 +140,25 @@ class Parallel(object):
 		Instance of :py:class:`waflib.Build.BuildContext`
 		"""
 
-		self.outstanding = []
-		"""List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed"""
+		self.outstanding = PriorityTasks()
+		"""Heap of :py:class:`waflib.Task.Task` that may be ready to be executed"""
+
+		self.postponed = PriorityTasks()
+		"""Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons"""
 
-		self.frozen = []
-		"""List of :py:class:`waflib.Task.TaskBase` that cannot be executed immediately"""
+		self.incomplete = set()
+		"""List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)"""
+
+		self.ready = PriorityQueue(0)
+		"""List of :py:class:`waflib.Task.Task` ready to be executed by consumers"""
 
 		self.out = Queue(0)
-		"""List of :py:class:`waflib.Task.TaskBase` returned by the task consumers"""
+		"""List of :py:class:`waflib.Task.Task` returned by the task consumers"""
 
 		self.count = 0
 		"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
 
-		self.processed = 1
+		self.processed = 0
 		"""Amount of tasks processed"""
 
 		self.stop = False
@@ -139,33 +171,46 @@ class Parallel(object):
 		"""Task iterator which must give groups of parallelizable tasks when calling ``next()``"""
 
 		self.dirty = False
-		"""Flag to indicate that tasks have been executed, and that the build cache must be saved (call :py:meth:`waflib.Build.BuildContext.store`)"""
+		"""
+		Flag that indicates that the build cache must be saved when a task was executed
+		(calls :py:meth:`waflib.Build.BuildContext.store`)"""
+
+		self.revdeps = Utils.defaultdict(set)
+		"""
+		The reverse dependency graph of dependencies obtained from Task.run_after
+		"""
+
+		self.spawner = None
+		"""
+		Coordinating daemon thread that spawns thread consumers
+		"""
+		if self.numjobs > 1:
+			self.spawner = Spawner(self)
 
 	def get_next_task(self):
 		"""
-		Obtain the next task to execute.
+		Obtains the next Task instance to run
 
-		:rtype: :py:class:`waflib.Task.TaskBase`
+		:rtype: :py:class:`waflib.Task.Task`
 		"""
 		if not self.outstanding:
 			return None
-		return self.outstanding.pop(0)
+		return self.outstanding.pop()
 
 	def postpone(self, tsk):
 		"""
-		A task cannot be executed at this point, put it in the list :py:attr:`waflib.Runner.Parallel.frozen`.
+		Adds the task to the list :py:attr:`waflib.Runner.Parallel.postponed`.
+		The order is scrambled so as to consume as many tasks in parallel as possible.
 
-		:param tsk: task
-		:type tsk: :py:class:`waflib.Task.TaskBase`
+		:param tsk: task instance
+		:type tsk: :py:class:`waflib.Task.Task`
 		"""
-		if random.randint(0, 1):
-			self.frozen.insert(0, tsk)
-		else:
-			self.frozen.append(tsk)
+		self.postponed.append(tsk)
 
 	def refill_task_list(self):
 		"""
-		Put the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
+		Pulls the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
+		Ensures that all tasks in the current build group are complete before processing the next one.
 		"""
 		while self.count > self.numjobs * GAP:
 			self.get_out()
@@ -173,118 +218,242 @@ class Parallel(object):
 		while not self.outstanding:
 			if self.count:
 				self.get_out()
-			elif self.frozen:
+				if self.outstanding:
+					break
+			elif self.postponed:
 				try:
 					cond = self.deadlock == self.processed
 				except AttributeError:
 					pass
 				else:
 					if cond:
-						msg = 'check the build order for the tasks'
-						for tsk in self.frozen:
-							if not tsk.run_after:
-								msg = 'check the methods runnable_status'
-								break
+						# The most common reason is conflicting build order declaration
+						# for example: "X run_after Y" and "Y run_after X"
+						# Another can be changing "run_after" dependencies while the build is running
+						# for example: updating "tsk.run_after" in the "runnable_status" method
 						lst = []
-						for tsk in self.frozen:
-							lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after]))
-						raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst)))
+						for tsk in self.postponed:
+							deps = [id(x) for x in tsk.run_after if not x.hasrun]
+							lst.append('%s\t-> %r' % (repr(tsk), deps))
+							if not deps:
+								lst.append('\n  task %r dependencies are done, check its *runnable_status*?' % id(tsk))
+						raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst))
 				self.deadlock = self.processed
 
-			if self.frozen:
-				self.outstanding += self.frozen
-				self.frozen = []
+			if self.postponed:
+				self.outstanding.extend(self.postponed)
+				self.postponed.clear()
 			elif not self.count:
-				self.outstanding.extend(next(self.biter))
-				self.total = self.bld.total()
-				break
+				if self.incomplete:
+					for x in self.incomplete:
+						for k in x.run_after:
+							if not k.hasrun:
+								break
+						else:
+							# dependency added after the build started without updating revdeps
+							self.incomplete.remove(x)
+							self.outstanding.append(x)
+							break
+					else:
+						if self.stop or self.error:
+							break
+						raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete)
+				else:
+					tasks = next(self.biter)
+					ready, waiting = self.prio_and_split(tasks)
+					self.outstanding.extend(ready)
+					self.incomplete.update(waiting)
+					self.total = self.bld.total()
+					break
 
 	def add_more_tasks(self, tsk):
 		"""
-		Tasks may be added dynamically during the build by binding them to the task :py:attr:`waflib.Task.TaskBase.more_tasks`
+		If a task provides :py:attr:`waflib.Task.Task.more_tasks`, then the tasks contained
+		in that list are added to the current build and will be processed before the next build group.
 
-		:param tsk: task
-		:type tsk: :py:attr:`waflib.Task.TaskBase`
+		The priorities for dependent tasks are not re-calculated globally
+
+		:param tsk: task instance
+		:type tsk: :py:attr:`waflib.Task.Task`
 		"""
 		if getattr(tsk, 'more_tasks', None):
-			self.outstanding += tsk.more_tasks
+			more = set(tsk.more_tasks)
+			groups_done = set()
+			def iteri(a, b):
+				for x in a:
+					yield x
+				for x in b:
+					yield x
+
+			# Update the dependency tree
+			# this assumes that task.run_after values were updated
+			for x in iteri(self.outstanding, self.incomplete):
+				for k in x.run_after:
+					if isinstance(k, Task.TaskGroup):
+						if k not in groups_done:
+							groups_done.add(k)
+							for j in k.prev & more:
+								self.revdeps[j].add(k)
+					elif k in more:
+						self.revdeps[k].add(x)
+
+			ready, waiting = self.prio_and_split(tsk.more_tasks)
+			self.outstanding.extend(ready)
+			self.incomplete.update(waiting)
 			self.total += len(tsk.more_tasks)
 
+	def mark_finished(self, tsk):
+		def try_unfreeze(x):
+			# DAG ancestors are likely to be in the incomplete set
+			# This assumes that the run_after contents have not changed
+			# after the build starts, else a deadlock may occur
+			if x in self.incomplete:
+				# TODO remove dependencies to free some memory?
+				# x.run_after.remove(tsk)
+				for k in x.run_after:
+					if not k.hasrun:
+						break
+				else:
+					self.incomplete.remove(x)
+					self.outstanding.append(x)
+
+		if tsk in self.revdeps:
+			for x in self.revdeps[tsk]:
+				if isinstance(x, Task.TaskGroup):
+					x.prev.remove(tsk)
+					if not x.prev:
+						for k in x.next:
+							# TODO necessary optimization?
+							k.run_after.remove(x)
+							try_unfreeze(k)
+						# TODO necessary optimization?
+						x.next = []
+				else:
+					try_unfreeze(x)
+			del self.revdeps[tsk]
+
+		if hasattr(tsk, 'semaphore'):
+			sem = tsk.semaphore
+			try:
+				sem.release(tsk)
+			except KeyError:
+				# TODO
+				pass
+			else:
+				while sem.waiting and not sem.is_locked():
+					# take a frozen task, make it ready to run
+					x = sem.waiting.pop()
+					self.add_task(x)
+
 	def get_out(self):
 		"""
-		Obtain one task returned from the task consumers, and update the task count. Add more tasks if necessary through
-		:py:attr:`waflib.Runner.Parallel.add_more_tasks`.
+		Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution.
+		Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`.
 
-		:rtype: :py:attr:`waflib.Task.TaskBase`
+		:rtype: :py:attr:`waflib.Task.Task`
 		"""
 		tsk = self.out.get()
 		if not self.stop:
 			self.add_more_tasks(tsk)
+		self.mark_finished(tsk)
+
 		self.count -= 1
 		self.dirty = True
 		return tsk
 
+	def add_task(self, tsk):
+		if hasattr(tsk, 'semaphore'):
+			sem = tsk.semaphore
+			try:
+				sem.acquire(tsk)
+			except IndexError:
+				sem.waiting.add(tsk)
+				return
+
+		self.count += 1
+		self.processed += 1
+		if self.numjobs == 1:
+			tsk.log_display(tsk.generator.bld)
+			try:
+				self.process_task(tsk)
+			finally:
+				self.out.put(tsk)
+		else:
+			self.ready.put(tsk)
+
+	def process_task(self, tsk):
+		"""
+		Processes a task and attempts to stop the build in case of errors
+		"""
+		tsk.process()
+		if tsk.hasrun != Task.SUCCESS:
+			self.error_handler(tsk)
+
+	def skip(self, tsk):
+		"""
+		Mark a task as skipped/up-to-date
+		"""
+		tsk.hasrun = Task.SKIPPED
+		self.mark_finished(tsk)
+
+	def cancel(self, tsk):
+		"""
+		Mark a task as failed because of unsatisfiable dependencies
+		"""
+		tsk.hasrun = Task.CANCELED
+		self.mark_finished(tsk)
+
 	def error_handler(self, tsk):
 		"""
-		Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless
-		the build is executed with::
+		Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set,
+		unless the build is executed with::
 
 			$ waf build -k
 
-		:param tsk: task
-		:type tsk: :py:attr:`waflib.Task.TaskBase`
+		:param tsk: task instance
+		:type tsk: :py:attr:`waflib.Task.Task`
 		"""
 		if not self.bld.keep:
 			self.stop = True
 		self.error.append(tsk)
 
-	def add_task(self, tsk):
+	def task_status(self, tsk):
 		"""
-		Pass a task to a consumer.
+		Obtains the task status to decide whether to run it immediately or not.
 
-		:param tsk: task
-		:type tsk: :py:attr:`waflib.Task.TaskBase`
+		:return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER`
+		:rtype: integer
 		"""
 		try:
-			self.pool
-		except AttributeError:
-			self.init_task_pool()
-		self.ready.put(tsk)
-
-	def init_task_pool(self):
-		# lazy creation, and set a common pool for all task consumers
-		pool = self.pool = [get_pool() for i in range(self.numjobs)]
-		self.ready = Queue(0)
-		def setq(consumer):
-			consumer.ready = self.ready
-		for x in pool:
-			x.ready.put(setq)
-		return pool
-
-	def free_task_pool(self):
-		# return the consumers, setting a different queue for each of them
-		def setq(consumer):
-			consumer.ready = Queue(0)
-			self.out.put(self)
-		try:
-			pool = self.pool
-		except AttributeError:
-			pass
-		else:
-			for x in pool:
-				self.ready.put(setq)
-			for x in pool:
-				self.get_out()
-			for x in pool:
-				put_pool(x)
-			self.pool = []
+			return tsk.runnable_status()
+		except Exception:
+			self.processed += 1
+			tsk.err_msg = traceback.format_exc()
+			if not self.stop and self.bld.keep:
+				self.skip(tsk)
+				if self.bld.keep == 1:
+					# if -k stop on the first exception, if -kk try to go as far as possible
+					if Logs.verbose > 1 or not self.error:
+						self.error.append(tsk)
+					self.stop = True
+				else:
+					if Logs.verbose > 1:
+						self.error.append(tsk)
+				return Task.EXCEPTION
+
+			tsk.hasrun = Task.EXCEPTION
+			self.error_handler(tsk)
+
+			return Task.EXCEPTION
 
 	def start(self):
 		"""
-		Give tasks to :py:class:`waflib.Runner.TaskConsumer` instances until the build finishes or the ``stop`` flag is set.
-		If only one job is used, then execute the tasks one by one, without consumers.
+		Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to
+		:py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread
+		has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out`
+		and marks the build as failed by setting the ``stop`` flag.
+		If only one job is used, then executes the tasks one by one, without consumers.
 		"""
-
 		self.total = self.bld.total()
 
 		while not self.stop:
@@ -306,56 +475,135 @@ class Parallel(object):
 				self.processed += 1
 				continue
 
-			if self.stop: # stop immediately after a failure was detected
+			if self.stop: # stop immediately after a failure is detected
 				break
 
-			try:
-				st = tsk.runnable_status()
-			except Exception:
-				self.processed += 1
-				# TODO waf 1.7 this piece of code should go in the error_handler
-				tsk.err_msg = Utils.ex_stack()
-				if not self.stop and self.bld.keep:
-					tsk.hasrun = Task.SKIPPED
-					if self.bld.keep == 1:
-						# if -k stop at the first exception, if -kk try to go as far as possible
-						if Logs.verbose > 1 or not self.error:
-							self.error.append(tsk)
-						self.stop = True
-					else:
-						if Logs.verbose > 1:
-							self.error.append(tsk)
-					continue
-				tsk.hasrun = Task.EXCEPTION
-				self.error_handler(tsk)
-				continue
-
-			if st == Task.ASK_LATER:
+			st = self.task_status(tsk)
+			if st == Task.RUN_ME:
+				self.add_task(tsk)
+			elif st == Task.ASK_LATER:
 				self.postpone(tsk)
 			elif st == Task.SKIP_ME:
 				self.processed += 1
-				tsk.hasrun = Task.SKIPPED
+				self.skip(tsk)
 				self.add_more_tasks(tsk)
-			else:
-				# run me: put the task in ready queue
-				tsk.position = (self.processed, self.total)
-				self.count += 1
-				tsk.master = self
+			elif st == Task.CANCEL_ME:
+				# A dependency problem has occurred, and the
+				# build is most likely run with `waf -k`
+				if Logs.verbose > 1:
+					self.error.append(tsk)
 				self.processed += 1
-
-				if self.numjobs == 1:
-					tsk.process()
-				else:
-					self.add_task(tsk)
+				self.cancel(tsk)
 
 		# self.count represents the tasks that have been made available to the consumer threads
 		# collect all the tasks after an error else the message may be incomplete
 		while self.error and self.count:
 			self.get_out()
 
-		#print loop
-		assert (self.count == 0 or self.stop)
+		self.ready.put(None)
+		if not self.stop:
+			assert not self.count
+			assert not self.postponed
+			assert not self.incomplete
+
+	def prio_and_split(self, tasks):
+		"""
+		Label input tasks with priority values, and return a pair containing
+		the tasks that are ready to run and the tasks that are necessarily
+		waiting for other tasks to complete.
+
+		The priority system is really meant as an optional layer for optimization:
+		dependency cycles are found quickly, and builds should be more efficient.
+		A high priority number means that a task is processed first.
+
+		This method can be overridden to disable the priority system::
 
-		# free the task pool, if any
-		self.free_task_pool()
+			def prio_and_split(self, tasks):
+				return tasks, []
+
+		:return: A pair of task lists
+		:rtype: tuple
+		"""
+		# to disable:
+		#return tasks, []
+		for x in tasks:
+			x.visited = 0
+
+		reverse = self.revdeps
+
+		groups_done = set()
+		for x in tasks:
+			for k in x.run_after:
+				if isinstance(k, Task.TaskGroup):
+					if k not in groups_done:
+						groups_done.add(k)
+						for j in k.prev:
+							reverse[j].add(k)
+				else:
+					reverse[k].add(x)
+
+		# the priority number is not the tree depth
+		def visit(n):
+			if isinstance(n, Task.TaskGroup):
+				return sum(visit(k) for k in n.next)
+
+			if n.visited == 0:
+				n.visited = 1
+
+				if n in reverse:
+					rev = reverse[n]
+					n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev)
+				else:
+					n.prio_order = n.tree_weight
+
+				n.visited = 2
+			elif n.visited == 1:
+				raise Errors.WafError('Dependency cycle found!')
+			return n.prio_order
+
+		for x in tasks:
+			if x.visited != 0:
+				# must visit all to detect cycles
+				continue
+			try:
+				visit(x)
+			except Errors.WafError:
+				self.debug_cycles(tasks, reverse)
+
+		ready = []
+		waiting = []
+		for x in tasks:
+			for k in x.run_after:
+				if not k.hasrun:
+					waiting.append(x)
+					break
+			else:
+				ready.append(x)
+		return (ready, waiting)
+
+	def debug_cycles(self, tasks, reverse):
+		tmp = {}
+		for x in tasks:
+			tmp[x] = 0
+
+		def visit(n, acc):
+			if isinstance(n, Task.TaskGroup):
+				for k in n.next:
+					visit(k, acc)
+				return
+			if tmp[n] == 0:
+				tmp[n] = 1
+				for k in reverse.get(n, []):
+					visit(k, [n] + acc)
+				tmp[n] = 2
+			elif tmp[n] == 1:
+				lst = []
+				for tsk in acc:
+					lst.append(repr(tsk))
+					if tsk is n:
+						# exclude prior nodes, we want the minimum cycle
+						break
+				raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst))
+		for x in tasks:
+			visit(x, [])
 

+ 253 - 195
sdk/waf/waflib/Scripting.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)
 
 "Module called for configuring, compiling and installing targets"
 
@@ -24,46 +24,71 @@ def waf_entry_point(current_directory, version, wafdir):
 	:param wafdir: absolute path representing the directory of the waf library
 	:type wafdir: string
 	"""
-
 	Logs.init_log()
 
 	if Context.WAFVERSION != version:
-		Logs.error('Waf script %r and library %r do not match (directory %r)' % (version, Context.WAFVERSION, wafdir))
+		Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir)
 		sys.exit(1)
 
-	if '--version' in sys.argv:
-		Context.run_dir = current_directory
-		ctx = Context.create_context('options')
-		ctx.curdir = current_directory
-		ctx.parse_args()
-		sys.exit(0)
-
+	# Store current directory before any chdir
 	Context.waf_dir = wafdir
-	Context.launch_dir = current_directory
+	Context.run_dir = Context.launch_dir = current_directory
+	start_dir = current_directory
+	no_climb = os.environ.get('NOCLIMB')
+
+	if len(sys.argv) > 1:
+		# os.path.join handles absolute paths
+		# if sys.argv[1] is not an absolute path, then it is relative to the current working directory
+		potential_wscript = os.path.join(current_directory, sys.argv[1])
+		if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript):
+			# need to explicitly normalize the path, as it may contain extra '/.'
+			path = os.path.normpath(os.path.dirname(potential_wscript))
+			start_dir = os.path.abspath(path)
+			no_climb = True
+			sys.argv.pop(1)
+
+	ctx = Context.create_context('options')
+	# allow --ver option in user scripts #2453
+	ctx.parser.allow_abbrev = False
+	(options, commands) = ctx.parse_cmd_args(allow_unknown=True)
+	if options.version:
+		print('%s %s (%s)'%(Context.WAFNAME, Context.WAFVERSION, Context.WAFREVISION))
+		sys.exit(0)
+	if getattr(options, 'top', None):
+		start_dir = Context.run_dir = Context.top_dir = options.top
+		no_climb = True
+	if getattr(options, 'out', None):
+		Context.out_dir = options.out
 
 	# if 'configure' is in the commands, do not search any further
-	no_climb = os.environ.get('NOCLIMB', None)
 	if not no_climb:
 		for k in no_climb_commands:
-			if k in sys.argv:
-				no_climb = True
-				break
+			for y in commands:
+				if y.startswith(k):
+					no_climb = True
+					break
 
 	# try to find a lock file (if the project was configured)
 	# at the same time, store the first wscript file seen
-	cur = current_directory
+	cur = start_dir
 	while cur:
-		lst = os.listdir(cur)
+		try:
+			lst = os.listdir(cur)
+		except OSError:
+			lst = []
+			Logs.error('Directory %r is unreadable!', cur)
 		if Options.lockfile in lst:
 			env = ConfigSet.ConfigSet()
 			try:
 				env.load(os.path.join(cur, Options.lockfile))
 				ino = os.stat(cur)[stat.ST_INO]
-			except Exception:
+			except EnvironmentError:
 				pass
 			else:
 				# check if the folder was not moved
-				for x in [env.run_dir, env.top_dir, env.out_dir]:
+				for x in (env.run_dir, env.top_dir, env.out_dir):
+					if not x:
+						continue
 					if Utils.is_win32:
 						if cur == x:
 							load = True
@@ -79,7 +104,7 @@ def waf_entry_point(current_directory, version, wafdir):
 								load = True
 								break
 				else:
-					Logs.warn('invalid lock file in %s' % cur)
+					Logs.warn('invalid lock file in %s', cur)
 					load = False
 
 				if load:
@@ -100,56 +125,62 @@ def waf_entry_point(current_directory, version, wafdir):
 		if no_climb:
 			break
 
-	if not Context.run_dir:
-		if '-h' in sys.argv or '--help' in sys.argv:
-			Logs.warn('No wscript file found: the help message may be incomplete')
-			Context.run_dir = current_directory
-			ctx = Context.create_context('options')
-			ctx.curdir = current_directory
-			ctx.parse_args()
+	wscript = os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE))
+	if not os.path.exists(wscript):
+		if options.whelp:
+			Logs.warn('These are the generic options (no wscript/project found)')
+			ctx.parser.print_help()
 			sys.exit(0)
-		Logs.error('Waf: Run from a directory containing a file named %r' % Context.WSCRIPT_FILE)
+		Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE)
 		sys.exit(1)
 
 	try:
 		os.chdir(Context.run_dir)
 	except OSError:
-		Logs.error('Waf: The folder %r is unreadable' % Context.run_dir)
+		Logs.error('Waf: The folder %r is unreadable', Context.run_dir)
 		sys.exit(1)
 
 	try:
-		set_main_module(Context.run_dir + os.sep + Context.WSCRIPT_FILE)
+		set_main_module(wscript)
 	except Errors.WafError as e:
 		Logs.pprint('RED', e.verbose_msg)
 		Logs.error(str(e))
 		sys.exit(1)
 	except Exception as e:
-		Logs.error('Waf: The wscript in %r is unreadable' % Context.run_dir, e)
+		Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir)
 		traceback.print_exc(file=sys.stdout)
 		sys.exit(2)
 
-	"""
-	import cProfile, pstats
-	cProfile.runctx("from waflib import Scripting; Scripting.run_commands()", {}, {}, 'profi.txt')
-	p = pstats.Stats('profi.txt')
-	p.sort_stats('time').print_stats(25) # or 'cumulative'
-	"""
-	try:
-		run_commands()
-	except Errors.WafError as e:
-		if Logs.verbose > 1:
-			Logs.pprint('RED', e.verbose_msg)
-		Logs.error(e.msg)
-		sys.exit(1)
-	except SystemExit:
-		raise
-	except Exception as e:
-		traceback.print_exc(file=sys.stdout)
-		sys.exit(2)
-	except KeyboardInterrupt:
-		Logs.pprint('RED', 'Interrupted')
-		sys.exit(68)
-	#"""
+	if options.profile:
+		import cProfile, pstats
+		cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt')
+		p = pstats.Stats('profi.txt')
+		p.sort_stats('time').print_stats(75) # or 'cumulative'
+	else:
+		try:
+			try:
+				run_commands()
+			except:
+				if options.pdb:
+					import pdb
+					type, value, tb = sys.exc_info()
+					traceback.print_exc()
+					pdb.post_mortem(tb)
+				else:
+					raise
+		except Errors.WafError as e:
+			if Logs.verbose > 1:
+				Logs.pprint('RED', e.verbose_msg)
+			Logs.error(e.msg)
+			sys.exit(1)
+		except SystemExit:
+			raise
+		except Exception as e:
+			traceback.print_exc(file=sys.stdout)
+			sys.exit(2)
+		except KeyboardInterrupt:
+			Logs.pprint('RED', 'Interrupted')
+			sys.exit(68)
 
 def set_main_module(file_path):
 	"""
@@ -170,7 +201,7 @@ def set_main_module(file_path):
 		name = obj.__name__
 		if not name in Context.g_module.__dict__:
 			setattr(Context.g_module, name, obj)
-	for k in [update, dist, distclean, distcheck, update]:
+	for k in (dist, distclean, distcheck):
 		set_def(k)
 	# add dummy init and shutdown functions if they're not defined
 	if not 'init' in Context.g_module.__dict__:
@@ -182,32 +213,23 @@ def set_main_module(file_path):
 
 def parse_options():
 	"""
-	Parse the command-line options and initialize the logging system.
+	Parses the command-line options and initialize the logging system.
 	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
 	"""
-	Context.create_context('options').execute()
-
+	ctx = Context.create_context('options')
+	ctx.execute()
 	if not Options.commands:
-		Options.commands = [default_cmd]
-	Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076
-
-	# process some internal Waf options
-	Logs.verbose = Options.options.verbose
-	Logs.init_log()
-
-	if Options.options.zones:
-		Logs.zones = Options.options.zones.split(',')
-		if not Logs.verbose:
-			Logs.verbose = 1
-	elif Logs.verbose > 0:
-		Logs.zones = ['runner']
-
-	if Logs.verbose > 2:
-		Logs.zones = ['*']
+		if isinstance(default_cmd, list):
+			Options.commands.extend(default_cmd)
+		else:
+			Options.commands.append(default_cmd)
+	if Options.options.whelp:
+		ctx.parser.print_help()
+		sys.exit(0)
 
 def run_command(cmd_name):
 	"""
-	Execute a single command. Called by :py:func:`waflib.Scripting.run_commands`.
+	Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`.
 
 	:param cmd_name: command to execute, like ``build``
 	:type cmd_name: string
@@ -216,12 +238,16 @@ def run_command(cmd_name):
 	ctx.log_timer = Utils.Timer()
 	ctx.options = Options.options # provided for convenience
 	ctx.cmd = cmd_name
-	ctx.execute()
+	try:
+		ctx.execute()
+	finally:
+		# Issue 1374
+		ctx.finalize()
 	return ctx
 
 def run_commands():
 	"""
-	Execute the commands that were given on the command-line, and the other options
+	Execute the Waf commands that were given on the command-line, and the other options
 	Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed
 	after :py:func:`waflib.Scripting.parse_options`.
 	"""
@@ -230,18 +256,11 @@ def run_commands():
 	while Options.commands:
 		cmd_name = Options.commands.pop(0)
 		ctx = run_command(cmd_name)
-		Logs.info('%r finished successfully (%s)' % (cmd_name, str(ctx.log_timer)))
+		Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer)
 	run_command('shutdown')
 
 ###########################################################################################
 
-def _can_distclean(name):
-	# WARNING: this method may disappear anytime
-	for k in '.o .moc .exe'.split():
-		if name.endswith(k):
-			return True
-	return False
-
 def distclean_dir(dirname):
 	"""
 	Distclean function called in the particular case when::
@@ -253,56 +272,77 @@ def distclean_dir(dirname):
 	"""
 	for (root, dirs, files) in os.walk(dirname):
 		for f in files:
-			if _can_distclean(f):
-				fname = root + os.sep + f
+			if f.endswith(('.o', '.moc', '.exe')):
+				fname = os.path.join(root, f)
 				try:
 					os.remove(fname)
 				except OSError:
-					Logs.warn('Could not remove %r' % fname)
+					Logs.warn('Could not remove %r', fname)
 
-	for x in [Context.DBFILE, 'config.log']:
+	for x in (Context.DBFILE, 'config.log'):
 		try:
 			os.remove(x)
 		except OSError:
 			pass
 
 	try:
-		shutil.rmtree('c4che')
+		shutil.rmtree(Build.CACHE_DIR)
 	except OSError:
 		pass
 
 def distclean(ctx):
-	'''removes the build directory'''
-	lst = os.listdir('.')
-	for f in lst:
-		if f == Options.lockfile:
-			try:
-				proj = ConfigSet.ConfigSet(f)
-			except IOError:
-				Logs.warn('Could not read %r' % f)
-				continue
+	'''removes build folders and data'''
 
-			if proj['out_dir'] != proj['top_dir']:
-				try:
-					shutil.rmtree(proj['out_dir'])
-				except IOError:
-					pass
-				except OSError as e:
-					if e.errno != errno.ENOENT:
-						Logs.warn('project %r cannot be removed' % proj[Context.OUT])
-			else:
-				distclean_dir(proj['out_dir'])
+	def remove_and_log(k, fun):
+		try:
+			fun(k)
+		except EnvironmentError as e:
+			if e.errno != errno.ENOENT:
+				Logs.warn('Could not remove %r', k)
 
-			for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']):
-				try:
-					os.remove(os.path.join(k, Options.lockfile))
-				except OSError as e:
-					if e.errno != errno.ENOENT:
-						Logs.warn('file %r cannot be removed' % f)
+	# remove waf cache folders on the top-level
+	if not Options.commands:
+		for k in os.listdir('.'):
+			for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split():
+				if k.startswith(x):
+					remove_and_log(k, shutil.rmtree)
+
+	# remove a build folder, if any
+	cur = '.'
+	if os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top:
+		cur = ctx.options.out
+
+	try:
+		lst = os.listdir(cur)
+	except OSError:
+		Logs.warn('Could not read %r', cur)
+		return
+
+	if Options.lockfile in lst:
+		f = os.path.join(cur, Options.lockfile)
+		try:
+			env = ConfigSet.ConfigSet(f)
+		except EnvironmentError:
+			Logs.warn('Could not read %r', f)
+			return
+
+		if not env.out_dir or not env.top_dir:
+			Logs.warn('Invalid lock file %r', f)
+			return
 
-		# remove the local waf cache
-		if f.startswith('.waf') and not Options.commands:
-			shutil.rmtree(f, ignore_errors=True)
+		if env.out_dir == env.top_dir:
+			distclean_dir(env.out_dir)
+		else:
+			remove_and_log(env.out_dir, shutil.rmtree)
+
+		env_dirs = [env.out_dir]
+		if not (os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top):
+			env_dirs.append(env.top_dir)
+		if not (os.environ.get('NO_LOCK_IN_RUN') or ctx.options.no_lock_in_run):
+			env_dirs.append(env.run_dir)
+		for k in env_dirs:
+			p = os.path.join(k, Options.lockfile)
+			remove_and_log(p, os.remove)
 
 class Dist(Context.Context):
 	'''creates an archive containing the project source code'''
@@ -320,7 +360,7 @@ class Dist(Context.Context):
 
 	def archive(self):
 		"""
-		Create the archive.
+		Creates the source archive.
 		"""
 		import tarfile
 
@@ -334,49 +374,57 @@ class Dist(Context.Context):
 		node = self.base_path.make_node(arch_name)
 		try:
 			node.delete()
-		except Exception:
+		except OSError:
 			pass
 
 		files = self.get_files()
 
 		if self.algo.startswith('tar.'):
-			tar = tarfile.open(arch_name, 'w:' + self.algo.replace('tar.', ''))
+			tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', ''))
 
 			for x in files:
 				self.add_tar_file(x, tar)
 			tar.close()
 		elif self.algo == 'zip':
 			import zipfile
-			zip = zipfile.ZipFile(arch_name, 'w', compression=zipfile.ZIP_DEFLATED)
+			zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED)
 
 			for x in files:
 				archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
-				zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
+				if os.environ.get('SOURCE_DATE_EPOCH'):
+					# TODO: parse that timestamp
+					zip.writestr(zipfile.ZipInfo(archive_name), x.read(), zipfile.ZIP_DEFLATED)
+				else:
+					zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
 			zip.close()
 		else:
-			self.fatal('Valid algo types are tar.bz2, tar.gz or zip')
+			self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')
 
 		try:
-			from hashlib import sha1 as sha
+			from hashlib import sha256
 		except ImportError:
-			from sha import sha
-		try:
-			digest = " (sha=%r)" % sha(node.read()).hexdigest()
-		except Exception:
 			digest = ''
+		else:
+			digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest()
 
-		Logs.info('New archive created: %s%s' % (self.arch_name, digest))
+		Logs.info('New archive created: %s%s', self.arch_name, digest)
 
 	def get_tar_path(self, node):
 		"""
-		return the path to use for a node in the tar archive, the purpose of this
+		Return the path to use for a node in the tar archive, the purpose of this
 		is to let subclases resolve symbolic links or to change file names
+
+		:return: absolute path
+		:rtype: string
 		"""
 		return node.abspath()
 
 	def add_tar_file(self, x, tar):
 		"""
-		Add a file to the tar archive. Transform symlinks into files if the files lie out of the project tree.
+		Adds a file to the tar archive. Symlinks are not verified.
+
+		:param x: file path
+		:param tar: tar file object
 		"""
 		p = self.get_tar_path(x)
 		tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
@@ -384,16 +432,21 @@ class Dist(Context.Context):
 		tinfo.gid   = 0
 		tinfo.uname = 'root'
 		tinfo.gname = 'root'
+		if os.environ.get('SOURCE_DATE_EPOCH'):
+			tinfo.mtime = int(os.environ.get('SOURCE_DATE_EPOCH'))
 
-		fu = None
-		try:
-			fu = open(p, 'rb')
-			tar.addfile(tinfo, fileobj=fu)
-		finally:
-			if fu:
-				fu.close()
+		if os.path.isfile(p):
+			with open(p, 'rb') as f:
+				tar.addfile(tinfo, fileobj=f)
+		else:
+			tar.addfile(tinfo)
 
 	def get_tar_prefix(self):
+		"""
+		Returns the base path for files added into the archive tar file
+
+		:rtype: string
+		"""
 		try:
 			return self.tar_prefix
 		except AttributeError:
@@ -401,7 +454,8 @@ class Dist(Context.Context):
 
 	def get_arch_name(self):
 		"""
-		Return the name of the archive to create. Change the default value by setting *arch_name*::
+		Returns the archive file name.
+		Set the attribute *arch_name* to change the default value::
 
 			def dist(ctx):
 				ctx.arch_name = 'ctx.tar.bz2'
@@ -416,7 +470,7 @@ class Dist(Context.Context):
 
 	def get_base_name(self):
 		"""
-		Return the default name of the main directory in the archive, which is set to *appname-version*.
+		Returns the default name of the main directory in the archive, which is set to *appname-version*.
 		Set the attribute *base_name* to change the default value::
 
 			def dist(ctx):
@@ -434,8 +488,8 @@ class Dist(Context.Context):
 
 	def get_excl(self):
 		"""
-		Return the patterns to exclude for finding the files in the top-level directory. Set the attribute *excl*
-		to change the default value::
+		Returns the patterns to exclude for finding the files in the top-level directory.
+		Set the attribute *excl* to change the default value::
 
 			def dist(ctx):
 				ctx.excl = 'build **/*.o **/*.class'
@@ -445,21 +499,22 @@ class Dist(Context.Context):
 		try:
 			return self.excl
 		except AttributeError:
-			self.excl = Node.exclude_regs + ' **/waf-1.7.* **/.waf-1.7* **/waf3-1.7.* **/.waf3-1.7* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
-			nd = self.root.find_node(Context.out_dir)
-			if nd:
-				self.excl += ' ' + nd.path_from(self.base_path)
+			self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
+			if Context.out_dir:
+				nd = self.root.find_node(Context.out_dir)
+				if nd:
+					self.excl += ' ' + nd.path_from(self.base_path)
 			return self.excl
 
 	def get_files(self):
 		"""
-		The files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`. Set
-		*files* to prevent this behaviour::
+		Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`.
+		Set *files* to prevent this behaviour::
 
 			def dist(ctx):
 				ctx.files = ctx.path.find_node('wscript')
 
-		The files are searched from the directory 'base_path', to change it, set::
+		Files are also searched from the directory 'base_path', to change it, set::
 
 			def dist(ctx):
 				ctx.base_path = path
@@ -472,18 +527,12 @@ class Dist(Context.Context):
 			files = self.base_path.ant_glob('**/*', excl=self.get_excl())
 		return files
 
-
 def dist(ctx):
 	'''makes a tarball for redistributing the sources'''
 	pass
 
 class DistCheck(Dist):
-	"""
-	Create an archive of the project, and try to build the project in a temporary directory::
-
-		$ waf distcheck
-	"""
-
+	"""creates an archive with dist, then tries to build it"""
 	fun = 'distcheck'
 	cmd = 'distcheck'
 
@@ -495,32 +544,33 @@ class DistCheck(Dist):
 		self.archive()
 		self.check()
 
+	def make_distcheck_cmd(self, tmpdir):
+		cfg = []
+		if Options.options.distcheck_args:
+			cfg = shlex.split(Options.options.distcheck_args)
+		else:
+			cfg = [x for x in sys.argv if x.startswith('-')]
+		cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg
+		return cmd
+
 	def check(self):
 		"""
-		Create the archive, uncompress it and try to build the project
+		Creates the archive, uncompresses it and tries to build the project
 		"""
 		import tempfile, tarfile
 
-		t = None
-		try:
-			t = tarfile.open(self.get_arch_name())
+		with tarfile.open(self.get_arch_name()) as t:
 			for x in t:
-				t.extract(x)
-		finally:
-			if t:
-				t.close()
-
-		cfg = []
-
-		if Options.options.distcheck_args:
-			cfg = shlex.split(Options.options.distcheck_args)
-		else:
-			cfg = [x for x in sys.argv if x.startswith('-')]
+				if hasattr(tarfile, 'data_filter'):
+					t.extract(x, filter='data')
+				else:
+					t.extract(x)
 
 		instdir = tempfile.mkdtemp('.inst', self.get_base_name())
-		ret = Utils.subprocess.Popen([sys.executable, sys.argv[0], 'configure', 'install', 'uninstall', '--destdir=' + instdir] + cfg, cwd=self.get_base_name()).wait()
+		cmd = self.make_distcheck_cmd(instdir)
+		ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait()
 		if ret:
-			raise Errors.WafError('distcheck failed with code %i' % ret)
+			raise Errors.WafError('distcheck failed with code %r' % ret)
 
 		if os.path.exists(instdir):
 			raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir)
@@ -532,23 +582,14 @@ def distcheck(ctx):
 	'''checks if the project compiles (tarball from 'dist')'''
 	pass
 
-def update(ctx):
-	'''updates the plugins from the *waflib/extras* directory'''
-	lst = Options.options.files.split(',')
-	if not lst:
-		lst = [x for x in Utils.listdir(Context.waf_dir + '/waflib/extras') if x.endswith('.py')]
-	for x in lst:
-		tool = x.replace('.py', '')
-		try:
-			Configure.download_tool(tool, force=True, ctx=ctx)
-		except Errors.WafError:
-			Logs.error('Could not find the tool %s in the remote repository' % x)
-
 def autoconfigure(execute_method):
 	"""
-	Decorator used to set the commands that can be configured automatically
+	Decorator that enables context commands to run *configure* as needed.
 	"""
 	def execute(self):
+		"""
+		Wraps :py:func:`waflib.Context.Context.execute` on the context class
+		"""
 		if not Configure.autoconfig:
 			return execute_method(self)
 
@@ -556,7 +597,7 @@ def autoconfigure(execute_method):
 		do_config = False
 		try:
 			env.load(os.path.join(Context.top_dir, Options.lockfile))
-		except Exception:
+		except EnvironmentError:
 			Logs.warn('Configuring the project')
 			do_config = True
 		else:
@@ -564,16 +605,33 @@ def autoconfigure(execute_method):
 				do_config = True
 			else:
 				h = 0
-				for f in env['files']:
-					h = hash((h, Utils.readf(f, 'rb')))
-				do_config = h != env.hash
+				for f in env.files:
+					try:
+						h = Utils.h_list((h, Utils.readf(f, 'rb')))
+					except EnvironmentError:
+						do_config = True
+						break
+				else:
+					do_config = h != env.hash
 
 		if do_config:
-			Options.commands.insert(0, self.cmd)
-			Options.commands.insert(0, 'configure')
-			return
-
-		return execute_method(self)
+			cmd = env.config_cmd or 'configure'
+			if Configure.autoconfig == 'clobber':
+				tmp = Options.options.__dict__
+				launch_dir_tmp = Context.launch_dir
+				if env.options:
+					Options.options.__dict__ = env.options
+				Context.launch_dir = env.launch_dir
+				try:
+					run_command(cmd)
+				finally:
+					Options.options.__dict__ = tmp
+					Context.launch_dir = launch_dir_tmp
+			else:
+				run_command(cmd)
+			run_command(self.cmd)
+		else:
+			return execute_method(self)
 	return execute
 Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)
 

File diff suppressed because it is too large
+ 424 - 309
sdk/waf/waflib/Task.py


+ 257 - 161
sdk/waf/waflib/TaskGen.py

@@ -1,26 +1,26 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)
 
 """
 Task generators
 
 The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code)
 The instances can have various parameters, but the creation of task nodes (Task.py)
-is always postponed. To achieve this, various methods are called from the method "apply"
-
-
+is deferred. To achieve this, various methods are called from the method "apply"
 """
 
-import copy, re, os
+import copy, re, os, functools
 from waflib import Task, Utils, Logs, Errors, ConfigSet, Node
 
 feats = Utils.defaultdict(set)
 """remember the methods declaring features"""
 
+HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh']
+
 class task_gen(object):
 	"""
-	Instances of this class create :py:class:`waflib.Task.TaskBase` when
+	Instances of this class create :py:class:`waflib.Task.Task` when
 	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
 	A few notes:
 
@@ -31,39 +31,29 @@ class task_gen(object):
 	* The attribute 'idx' is a counter of task generators in the same path
 	"""
 
-	mappings = {}
-	prec = Utils.defaultdict(list)
+	mappings = Utils.ordered_iter_dict()
+	"""Mappings are global file extension mappings that are retrieved in the order of definition"""
+
+	prec = Utils.defaultdict(set)
+	"""Dict that holds the precedence execution rules for task generator methods"""
 
 	def __init__(self, *k, **kw):
 		"""
-		The task generator objects predefine various attributes (source, target) for possible
+		Task generator objects predefine various attributes (source, target) for possible
 		processing by process_rule (make-like rules) or process_source (extensions, misc methods)
 
-		The tasks are stored on the attribute 'tasks'. They are created by calling methods
-		listed in self.meths *or* referenced in the attribute features
-		A topological sort is performed to ease the method re-use.
+		Tasks are stored on the attribute 'tasks'. They are created by calling methods
+		listed in ``self.meths`` or referenced in the attribute ``features``
+		A topological sort is performed to execute the methods in correct order.
 
-		The extra key/value elements passed in kw are set as attributes
+		The extra key/value elements passed in ``kw`` are set as attributes
 		"""
-
-		# so we will have to play with directed acyclic graphs
-		# detect cycles, etc
-		self.source = ''
+		self.source = []
 		self.target = ''
 
 		self.meths = []
 		"""
-		List of method names to execute (it is usually a good idea to avoid touching this)
-		"""
-
-		self.prec = Utils.defaultdict(list)
-		"""
-		Precedence table for sorting the methods in self.meths
-		"""
-
-		self.mappings = {}
-		"""
-		List of mappings {extension -> function} for processing files by extension
+		List of method names to execute (internal)
 		"""
 
 		self.features = []
@@ -73,7 +63,7 @@ class task_gen(object):
 
 		self.tasks = []
 		"""
-		List of tasks created.
+		Tasks created are added to this list
 		"""
 
 		if not 'bld' in kw:
@@ -84,33 +74,52 @@ class task_gen(object):
 		else:
 			self.bld = kw['bld']
 			self.env = self.bld.env.derive()
-			self.path = self.bld.path # emulate chdir when reading scripts
+			self.path = kw.get('path', self.bld.path) # by default, emulate chdir when reading scripts
 
-			# provide a unique id
+			# Provide a unique index per folder
+			# This is part of a measure to prevent output file name collisions
+			path = self.path.abspath()
 			try:
-				self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1
+				self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1
 			except AttributeError:
 				self.bld.idx = {}
-				self.idx = self.bld.idx[id(self.path)] = 1
+				self.idx = self.bld.idx[path] = 1
+
+			# Record the global task generator count
+			try:
+				self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1
+			except AttributeError:
+				self.tg_idx_count = self.bld.tg_idx_count = 1
 
 		for key, val in kw.items():
 			setattr(self, key, val)
 
 	def __str__(self):
-		"""for debugging purposes"""
+		"""Debugging helper"""
 		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())
 
 	def __repr__(self):
-		"""for debugging purposes"""
+		"""Debugging helper"""
 		lst = []
-		for x in self.__dict__.keys():
-			if x not in ['env', 'bld', 'compiled_tasks', 'tasks']:
+		for x in self.__dict__:
+			if x not in ('env', 'bld', 'compiled_tasks', 'tasks'):
 				lst.append("%s=%s" % (x, repr(getattr(self, x))))
 		return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())
 
+	def get_cwd(self):
+		"""
+		Current working directory for the task generator, defaults to the build directory.
+		This is still used in a few places but it should disappear at some point as the classes
+		define their own working directory.
+
+		:rtype: :py:class:`waflib.Node.Node`
+		"""
+		return self.bld.bldnode
+
 	def get_name(self):
 		"""
-		If not set, the name is computed from the target name::
+		If the attribute ``name`` is not set on the instance,
+		the name is computed from the target name::
 
 			def build(bld):
 				x = bld(name='foo')
@@ -137,18 +146,20 @@ class task_gen(object):
 
 	def to_list(self, val):
 		"""
-		Ensure that a parameter is a list
+		Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list`
 
 		:type val: string or list of string
 		:param val: input to return as a list
 		:rtype: list
 		"""
-		if isinstance(val, str): return val.split()
-		else: return val
+		if isinstance(val, str):
+			return val.split()
+		else:
+			return val
 
 	def post(self):
 		"""
-		Create task objects. The following operations are performed:
+		Creates tasks for this task generators. The following operations are performed:
 
 		#. The body of this method is called only once and sets the attribute ``posted``
 		#. The attribute ``features`` is used to add more methods in ``self.meths``
@@ -156,27 +167,25 @@ class task_gen(object):
 		#. The methods are then executed in order
 		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
 		"""
-
-		# we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
 		if getattr(self, 'posted', None):
-			#error("OBJECT ALREADY POSTED" + str( self))
 			return False
 		self.posted = True
 
 		keys = set(self.meths)
+		keys.update(feats['*'])
 
 		# add the methods listed in the features
 		self.features = Utils.to_list(self.features)
-		for x in self.features + ['*']:
+		for x in self.features:
 			st = feats[x]
-			if not st:
-				if not x in Task.classes:
-					Logs.warn('feature %r does not exist - bind at least one method to it' % x)
-			keys.update(list(st)) # ironpython 2.7 wants the cast to list
+			if st:
+				keys.update(st)
+			elif not x in Task.classes:
+				Logs.warn('feature %r does not exist - bind at least one method to it?', x)
 
 		# copy the precedence table
 		prec = {}
-		prec_tbl = self.prec or task_gen.prec
+		prec_tbl = self.prec
 		for x in prec_tbl:
 			if x in keys:
 				prec[x] = prec_tbl[x]
@@ -185,17 +194,19 @@ class task_gen(object):
 		tmp = []
 		for a in keys:
 			for x in prec.values():
-				if a in x: break
+				if a in x:
+					break
 			else:
 				tmp.append(a)
 
-		tmp.sort()
+		tmp.sort(reverse=True)
 
 		# topological sort
 		out = []
 		while tmp:
 			e = tmp.pop()
-			if e in keys: out.append(e)
+			if e in keys:
+				out.append(e)
 			try:
 				nlst = prec[e]
 			except KeyError:
@@ -208,27 +219,32 @@ class task_gen(object):
 							break
 					else:
 						tmp.append(x)
+						tmp.sort(reverse=True)
 
 		if prec:
-			raise Errors.WafError('Cycle detected in the method execution %r' % prec)
-		out.reverse()
+			buf = ['Cycle detected in the method execution:']
+			for k, v in prec.items():
+				buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
+			raise Errors.WafError('\n'.join(buf))
 		self.meths = out
 
 		# then we run the methods in order
-		Logs.debug('task_gen: posting %s %d' % (self, id(self)))
+		Logs.debug('task_gen: posting %s %d', self, id(self))
 		for x in out:
 			try:
 				v = getattr(self, x)
 			except AttributeError:
 				raise Errors.WafError('%r is not a valid task generator method' % x)
-			Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
+			Logs.debug('task_gen: -> %s (%d)', x, id(self))
 			v()
 
-		Logs.debug('task_gen: posted %s' % self.name)
+		Logs.debug('task_gen: posted %s', self.name)
 		return True
 
 	def get_hook(self, node):
 		"""
+		Returns the ``@extension`` method to call for a Node of a particular extension.
+
 		:param node: Input file to process
 		:type node: :py:class:`waflib.Tools.Node.Node`
 		:return: A method able to process the input node by looking at the extension
@@ -236,17 +252,19 @@ class task_gen(object):
 		"""
 		name = node.name
 		for k in self.mappings:
-			if name.endswith(k):
-				return self.mappings[k]
-		for k in task_gen.mappings:
-			if name.endswith(k):
-				return task_gen.mappings[k]
-		raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)" % (node, task_gen.mappings.keys()))
-
-	def create_task(self, name, src=None, tgt=None):
+			try:
+				if name.endswith(k):
+					return self.mappings[k]
+			except TypeError:
+				# regexps objects
+				if k.match(name):
+					return self.mappings[k]
+		keys = list(self.mappings.keys())
+		raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys))
+
+	def create_task(self, name, src=None, tgt=None, **kw):
 		"""
-		Wrapper for creating task instances. The classes are retrieved from the
-		context class if possible, then from the global dict Task.classes.
+		Creates task instances.
 
 		:param name: task class name
 		:type name: string
@@ -255,19 +273,20 @@ class task_gen(object):
 		:param tgt: output nodes
 		:type tgt: list of :py:class:`waflib.Tools.Node.Node`
 		:return: A task object
-		:rtype: :py:class:`waflib.Task.TaskBase`
+		:rtype: :py:class:`waflib.Task.Task`
 		"""
 		task = Task.classes[name](env=self.env.derive(), generator=self)
 		if src:
 			task.set_inputs(src)
 		if tgt:
 			task.set_outputs(tgt)
+		task.__dict__.update(kw)
 		self.tasks.append(task)
 		return task
 
 	def clone(self, env):
 		"""
-		Make a copy of a task generator. Once the copy is made, it is necessary to ensure that the
+		Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that the
 		it does not create the same output files as the original, or the same files may
 		be compiled several times.
 
@@ -278,9 +297,9 @@ class task_gen(object):
 		"""
 		newobj = self.bld()
 		for x in self.__dict__:
-			if x in ['env', 'bld']:
+			if x in ('env', 'bld'):
 				continue
-			elif x in ['path', 'features']:
+			elif x in ('path', 'features'):
 				setattr(newobj, x, getattr(self, x))
 			else:
 				setattr(newobj, x, copy.copy(getattr(self, x)))
@@ -296,7 +315,7 @@ class task_gen(object):
 def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
 	ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
 	"""
-	Create a new mapping and a task class for processing files by extension.
+	Creates a new mapping and a task class for processing files by extension.
 	See Tools/flex.py for an example.
 
 	:param name: name for the task class
@@ -315,7 +334,7 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
 	:type before: list of string
 	:param after: execute instances of this task after classes of the given names
 	:type after: list of string
-	:param decider: if present, use it to create the output nodes for the task
+	:param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order)
 	:type decider: function
 	:param scan: scanner function for the task
 	:type scan: function
@@ -329,14 +348,13 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
 	cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)
 
 	def x_file(self, node):
-		ext = decider and decider(self, node) or cls.ext_out
 		if ext_in:
 			_ext_in = ext_in[0]
 
 		tsk = self.create_task(name, node)
 		cnt = 0
 
-		keys = list(self.mappings.keys()) + list(self.__class__.mappings.keys())
+		ext = decider(self, node) if decider else cls.ext_out
 		for x in ext:
 			k = node.change_ext(x, ext_in=_ext_in)
 			tsk.outputs.append(k)
@@ -345,14 +363,15 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
 				if cnt < int(reentrant):
 					self.source.append(k)
 			else:
-				for y in keys: # ~ nfile * nextensions :-/
+				# reinject downstream files into the build
+				for y in self.mappings: # ~ nfile * nextensions :-/
 					if k.name.endswith(y):
 						self.source.append(k)
 						break
 			cnt += 1
 
 		if install_path:
-			self.bld.install_files(install_path, tsk.outputs)
+			self.install_task = self.add_install_files(install_to=install_path, install_from=tsk.outputs)
 		return tsk
 
 	for x in cls.ext_in:
@@ -361,7 +380,7 @@ def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
 
 def taskgen_method(func):
 	"""
-	Decorator: register a method as a task generator method.
+	Decorator that registers method as a task generator method.
 	The function must accept a task generator as first parameter::
 
 		from waflib.TaskGen import taskgen_method
@@ -378,10 +397,10 @@ def taskgen_method(func):
 
 def feature(*k):
 	"""
-	Decorator: register a task generator method that will be executed when the
-	object attribute 'feature' contains the corresponding key(s)::
+	Decorator that registers a task generator method that will be executed when the
+	object attribute ``feature`` contains the corresponding key(s)::
 
-		from waflib.Task import feature
+		from waflib.TaskGen import feature
 		@feature('myfeature')
 		def myfunction(self):
 			print('that is my feature!')
@@ -400,7 +419,7 @@ def feature(*k):
 
 def before_method(*k):
 	"""
-	Decorator: register a task generator method which will be executed
+	Decorator that registers a task generator method which will be executed
 	before the functions of given name(s)::
 
 		from waflib.TaskGen import feature, before
@@ -420,16 +439,14 @@ def before_method(*k):
 	def deco(func):
 		setattr(task_gen, func.__name__, func)
 		for fun_name in k:
-			if not func.__name__ in task_gen.prec[fun_name]:
-				task_gen.prec[fun_name].append(func.__name__)
-				#task_gen.prec[fun_name].sort()
+			task_gen.prec[func.__name__].add(fun_name)
 		return func
 	return deco
 before = before_method
 
 def after_method(*k):
 	"""
-	Decorator: register a task generator method which will be executed
+	Decorator that registers a task generator method which will be executed
 	after the functions of given name(s)::
 
 		from waflib.TaskGen import feature, after
@@ -449,16 +466,14 @@ def after_method(*k):
 	def deco(func):
 		setattr(task_gen, func.__name__, func)
 		for fun_name in k:
-			if not fun_name in task_gen.prec[func.__name__]:
-				task_gen.prec[func.__name__].append(fun_name)
-				#task_gen.prec[func.__name__].sort()
+			task_gen.prec[fun_name].add(func.__name__)
 		return func
 	return deco
 after = after_method
 
 def extension(*k):
 	"""
-	Decorator: register a task generator method which will be invoked during
+	Decorator that registers a task generator method which will be invoked during
 	the processing of source files for the extension given::
 
 		from waflib import Task
@@ -477,14 +492,11 @@ def extension(*k):
 		return func
 	return deco
 
-# ---------------------------------------------------------------
-# The following methods are task generator methods commonly used
-# they are almost examples, the rest of waf core does not depend on them
-
 @taskgen_method
 def to_nodes(self, lst, path=None):
 	"""
-	Convert the input list into a list of nodes.
+	Flatten the input list of string/nodes/lists into a list of nodes.
+
 	It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
 	It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:
 
@@ -498,24 +510,26 @@ def to_nodes(self, lst, path=None):
 	path = path or self.path
 	find = path.find_resource
 
-	if isinstance(lst, self.path.__class__):
+	if isinstance(lst, Node.Node):
 		lst = [lst]
 
-	# either a list or a string, convert to a list of nodes
 	for x in Utils.to_list(lst):
 		if isinstance(x, str):
 			node = find(x)
-		else:
+		elif hasattr(x, 'name'):
 			node = x
+		else:
+			tmp.extend(self.to_nodes(x))
+			continue
 		if not node:
-			raise Errors.WafError("source not found: %r in %r" % (x, self))
+			raise Errors.WafError('source not found: %r in %r' % (x, self))
 		tmp.append(node)
 	return tmp
 
 @feature('*')
 def process_source(self):
 	"""
-	Process each element in the attribute ``source`` by extension.
+	Processes each element in the attribute ``source`` by extension.
 
 	#. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first.
 	#. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension`
@@ -531,10 +545,29 @@ def process_source(self):
 @before_method('process_source')
 def process_rule(self):
 	"""
-	Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::
+	Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::
 
 		def build(bld):
 			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
+
+	Main attributes processed:
+
+	* rule: command to execute, it can be a tuple of strings for multiple commands
+	* chmod: permissions for the resulting files (integer value such as Utils.O755)
+	* shell: set to False to execute the command directly (default is True to use a shell)
+	* scan: scanner function
+	* vars: list of variables to trigger rebuilds, such as CFLAGS
+	* cls_str: string to display when executing the task
+	* cls_keyword: label to display when executing the task
+	* cache_rule: by default, try to re-use similar classes, set to False to disable
+	* source: list of Node or string objects representing the source files required by this task
+	* target: list of Node or string objects representing the files that this task creates
+	* cwd: current working directory (Node or string)
+	* stdout: standard output, set to None to prevent waf from capturing the text
+	* stderr: standard error, set to None to prevent waf from capturing the text
+	* timeout: timeout for command execution (Python 3)
+	* always: whether to always run the command (False by default)
+	* deep_inputs: whether the task must depend on the input file tasks too (False by default)
 	"""
 	if not getattr(self, 'rule', None):
 		return
@@ -548,44 +581,85 @@ def process_rule(self):
 	except AttributeError:
 		cache = self.bld.cache_rule_attr = {}
 
+	chmod = getattr(self, 'chmod', None)
+	shell = getattr(self, 'shell', True)
+	color = getattr(self, 'color', 'BLUE')
+	scan = getattr(self, 'scan', None)
+	_vars = getattr(self, 'vars', [])
+	cls_str = getattr(self, 'cls_str', None)
+	cls_keyword = getattr(self, 'cls_keyword', None)
+	use_cache = getattr(self, 'cache_rule', 'True')
+	deep_inputs = getattr(self, 'deep_inputs', False)
+
+	scan_val = has_deps = hasattr(self, 'deps')
+	if scan:
+		scan_val = id(scan)
+
+	key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars, deep_inputs))
+
 	cls = None
-	if getattr(self, 'cache_rule', 'True'):
+	if use_cache:
 		try:
-			cls = cache[(name, self.rule)]
+			cls = cache[key]
 		except KeyError:
 			pass
 	if not cls:
-		cls = Task.task_factory(name, self.rule,
-			getattr(self, 'vars', []),
-			shell=getattr(self, 'shell', True), color=getattr(self, 'color', 'BLUE'),
-			scan = getattr(self, 'scan', None))
-		if getattr(self, 'scan', None):
+		rule = self.rule
+		if chmod is not None:
+			def chmod_fun(tsk):
+				for x in tsk.outputs:
+					os.chmod(x.abspath(), tsk.generator.chmod)
+			if isinstance(rule, tuple):
+				rule = list(rule)
+				rule.append(chmod_fun)
+				rule = tuple(rule)
+			else:
+				rule = (rule, chmod_fun)
+
+		cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)
+
+		if cls_str:
+			if isinstance(cls_str, str):
+				raise ValueError('cls_str should be a function %r' % self)
+			setattr(cls, '__str__', self.cls_str)
+
+		if cls_keyword:
+			if isinstance(cls_keyword, str):
+				raise ValueError('cls_keyword should be a function %r' % self)
+			setattr(cls, 'keyword', self.cls_keyword)
+
+		if deep_inputs:
+			Task.deep_inputs(cls)
+
+		if scan:
 			cls.scan = self.scan
-		elif getattr(self, 'deps', None):
+		elif has_deps:
 			def scan(self):
-				nodes = []
-				for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
-					node = self.generator.path.find_resource(x)
-					if not node:
-						self.generator.bld.fatal('Could not find %r (was it declared?)' % x)
-					nodes.append(node)
+				deps = getattr(self.generator, 'deps', None)
+				nodes = self.generator.to_nodes(deps)
 				return [nodes, []]
 			cls.scan = scan
 
-		if getattr(self, 'update_outputs', None):
-			Task.update_outputs(cls)
+		if use_cache:
+			cache[key] = cls
+
+	# now create one instance
+	tsk = self.create_task(name)
 
-		if getattr(self, 'always', None):
-			Task.always_run(cls)
+	for x in ('after', 'before', 'ext_in', 'ext_out'):
+		setattr(tsk, x, getattr(self, x, []))
 
-		for x in ['after', 'before', 'ext_in', 'ext_out']:
-			setattr(cls, x, getattr(self, x, []))
+	if hasattr(self, 'stdout'):
+		tsk.stdout = self.stdout
 
-		if getattr(self, 'cache_rule', 'True'):
-			cache[(name, self.rule)] = cls
+	if hasattr(self, 'stderr'):
+		tsk.stderr = self.stderr
 
-	# now create one instance
-	tsk = self.create_task(name)
+	if getattr(self, 'timeout', None):
+		tsk.timeout = self.timeout
+
+	if getattr(self, 'always', None):
+		tsk.always_run = True
 
 	if getattr(self, 'target', None):
 		if isinstance(self.target, str):
@@ -599,10 +673,8 @@ def process_rule(self):
 				x.parent.mkdir() # if a node was given, create the required folders
 				tsk.outputs.append(x)
 		if getattr(self, 'install_path', None):
-			# from waf 1.5
-			# although convenient, it does not 1. allow to name the target file and 2. symlinks
-			# TODO remove in waf 1.7
-			self.bld.install_files(self.install_path, tsk.outputs)
+			self.install_task = self.add_install_files(install_to=self.install_path,
+				install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))
 
 	if getattr(self, 'source', None):
 		tsk.inputs = self.to_nodes(self.source)
@@ -612,10 +684,16 @@ def process_rule(self):
 	if getattr(self, 'cwd', None):
 		tsk.cwd = self.cwd
 
+	if isinstance(tsk.run, functools.partial):
+		# Python documentation says: "partial objects defined in classes
+		# behave like static methods and do not transform into bound
+		# methods during instance attribute look-up."
+		tsk.run = functools.partial(tsk.run, tsk)
+
 @feature('seq')
 def sequence_order(self):
 	"""
-	Add a strict sequential constraint between the tasks generated by task generators.
+	Adds a strict sequential constraint between the tasks generated by task generators.
 	It works because task generators are posted in order.
 	It will not post objects which belong to other folders.
 
@@ -649,32 +727,44 @@ def sequence_order(self):
 	self.bld.prev = self
 
 
-re_m4 = re.compile('@(\w+)@', re.M)
+re_m4 = re.compile(r'@(\w+)@', re.M)
 
 class subst_pc(Task.Task):
 	"""
-	Create *.pc* files from *.pc.in*. The task is executed whenever an input variable used
+	Creates *.pc* files from *.pc.in*. The task is executed whenever an input variable used
 	in the substitution changes.
 	"""
 
+	def force_permissions(self):
+		"Private for the time being, we will probably refactor this into run_str=[run1,chmod]"
+		if getattr(self.generator, 'chmod', None):
+			for x in self.outputs:
+				os.chmod(x.abspath(), self.generator.chmod)
+
 	def run(self):
 		"Substitutes variables in a .in file"
 
 		if getattr(self.generator, 'is_copy', None):
-			self.outputs[0].write(self.inputs[0].read('rb'), 'wb')
-			if getattr(self.generator, 'chmod', None):
-				os.chmod(self.outputs[0].abspath(), self.generator.chmod)
+			for i, x in enumerate(self.outputs):
+				x.write(self.inputs[i].read('rb'), 'wb')
+				stat = os.stat(self.inputs[i].abspath()) # Preserve mtime of the copy
+				os.utime(self.outputs[i].abspath(), (stat.st_atime, stat.st_mtime))
+			self.force_permissions()
 			return None
 
 		if getattr(self.generator, 'fun', None):
-			self.generator.fun(self)
+			ret = self.generator.fun(self)
+			if not ret:
+				self.force_permissions()
+			return ret
 
-		code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
+		code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'latin-1'))
 		if getattr(self.generator, 'subst_fun', None):
 			code = self.generator.subst_fun(self, code)
-			if code:
-				self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
-			return
+			if code is not None:
+				self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
+			self.force_permissions()
+			return None
 
 		# replace all % by %% to prevent errors by % signs
 		code = code.replace('%', '%%')
@@ -687,26 +777,31 @@ class subst_pc(Task.Task):
 				lst.append(g(1))
 				return "%%(%s)s" % g(1)
 			return ''
-		code = re_m4.sub(repl, code)
+		code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)
 
 		try:
 			d = self.generator.dct
 		except AttributeError:
 			d = {}
 			for x in lst:
-				tmp = getattr(self.generator, x, '') or self.env.get_flat(x) or self.env.get_flat(x.upper())
-				d[x] = str(tmp)
+				tmp = getattr(self.generator, x, '') or self.env[x] or self.env[x.upper()]
+				try:
+					tmp = ''.join(tmp)
+				except TypeError:
+					tmp = str(tmp)
+				d[x] = tmp
 
 		code = code % d
-		self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
-		self.generator.bld.raw_deps[self.uid()] = self.dep_vars = lst
+		self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
+		self.generator.bld.raw_deps[self.uid()] = lst
 
 		# make sure the signature is updated
-		try: delattr(self, 'cache_sig')
-		except AttributeError: pass
+		try:
+			delattr(self, 'cache_sig')
+		except AttributeError:
+			pass
 
-		if getattr(self.generator, 'chmod', None):
-			os.chmod(self.outputs[0].abspath(), self.generator.chmod)
+		self.force_permissions()
 
 	def sig_vars(self):
 		"""
@@ -736,13 +831,14 @@ class subst_pc(Task.Task):
 @extension('.pc.in')
 def add_pcfile(self, node):
 	"""
-	Process *.pc.in* files to *.pc*. Install the results to ``${PREFIX}/lib/pkgconfig/``
+	Processes *.pc.in* files to *.pc*. Installs the results to ``${PREFIX}/lib/pkgconfig/`` by default::
 
 		def build(bld):
 			bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/')
 	"""
 	tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in'))
-	self.bld.install_files(getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), tsk.outputs)
+	self.install_task = self.add_install_files(
+		install_to=getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), install_from=tsk.outputs)
 
 class subst(subst_pc):
 	pass
@@ -751,7 +847,7 @@ class subst(subst_pc):
 @before_method('process_source', 'process_rule')
 def process_subst(self):
 	"""
-	Define a transformation that substitutes the contents of *source* files to *target* files::
+	Defines a transformation that substitutes the contents of *source* files to *target* files::
 
 		def build(bld):
 			bld(
@@ -786,7 +882,6 @@ def process_subst(self):
 			a = self.path.find_node(x)
 			b = self.path.get_bld().make_node(y)
 			if not os.path.isfile(b.abspath()):
-				b.sig = None
 				b.parent.mkdir()
 		else:
 			if isinstance(x, str):
@@ -799,23 +894,24 @@ def process_subst(self):
 				b = y
 
 		if not a:
-			raise Errors.WafError('cound not find %r for %r' % (x, self))
+			raise Errors.WafError('could not find %r for %r' % (x, self))
 
-		has_constraints = False
 		tsk = self.create_task('subst', a, b)
 		for k in ('after', 'before', 'ext_in', 'ext_out'):
 			val = getattr(self, k, None)
 			if val:
-				has_constraints = True
 				setattr(tsk, k, val)
 
 		# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
-		if not has_constraints and b.name.endswith('.h'):
-			tsk.before = [k for k in ('c', 'cxx') if k in Task.classes]
+		for xt in HEADER_EXTS:
+			if b.name.endswith(xt):
+				tsk.ext_out = tsk.ext_out + ['.h']
+				break
 
 		inst_to = getattr(self, 'install_path', None)
 		if inst_to:
-			self.bld.install_files(inst_to, b, chmod=getattr(self, 'chmod', Utils.O644))
+			self.install_task = self.add_install_files(install_to=inst_to,
+				install_from=b, chmod=getattr(self, 'chmod', Utils.O644))
 
 	self.source = []
 

+ 1 - 1
sdk/waf/waflib/Tools/__init__.py

@@ -1,3 +1,3 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)

+ 5 - 3
sdk/waf/waflib/Tools/ar.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 # Ralf Habacker, 2006 (rh)
 
 """
@@ -16,7 +16,9 @@ def find_ar(conf):
 	conf.load('ar')
 
 def configure(conf):
-	"""Find the ar program and set the default flags in ``conf.env.ARFLAGS``"""
+	"""Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``"""
 	conf.find_program('ar', var='AR')
-	conf.env.ARFLAGS = 'rcs'
+	conf.add_os_flags('ARFLAGS')
+	if not conf.env.ARFLAGS:
+		conf.env.ARFLAGS = ['rcs']
 

+ 44 - 12
sdk/waf/waflib/Tools/asm.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2008-2010 (ita)
+# Thomas Nagy, 2008-2018 (ita)
 
 """
 Assembly support, used by tools such as gas and nasm
@@ -34,23 +34,54 @@ Support for pure asm programs and libraries should also work::
 			target = 'asmtest')
 """
 
-import os, sys
-from waflib import Task, Utils
-import waflib.Task
+import re
+from waflib import Errors, Logs, Task
 from waflib.Tools.ccroot import link_task, stlink_task
-from waflib.TaskGen import extension, feature
+from waflib.TaskGen import extension
+from waflib.Tools import c_preproc
+
+re_lines = re.compile(
+	'^[ \t]*(?:%)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef)[ \t]*(.*)\r*$',
+	re.IGNORECASE | re.MULTILINE)
+
+class asm_parser(c_preproc.c_parser):
+	def filter_comments(self, node):
+		code = node.read()
+		code = c_preproc.re_nl.sub('', code)
+		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+		return re_lines.findall(code)
 
 class asm(Task.Task):
 	"""
-	Compile asm files by gas/nasm/yasm/...
+	Compiles asm files by gas/nasm/yasm/...
 	"""
 	color = 'BLUE'
-	run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+	run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${ASMDEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+
+	def scan(self):
+		if self.env.ASM_NAME == 'gas':
+			return c_preproc.scan(self)
+		elif self.env.ASM_NAME == 'nasm':
+			Logs.warn('The Nasm dependency scanner is incomplete!')
+
+		try:
+			incn = self.generator.includes_nodes
+		except AttributeError:
+			raise Errors.WafError('%r is missing the "asm" feature' % self.generator)
+
+		if c_preproc.go_absolute:
+			nodepaths = incn
+		else:
+			nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)]
+
+		tmp = asm_parser(nodepaths)
+		tmp.start(self.inputs[0], self.env)
+		return (tmp.nodes, tmp.names)
 
 @extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
 def asm_hook(self, node):
 	"""
-	Bind the asm extension to the asm task
+	Binds the asm extension to the asm task
 
 	:param node: input file
 	:type node: :py:class:`waflib.Node.Node`
@@ -58,18 +89,19 @@ def asm_hook(self, node):
 	return self.create_compiled_task('asm', node)
 
 class asmprogram(link_task):
-	"Link object files into a c program"
+	"Links object files into a c program"
 	run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
 	ext_out = ['.bin']
 	inst_to = '${BINDIR}'
 
 class asmshlib(asmprogram):
-	"Link object files into a c shared library"
+	"Links object files into a c shared library"
 	inst_to = '${LIBDIR}'
 
 class asmstlib(stlink_task):
-	"Link object files into a c static library"
+	"Links object files into a c static library"
 	pass # do not remove
 
 def configure(conf):
-	conf.env['ASMPATH_ST'] = '-I%s'
+	conf.env.ASMPATH_ST = '-I%s'
+	conf.env.ASMDEFINES_ST = '-D%s'

+ 6 - 6
sdk/waf/waflib/Tools/bison.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # John O'Meara, 2006
-# Thomas Nagy 2009-2010 (ita)
+# Thomas Nagy 2009-2018 (ita)
 
 """
 The **bison** program is a code generator which creates C or C++ files.
@@ -12,7 +12,7 @@ from waflib import Task
 from waflib.TaskGen import extension
 
 class bison(Task.Task):
-	"""Compile bison files"""
+	"""Compiles bison files"""
 	color   = 'BLUE'
 	run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
 	ext_out = ['.h'] # just to make sure
@@ -20,9 +20,9 @@ class bison(Task.Task):
 @extension('.y', '.yc', '.yy')
 def big_bison(self, node):
 	"""
-	Create a bison task, which must be executed from the directory of the output file.
+	Creates a bison task, which must be executed from the directory of the output file.
 	"""
-	has_h = '-d' in self.env['BISONFLAGS']
+	has_h = '-d' in self.env.BISONFLAGS
 
 	outs = []
 	if node.name.endswith('.yc'):
@@ -35,14 +35,14 @@ def big_bison(self, node):
 			outs.append(node.change_ext('.tab.h'))
 
 	tsk = self.create_task('bison', node, outs)
-	tsk.cwd = node.parent.get_bld().abspath()
+	tsk.cwd = node.parent.get_bld()
 
 	# and the c/cxx file must be compiled too
 	self.source.append(outs[0])
 
 def configure(conf):
 	"""
-	Detect the *bison* program
+	Detects the *bison* program
 	"""
 	conf.find_program('bison', var='BISON')
 	conf.env.BISONFLAGS = ['-d']

+ 11 - 9
sdk/waf/waflib/Tools/c.py

@@ -1,37 +1,39 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 
 "Base for c programs/libraries"
 
-from waflib import TaskGen, Task, Utils
+from waflib import TaskGen, Task
 from waflib.Tools import c_preproc
 from waflib.Tools.ccroot import link_task, stlink_task
 
 @TaskGen.extension('.c')
 def c_hook(self, node):
-	"Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
+	"Binds the c file extension to create :py:class:`waflib.Tools.c.c` instances"
+	if not self.env.CC and self.env.CXX:
+		return self.create_compiled_task('cxx', node)
 	return self.create_compiled_task('c', node)
 
 class c(Task.Task):
-	"Compile C files into object files"
-	run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
+	"Compiles C files into object files"
+	run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
 	vars    = ['CCDEPS'] # unused variable to depend on, just in case
 	ext_in  = ['.h'] # set the build order easily by using ext_out=['.h']
 	scan    = c_preproc.scan
 
 class cprogram(link_task):
-	"Link object files into a c program"
-	run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
+	"Links object files into c programs"
+	run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
 	ext_out = ['.bin']
 	vars    = ['LINKDEPS']
 	inst_to = '${BINDIR}'
 
 class cshlib(cprogram):
-	"Link object files into a c shared library"
+	"Links object files into c shared libraries"
 	inst_to = '${LIBDIR}'
 
 class cstlib(stlink_task):
-	"Link object files into a c static library"
+	"Links object files into c static libraries"
 	pass # do not remove
 

+ 42 - 24
sdk/waf/waflib/Tools/c_aliases.py

@@ -1,15 +1,16 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2015 (ita)
 
 "base for all c/c++ programs and libraries"
 
-import os, sys, re
-from waflib import Utils, Build
+from waflib import Utils, Errors
 from waflib.Configure import conf
 
 def get_extensions(lst):
 	"""
+	Returns the file extensions for the list of files given as input
+
 	:param lst: files to process
 	:list lst: list of string or :py:class:`waflib.Node.Node`
 	:return: list of file extensions
@@ -17,17 +18,15 @@ def get_extensions(lst):
 	"""
 	ret = []
 	for x in Utils.to_list(lst):
-		try:
-			if not isinstance(x, str):
-				x = x.name
-			ret.append(x[x.rfind('.') + 1:])
-		except Exception:
-			pass
+		if not isinstance(x, str):
+			x = x.name
+		ret.append(x[x.rfind('.') + 1:])
 	return ret
 
 def sniff_features(**kw):
 	"""
-	Look at the source files and return the features for a task generator (mainly cc and cxx)::
+	Computes and returns the features required for a task generator by
+	looking at the file extensions. This aimed for C/C++ mainly::
 
 		snif_features(source=['foo.c', 'foo.cxx'], type='shlib')
 		# returns  ['cxx', 'c', 'cxxshlib', 'cshlib']
@@ -39,35 +38,54 @@ def sniff_features(**kw):
 	:return: the list of features for a task generator processing the source files
 	:rtype: list of string
 	"""
-	exts = get_extensions(kw['source'])
-	type = kw['_type']
+	exts = get_extensions(kw.get('source', []))
+	typ = kw['typ']
 	feats = []
 
 	# watch the order, cxx will have the precedence
-	if 'cxx' in exts or 'cpp' in exts or 'c++' in exts or 'cc' in exts or 'C' in exts:
-		feats.append('cxx')
-
-	if 'c' in exts or 'vala' in exts:
+	for x in 'cxx cpp c++ cc C'.split():
+		if x in exts:
+			feats.append('cxx')
+			break
+	if 'c' in exts or 'vala' in exts or 'gs' in exts:
 		feats.append('c')
 
+	if 's' in exts or 'S' in exts:
+		feats.append('asm')
+
+	for x in 'f f90 F F90 for FOR'.split():
+		if x in exts:
+			feats.append('fc')
+			break
+
 	if 'd' in exts:
 		feats.append('d')
 
 	if 'java' in exts:
 		feats.append('java')
-
-	if 'java' in exts:
 		return 'java'
 
-	if type in ['program', 'shlib', 'stlib']:
+	if typ in ('program', 'shlib', 'stlib'):
+		will_link = False
 		for x in feats:
-			if x in ['cxx', 'd', 'c']:
-				feats.append(x + type)
-
+			if x in ('cxx', 'd', 'fc', 'c', 'asm'):
+				feats.append(x + typ)
+				will_link = True
+		if not will_link and not kw.get('features', []):
+			raise Errors.WafError('Unable to determine how to link %r, try adding eg: features="c cshlib"?' % kw)
 	return feats
 
-def set_features(kw, _type):
-	kw['_type'] = _type
+def set_features(kw, typ):
+	"""
+	Inserts data in the input dict *kw* based on existing data and on the type of target
+	required (typ).
+
+	:param kw: task generator parameters
+	:type kw: dict
+	:param typ: type of target
+	:type typ: string
+	"""
+	kw['typ'] = typ
 	kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))
 
 @conf

File diff suppressed because it is too large
+ 313 - 363
sdk/waf/waflib/Tools/c_config.py


+ 42 - 37
sdk/waf/waflib/Tools/c_osx.py

@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy 2008-2010
+# Thomas Nagy 2008-2018 (ita)
 
 """
 MacOSX related tools
 """
 
-import os, shutil, sys, platform
-from waflib import TaskGen, Task, Build, Options, Utils, Errors
+import os, shutil, platform
+from waflib import Task, Utils
 from waflib.TaskGen import taskgen_method, feature, after_method, before_method
 
 app_info = '''
@@ -24,7 +24,7 @@ app_info = '''
 	<key>NOTE</key>
 	<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
 	<key>CFBundleExecutable</key>
-	<string>%s</string>
+	<string>{app_name}</string>
 </dict>
 </plist>
 '''
@@ -37,8 +37,8 @@ def set_macosx_deployment_target(self):
 	"""
 	see WAF issue 285 and also and also http://trac.macports.org/ticket/17059
 	"""
-	if self.env['MACOSX_DEPLOYMENT_TARGET']:
-		os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
+	if self.env.MACOSX_DEPLOYMENT_TARGET:
+		os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET
 	elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
 		if Utils.unversioned_sys_platform() == 'darwin':
 			os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
@@ -46,9 +46,8 @@ def set_macosx_deployment_target(self):
 @taskgen_method
 def create_bundle_dirs(self, name, out):
 	"""
-	Create bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
+	Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
 	"""
-	bld = self.bld
 	dir = out.parent.find_or_declare(name)
 	dir.mkdir()
 	macos = dir.find_or_declare(['Contents', 'MacOS'])
@@ -71,7 +70,7 @@ def create_task_macapp(self):
 	To compile an executable into a Mac application (a .app), set its *mac_app* attribute::
 
 		def build(bld):
-			bld.shlib(source='a.c', target='foo', mac_app = True)
+			bld.shlib(source='a.c', target='foo', mac_app=True)
 
 	To force *all* executables to be transformed into Mac applications::
 
@@ -79,7 +78,7 @@ def create_task_macapp(self):
 			bld.env.MACAPP = True
 			bld.shlib(source='a.c', target='foo')
 	"""
-	if self.env['MACAPP'] or getattr(self, 'mac_app', False):
+	if self.env.MACAPP or getattr(self, 'mac_app', False):
 		out = self.link_task.outputs[0]
 
 		name = bundle_name_for_output(out)
@@ -89,37 +88,33 @@ def create_task_macapp(self):
 
 		self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
 		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
-		self.bld.install_files(inst_to, n1, chmod=Utils.O755)
-
-		if getattr(self, 'mac_resources', None):
+		self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755)
+
+		if getattr(self, 'mac_files', None):
+			# this only accepts files; they will be installed as seen from mac_files_root
+			mac_files_root = getattr(self, 'mac_files_root', None)
+			if isinstance(mac_files_root, str):
+				mac_files_root = self.path.find_node(mac_files_root)
+				if not mac_files_root:
+					self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root)
 			res_dir = n1.parent.parent.make_node('Resources')
 			inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
-			for x in self.to_list(self.mac_resources):
-				node = self.path.find_node(x)
-				if not node:
-					raise Errors.WafError('Missing mac_resource %r in %r' % (x, self))
-
-				parent = node.parent
-				if os.path.isdir(node.abspath()):
-					nodes = node.ant_glob('**')
-				else:
-					nodes = [node]
-				for node in nodes:
-					rel = node.path_from(parent)
-					tsk = self.create_task('macapp', node, res_dir.make_node(rel))
-					self.bld.install_as(inst_to + '/%s' % rel, node)
+			for node in self.to_nodes(self.mac_files):
+				relpath = node.path_from(mac_files_root or node.parent)
+				self.create_task('macapp', node, res_dir.make_node(relpath))
+				self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node)
 
 		if getattr(self.bld, 'is_install', None):
-			# disable the normal binary installation
+			# disable regular binary installation
 			self.install_task.hasrun = Task.SKIP_ME
 
 @feature('cprogram', 'cxxprogram')
 @after_method('apply_link')
 def create_task_macplist(self):
 	"""
-	Create a :py:class:`waflib.Tools.c_osx.macplist` instance.
+	Creates a :py:class:`waflib.Tools.c_osx.macplist` instance.
 	"""
-	if  self.env['MACAPP'] or getattr(self, 'mac_app', False):
+	if  self.env.MACAPP or getattr(self, 'mac_app', False):
 		out = self.link_task.outputs[0]
 
 		name = bundle_name_for_output(out)
@@ -127,6 +122,14 @@ def create_task_macplist(self):
 		dir = self.create_bundle_dirs(name, out)
 		n1 = dir.find_or_declare(['Contents', 'Info.plist'])
 		self.plisttask = plisttask = self.create_task('macplist', [], n1)
+		plisttask.context = {
+			'app_name': self.link_task.outputs[0].name,
+			'env': self.env
+		}
+
+		plist_ctx = getattr(self, 'plist_context', None)
+		if (plist_ctx):
+			plisttask.context.update(plist_ctx)
 
 		if getattr(self, 'mac_plist', False):
 			node = self.path.find_resource(self.mac_plist)
@@ -135,10 +138,10 @@ def create_task_macplist(self):
 			else:
 				plisttask.code = self.mac_plist
 		else:
-			plisttask.code = app_info % self.link_task.outputs[0].name
+			plisttask.code = app_info
 
 		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
-		self.bld.install_files(inst_to, n1)
+		self.add_install_files(install_to=inst_to, install_from=n1)
 
 @feature('cshlib', 'cxxshlib')
 @before_method('apply_link', 'propagate_uselib_vars')
@@ -155,9 +158,9 @@ def apply_bundle(self):
 			bld.env.MACBUNDLE = True
 			bld.shlib(source='a.c', target='foo')
 	"""
-	if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
-		self.env['LINKFLAGS_cshlib'] = self.env['LINKFLAGS_cxxshlib'] = [] # disable the '-dynamiclib' flag
-		self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['macbundle_PATTERN']
+	if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False):
+		self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag
+		self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN
 		use = self.use = self.to_list(getattr(self, 'use', []))
 		if not 'MACBUNDLE' in use:
 			use.append('MACBUNDLE')
@@ -166,7 +169,7 @@ app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
 
 class macapp(Task.Task):
 	"""
-	Create mac applications
+	Creates mac applications
 	"""
 	color = 'PINK'
 	def run(self):
@@ -175,7 +178,7 @@ class macapp(Task.Task):
 
 class macplist(Task.Task):
 	"""
-	Create plist files
+	Creates plist files
 	"""
 	color = 'PINK'
 	ext_in = ['.bin']
@@ -184,5 +187,7 @@ class macplist(Task.Task):
 			txt = self.code
 		else:
 			txt = self.inputs[0].read()
+		context = getattr(self, 'context', {})
+		txt = txt.format(**context)
 		self.outputs[0].write(txt)
 

+ 280 - 218
sdk/waf/waflib/Tools/c_preproc.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 
 """
 C/C++ preprocessor for finding dependencies
@@ -28,11 +28,13 @@ A dumb preprocessor is also available in the tool *c_dumbpreproc*
 
 import re, string, traceback
 from waflib import Logs, Utils, Errors
-from waflib.Logs import debug, error
 
 class PreprocError(Errors.WafError):
 	pass
 
+FILE_CACHE_SIZE = 100000
+LINE_CACHE_SIZE = 100000
+
 POPFILE = '-'
 "Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously"
 
@@ -42,43 +44,44 @@ recursion_limit = 150
 go_absolute = False
 "Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"
 
-standard_includes = ['/usr/include']
+standard_includes = ['/usr/local/include', '/usr/include']
 if Utils.is_win32:
 	standard_includes = []
 
 use_trigraphs = 0
 """Apply trigraph rules (False by default)"""
 
+# obsolete, do not use
 strict_quotes = 0
-"""Reserve the "#include <>" quotes for system includes (do not search for those includes). False by default."""
 
 g_optrans = {
 'not':'!',
+'not_eq':'!',
 'and':'&&',
-'bitand':'&',
 'and_eq':'&=',
 'or':'||',
-'bitor':'|',
 'or_eq':'|=',
 'xor':'^',
 'xor_eq':'^=',
+'bitand':'&',
+'bitor':'|',
 'compl':'~',
 }
 """Operators such as and/or/xor for c++. Set an empty dict to disable."""
 
 # ignore #warning and #error
 re_lines = re.compile(
-	'^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
+	'^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
 	re.IGNORECASE | re.MULTILINE)
 """Match #include lines"""
 
-re_mac = re.compile("^[a-zA-Z_]\w*")
+re_mac = re.compile(r"^[a-zA-Z_]\w*")
 """Match macro definitions"""
 
 re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
 """Match macro functions"""
 
-re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
+re_pragma_once = re.compile(r'^\s*once\s*', re.IGNORECASE)
 """Match #pragma once statements"""
 
 re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
@@ -136,54 +139,22 @@ skipped   = 's'
 
 def repl(m):
 	"""Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`"""
-	s = m.group(0)
-	if s.startswith('/'):
+	s = m.group()
+	if s[0] == '/':
 		return ' '
 	return s
 
-def filter_comments(filename):
-	"""
-	Filter the comments from a c/h file, and return the preprocessor lines.
-	The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.
-
-	:return: the preprocessor directives as a list of (keyword, line)
-	:rtype: a list of string pairs
-	"""
-	# return a list of tuples : keyword, line
-	code = Utils.readf(filename)
-	if use_trigraphs:
-		for (a, b) in trig_def: code = code.split(a).join(b)
-	code = re_nl.sub('', code)
-	code = re_cpp.sub(repl, code)
-	return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
-
 prec = {}
 """
-Operator precendence rules required for parsing expressions of the form::
+Operator precedence rules required for parsing expressions of the form::
 
 	#if 1 && 2 != 0
 """
 ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
-for x in range(len(ops)):
-	syms = ops[x]
+for x, syms in enumerate(ops):
 	for u in syms.split():
 		prec[u] = x
 
-def trimquotes(s):
-	"""
-	Remove the single quotes around an expression::
-
-		trimquotes("'test'") == "test"
-
-	:param s: expression to transform
-	:type s: string
-	:rtype: string
-	"""
-	if not s: return ''
-	s = s.rstrip()
-	if s[0] == "'" and s[-1] == "'": return s[1:-1]
-	return s
-
 def reduce_nums(val_1, val_2, val_op):
 	"""
 	Apply arithmetic rules to compute a result
@@ -199,32 +170,56 @@ def reduce_nums(val_1, val_2, val_op):
 	#print val_1, val_2, val_op
 
 	# now perform the operation, make certain a and b are numeric
-	try:    a = 0 + val_1
-	except TypeError: a = int(val_1)
-	try:    b = 0 + val_2
-	except TypeError: b = int(val_2)
+	try:
+		a = 0 + val_1
+	except TypeError:
+		a = int(val_1)
+	try:
+		b = 0 + val_2
+	except TypeError:
+		b = int(val_2)
 
 	d = val_op
-	if d == '%':  c = a%b
-	elif d=='+':  c = a+b
-	elif d=='-':  c = a-b
-	elif d=='*':  c = a*b
-	elif d=='/':  c = a/b
-	elif d=='^':  c = a^b
-	elif d=='|':  c = a|b
-	elif d=='||': c = int(a or b)
-	elif d=='&':  c = a&b
-	elif d=='&&': c = int(a and b)
-	elif d=='==': c = int(a == b)
-	elif d=='!=': c = int(a != b)
-	elif d=='<=': c = int(a <= b)
-	elif d=='<':  c = int(a < b)
-	elif d=='>':  c = int(a > b)
-	elif d=='>=': c = int(a >= b)
-	elif d=='^':  c = int(a^b)
-	elif d=='<<': c = a<<b
-	elif d=='>>': c = a>>b
-	else: c = 0
+	if d == '%':
+		c = a % b
+	elif d=='+':
+		c = a + b
+	elif d=='-':
+		c = a - b
+	elif d=='*':
+		c = a * b
+	elif d=='/':
+		c = a / b
+	elif d=='^':
+		c = a ^ b
+	elif d=='==':
+		c = int(a == b)
+	elif d=='|'  or d == 'bitor':
+		c = a | b
+	elif d=='||' or d == 'or' :
+		c = int(a or b)
+	elif d=='&'  or d == 'bitand':
+		c = a & b
+	elif d=='&&' or d == 'and':
+		c = int(a and b)
+	elif d=='!=' or d == 'not_eq':
+		c = int(a != b)
+	elif d=='^'  or d == 'xor':
+		c = int(a^b)
+	elif d=='<=':
+		c = int(a <= b)
+	elif d=='<':
+		c = int(a < b)
+	elif d=='>':
+		c = int(a > b)
+	elif d=='>=':
+		c = int(a >= b)
+	elif d=='<<':
+		c = a << b
+	elif d=='>>':
+		c = a >> b
+	else:
+		c = 0
 	return c
 
 def get_num(lst):
@@ -236,7 +231,8 @@ def get_num(lst):
 	:return: a pair containing the number and the rest of the list
 	:rtype: tuple(value, list)
 	"""
-	if not lst: raise PreprocError("empty list for get_num")
+	if not lst:
+		raise PreprocError('empty list for get_num')
 	(p, v) = lst[0]
 	if p == OP:
 		if v == '(':
@@ -254,7 +250,7 @@ def get_num(lst):
 						count_par += 1
 				i += 1
 			else:
-				raise PreprocError("rparen expected %r" % lst)
+				raise PreprocError('rparen expected %r' % lst)
 
 			(num, _) = get_term(lst[1:i])
 			return (num, lst[i+1:])
@@ -271,14 +267,14 @@ def get_num(lst):
 			num, lst = get_num(lst[1:])
 			return (~ int(num), lst)
 		else:
-			raise PreprocError("Invalid op token %r for get_num" % lst)
+			raise PreprocError('Invalid op token %r for get_num' % lst)
 	elif p == NUM:
 		return v, lst[1:]
 	elif p == IDENT:
 		# all macros should have been replaced, remaining identifiers eval to 0
 		return 0, lst[1:]
 	else:
-		raise PreprocError("Invalid token %r for get_num" % lst)
+		raise PreprocError('Invalid token %r for get_num' % lst)
 
 def get_term(lst):
 	"""
@@ -292,7 +288,8 @@ def get_term(lst):
 	:rtype: value, list
 	"""
 
-	if not lst: raise PreprocError("empty list for get_term")
+	if not lst:
+		raise PreprocError('empty list for get_term')
 	num, lst = get_num(lst)
 	if not lst:
 		return (num, [])
@@ -317,7 +314,7 @@ def get_term(lst):
 							break
 				i += 1
 			else:
-				raise PreprocError("rparen expected %r" % lst)
+				raise PreprocError('rparen expected %r' % lst)
 
 			if int(num):
 				return get_term(lst[1:i])
@@ -335,7 +332,7 @@ def get_term(lst):
 			# operator precedence
 			p2, v2 = lst[0]
 			if p2 != OP:
-				raise PreprocError("op expected %r" % lst)
+				raise PreprocError('op expected %r' % lst)
 
 			if prec[v2] >= prec[v]:
 				num2 = reduce_nums(num, num2, v)
@@ -346,7 +343,7 @@ def get_term(lst):
 				return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
 
 
-	raise PreprocError("cannot reduce %r" % lst)
+	raise PreprocError('cannot reduce %r' % lst)
 
 def reduce_eval(lst):
 	"""
@@ -408,8 +405,8 @@ def reduce_tokens(lst, defs, ban=[]):
 	:return: the new list of tokens
 	:rtype: value, list
 	"""
-	i = 0
 
+	i = 0
 	while i < len(lst):
 		(p, v) = lst[i]
 
@@ -431,7 +428,7 @@ def reduce_tokens(lst, defs, ban=[]):
 					else:
 						lst[i] = (NUM, 0)
 				else:
-					raise PreprocError("Invalid define expression %r" % lst)
+					raise PreprocError('Invalid define expression %r' % lst)
 
 		elif p == IDENT and v in defs:
 
@@ -446,8 +443,8 @@ def reduce_tokens(lst, defs, ban=[]):
 				del lst[i]
 				accu = to_add[:]
 				reduce_tokens(accu, defs, ban+[v])
-				for x in range(len(accu)):
-					lst.insert(i, accu[x])
+				for tmp in accu:
+					lst.insert(i, tmp)
 					i += 1
 			else:
 				# collect the arguments for the funcall
@@ -456,11 +453,11 @@ def reduce_tokens(lst, defs, ban=[]):
 				del lst[i]
 
 				if i >= len(lst):
-					raise PreprocError("expected '(' after %r (got nothing)" % v)
+					raise PreprocError('expected ( after %r (got nothing)' % v)
 
 				(p2, v2) = lst[i]
 				if p2 != OP or v2 != '(':
-					raise PreprocError("expected '(' after %r" % v)
+					raise PreprocError('expected ( after %r' % v)
 
 				del lst[i]
 
@@ -475,18 +472,22 @@ def reduce_tokens(lst, defs, ban=[]):
 							one_param.append((p2, v2))
 							count_paren += 1
 						elif v2 == ')':
-							if one_param: args.append(one_param)
+							if one_param:
+								args.append(one_param)
 							break
 						elif v2 == ',':
-							if not one_param: raise PreprocError("empty param in funcall %s" % p)
+							if not one_param:
+								raise PreprocError('empty param in funcall %r' % v)
 							args.append(one_param)
 							one_param = []
 						else:
 							one_param.append((p2, v2))
 					else:
 						one_param.append((p2, v2))
-						if   v2 == '(': count_paren += 1
-						elif v2 == ')': count_paren -= 1
+						if   v2 == '(':
+							count_paren += 1
+						elif v2 == ')':
+							count_paren -= 1
 				else:
 					raise PreprocError('malformed macro')
 
@@ -523,7 +524,6 @@ def reduce_tokens(lst, defs, ban=[]):
 									accu.append((p2, v2))
 									accu.extend(toks)
 							elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
-								# TODO not sure
 								# first collect the tokens
 								va_toks = []
 								st = len(macro_def[0])
@@ -531,7 +531,8 @@ def reduce_tokens(lst, defs, ban=[]):
 								for x in args[pt-st+1:]:
 									va_toks.extend(x)
 									va_toks.append((OP, ','))
-								if va_toks: va_toks.pop() # extra comma
+								if va_toks:
+									va_toks.pop() # extra comma
 								if len(accu)>1:
 									(p3, v3) = accu[-1]
 									(p4, v4) = accu[-2]
@@ -579,8 +580,15 @@ def eval_macro(lst, defs):
 	:rtype: int
 	"""
 	reduce_tokens(lst, defs, [])
-	if not lst: raise PreprocError("missing tokens to evaluate")
-	(p, v) = reduce_eval(lst)
+	if not lst:
+		raise PreprocError('missing tokens to evaluate')
+
+	if lst:
+		p, v = lst[0]
+		if p == IDENT and v not in defs:
+			raise PreprocError('missing macro %r' % lst)
+
+	p, v = reduce_eval(lst)
 	return int(v) != 0
 
 def extract_macro(txt):
@@ -600,7 +608,8 @@ def extract_macro(txt):
 		p, name = t[0]
 
 		p, v = t[1]
-		if p != OP: raise PreprocError("expected open parenthesis")
+		if p != OP:
+			raise PreprocError('expected (')
 
 		i = 1
 		pindex = 0
@@ -619,35 +628,39 @@ def extract_macro(txt):
 				elif p == OP and v == ')':
 					break
 				else:
-					raise PreprocError("unexpected token (3)")
+					raise PreprocError('unexpected token (3)')
 			elif prev == IDENT:
 				if p == OP and v == ',':
 					prev = v
 				elif p == OP and v == ')':
 					break
 				else:
-					raise PreprocError("comma or ... expected")
+					raise PreprocError('comma or ... expected')
 			elif prev == ',':
 				if p == IDENT:
 					params[v] = pindex
 					pindex += 1
 					prev = p
 				elif p == OP and v == '...':
-					raise PreprocError("not implemented (1)")
+					raise PreprocError('not implemented (1)')
 				else:
-					raise PreprocError("comma or ... expected (2)")
+					raise PreprocError('comma or ... expected (2)')
 			elif prev == '...':
-				raise PreprocError("not implemented (2)")
+				raise PreprocError('not implemented (2)')
 			else:
-				raise PreprocError("unexpected else")
+				raise PreprocError('unexpected else')
 
 		#~ print (name, [params, t[i+1:]])
 		return (name, [params, t[i+1:]])
 	else:
 		(p, v) = t[0]
-		return (v, [[], t[1:]])
+		if len(t) > 1:
+			return (v, [[], t[1:]])
+		else:
+			# empty define, assign an empty token
+			return (v, [[], [('T','')]])
 
-re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
+re_include = re.compile(r'^\s*(<(?:.*)>|"(?:.*)")')
 def extract_include(txt, defs):
 	"""
 	Process a line in the form::
@@ -663,24 +676,25 @@ def extract_include(txt, defs):
 	"""
 	m = re_include.search(txt)
 	if m:
-		if m.group('a'): return '<', m.group('a')
-		if m.group('b'): return '"', m.group('b')
+		txt = m.group(1)
+		return txt[0], txt[1:-1]
 
 	# perform preprocessing and look at the result, it must match an include
 	toks = tokenize(txt)
 	reduce_tokens(toks, defs, ['waf_include'])
 
 	if not toks:
-		raise PreprocError("could not parse include %s" % txt)
+		raise PreprocError('could not parse include %r' % txt)
 
 	if len(toks) == 1:
 		if toks[0][0] == STR:
 			return '"', toks[0][1]
 	else:
 		if toks[0][1] == '<' and toks[-1][1] == '>':
-			return stringize(toks).lstrip('<').rstrip('>')
+			ret = '<', stringize(toks).lstrip('<').rstrip('>')
+			return ret
 
-	raise PreprocError("could not parse include %s." % txt)
+	raise PreprocError('could not parse include %r' % txt)
 
 def parse_char(txt):
 	"""
@@ -692,21 +706,26 @@ def parse_char(txt):
 	:rtype: string
 	"""
 
-	if not txt: raise PreprocError("attempted to parse a null char")
+	if not txt:
+		raise PreprocError('attempted to parse a null char')
 	if txt[0] != '\\':
 		return ord(txt)
 	c = txt[1]
 	if c == 'x':
-		if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
+		if len(txt) == 4 and txt[3] in string.hexdigits:
+			return int(txt[2:], 16)
 		return int(txt[2:], 16)
 	elif c.isdigit():
-		if c == '0' and len(txt)==2: return 0
+		if c == '0' and len(txt)==2:
+			return 0
 		for i in 3, 2, 1:
 			if len(txt) > i and txt[1:1+i].isdigit():
 				return (1+i, int(txt[1:1+i], 8))
 	else:
-		try: return chr_esc[c]
-		except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
+		try:
+			return chr_esc[c]
+		except KeyError:
+			raise PreprocError('could not parse char literal %r' % txt)
 
 def tokenize(s):
 	"""
@@ -719,7 +738,6 @@ def tokenize(s):
 	"""
 	return tokenize_private(s)[:] # force a copy of the results
 
-@Utils.run_once
 def tokenize_private(s):
 	ret = []
 	for match in re_clexer.finditer(s):
@@ -728,26 +746,32 @@ def tokenize_private(s):
 			v = m(name)
 			if v:
 				if name == IDENT:
-					try: v = g_optrans[v]; name = OP
-					except KeyError:
-						# c++ specific
-						if v.lower() == "true":
-							v = 1
-							name = NUM
-						elif v.lower() == "false":
-							v = 0
-							name = NUM
+					if v in g_optrans:
+						name = OP
+					elif v.lower() == "true":
+						v = 1
+						name = NUM
+					elif v.lower() == "false":
+						v = 0
+						name = NUM
 				elif name == NUM:
-					if m('oct'): v = int(v, 8)
-					elif m('hex'): v = int(m('hex'), 16)
-					elif m('n0'): v = m('n0')
+					if m('oct'):
+						v = int(v, 8)
+					elif m('hex'):
+						v = int(m('hex'), 16)
+					elif m('n0'):
+						v = m('n0')
 					else:
 						v = m('char')
-						if v: v = parse_char(v)
-						else: v = m('n2') or m('n4')
+						if v:
+							v = parse_char(v)
+						else:
+							v = m('n2') or m('n4')
 				elif name == OP:
-					if v == '%:': v = '#'
-					elif v == '%:%:': v = '##'
+					if v == '%:':
+						v = '#'
+					elif v == '%:%:':
+						v = '##'
 				elif name == STR:
 					# remove the quotes around the string
 					v = v[1:-1]
@@ -755,15 +779,20 @@ def tokenize_private(s):
 				break
 	return ret
 
-@Utils.run_once
-def define_name(line):
-	"""
-	:param line: define line
-	:type line: string
-	:rtype: string
-	:return: the define name
-	"""
-	return re_mac.match(line).group(0)
+def format_defines(lst):
+	ret = []
+	for y in lst:
+		if y:
+			pos = y.find('=')
+			if pos == -1:
+				# "-DFOO" should give "#define FOO 1"
+				ret.append(y)
+			elif pos > 0:
+				# all others are assumed to be -DX=Y
+				ret.append('%s %s' % (y[:pos], y[pos+1:]))
+			else:
+				raise ValueError('Invalid define expression %r' % y)
+	return ret
 
 class c_parser(object):
 	"""
@@ -795,9 +824,12 @@ class c_parser(object):
 		self.curfile = ''
 		"""Current file"""
 
-		self.ban_includes = set([])
+		self.ban_includes = set()
 		"""Includes that must not be read (#pragma once)"""
 
+		self.listed = set()
+		"""Include nodes/names already listed to avoid duplicates in self.nodes/self.names"""
+
 	def cached_find_resource(self, node, filename):
 		"""
 		Find a file from the input directory
@@ -810,13 +842,13 @@ class c_parser(object):
 		:rtype: :py:class:`waflib.Node.Node`
 		"""
 		try:
-			nd = node.ctx.cache_nd
+			cache = node.ctx.preproc_cache_node
 		except AttributeError:
-			nd = node.ctx.cache_nd = {}
+			cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE)
 
-		tup = (node, filename)
+		key = (node, filename)
 		try:
-			return nd[tup]
+			return cache[key]
 		except KeyError:
 			ret = node.find_resource(filename)
 			if ret:
@@ -826,10 +858,10 @@ class c_parser(object):
 					tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
 					if tmp and getattr(tmp, 'children', None):
 						ret = None
-			nd[tup] = ret
+			cache[key] = ret
 			return ret
 
-	def tryfind(self, filename):
+	def tryfind(self, filename, kind='"', env=None):
 		"""
 		Try to obtain a node from the filename based from the include paths. Will add
 		the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
@@ -841,26 +873,72 @@ class c_parser(object):
 		:return: the node if found
 		:rtype: :py:class:`waflib.Node.Node`
 		"""
+		if filename.endswith('.moc'):
+			# we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
+			# in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient.
+			self.names.append(filename)
+			return None
+
 		self.curfile = filename
 
-		# for msvc it should be a for loop on the whole stack
-		found = self.cached_find_resource(self.currentnode_stack[-1], filename)
+		found = None
+		if kind == '"':
+			if env.MSVC_VERSION:
+				for n in reversed(self.currentnode_stack):
+					found = self.cached_find_resource(n, filename)
+					if found:
+						break
+			else:
+				found = self.cached_find_resource(self.currentnode_stack[-1], filename)
 
-		for n in self.nodepaths:
-			if found:
-				break
-			found = self.cached_find_resource(n, filename)
+		if not found:
+			for n in self.nodepaths:
+				found = self.cached_find_resource(n, filename)
+				if found:
+					break
 
+		listed = self.listed
 		if found and not found in self.ban_includes:
-			# TODO the duplicates do not increase the no-op build times too much, but they may be worth removing
-			self.nodes.append(found)
-			if filename[-4:] != '.moc':
-				self.addlines(found)
+			if found not in listed:
+				listed.add(found)
+				self.nodes.append(found)
+			self.addlines(found)
 		else:
-			if not filename in self.names:
+			if filename not in listed:
+				listed.add(filename)
 				self.names.append(filename)
 		return found
 
+	def filter_comments(self, node):
+		"""
+		Filter the comments from a c/h file, and return the preprocessor lines.
+		The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.
+
+		:return: the preprocessor directives as a list of (keyword, line)
+		:rtype: a list of string pairs
+		"""
+		# return a list of tuples : keyword, line
+		code = node.read()
+		if use_trigraphs:
+			for (a, b) in trig_def:
+				code = code.split(a).join(b)
+		code = re_nl.sub('', code)
+		code = re_cpp.sub(repl, code)
+		return re_lines.findall(code)
+
+	def parse_lines(self, node):
+		try:
+			cache = node.ctx.preproc_cache_lines
+		except AttributeError:
+			cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE)
+		try:
+			return cache[node]
+		except KeyError:
+			cache[node] = lines = self.filter_comments(node)
+			lines.append((POPFILE, ''))
+			lines.reverse()
+			return lines
+
 	def addlines(self, node):
 		"""
 		Add the lines from a header in the list of preprocessor lines to parse
@@ -870,34 +948,23 @@ class c_parser(object):
 		"""
 
 		self.currentnode_stack.append(node.parent)
-		filepath = node.abspath()
 
 		self.count_files += 1
 		if self.count_files > recursion_limit:
 			# issue #812
-			raise PreprocError("recursion limit exceeded")
-		pc = self.parse_cache
-		debug('preproc: reading file %r', filepath)
-		try:
-			lns = pc[filepath]
-		except KeyError:
-			pass
-		else:
-			self.lines.extend(lns)
-			return
+			raise PreprocError('recursion limit exceeded')
 
+		if Logs.verbose:
+			Logs.debug('preproc: reading file %r', node)
 		try:
-			lines = filter_comments(filepath)
-			lines.append((POPFILE, ''))
-			lines.reverse()
-			pc[filepath] = lines # cache the lines filtered
-			self.lines.extend(lines)
-		except IOError:
-			raise PreprocError("could not read the file %s" % filepath)
+			lines = self.parse_lines(node)
+		except EnvironmentError:
+			raise PreprocError('could not read the file %r' % node)
 		except Exception:
 			if Logs.verbose > 0:
-				error("parsing %s failed" % filepath)
-				traceback.print_exc()
+				Logs.error('parsing %r failed %s', node, traceback.format_exc())
+		else:
+			self.lines.extend(lines)
 
 	def start(self, node, env):
 		"""
@@ -909,28 +976,16 @@ class c_parser(object):
 		:param env: config set containing additional defines to take into account
 		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
 		"""
-
-		debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
-
-		bld = node.ctx
-		try:
-			self.parse_cache = bld.parse_cache
-		except AttributeError:
-			bld.parse_cache = {}
-			self.parse_cache = bld.parse_cache
+		Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
 
 		self.current_file = node
 		self.addlines(node)
 
 		# macros may be defined on the command-line, so they must be parsed as if they were part of the file
-		if env['DEFINES']:
-			try:
-				lst = ['%s %s' % (x[0], trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in env['DEFINES']]]
-				lst.reverse()
-				self.lines.extend([('define', x) for x in lst])
-			except AttributeError:
-				# if the defines are invalid the compiler will tell the user
-				pass
+		if env.DEFINES:
+			lst = format_defines(env.DEFINES)
+			lst.reverse()
+			self.lines.extend([('define', x) for x in lst])
 
 		while self.lines:
 			(token, line) = self.lines.pop()
@@ -940,8 +995,6 @@ class c_parser(object):
 				continue
 
 			try:
-				ve = Logs.verbose
-				if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
 				state = self.state
 
 				# make certain we define the state if we are about to enter in an if block
@@ -957,23 +1010,27 @@ class c_parser(object):
 
 				if token == 'if':
 					ret = eval_macro(tokenize(line), self.defs)
-					if ret: state[-1] = accepted
-					else: state[-1] = ignored
+					if ret:
+						state[-1] = accepted
+					else:
+						state[-1] = ignored
 				elif token == 'ifdef':
 					m = re_mac.match(line)
-					if m and m.group(0) in self.defs: state[-1] = accepted
-					else: state[-1] = ignored
+					if m and m.group() in self.defs:
+						state[-1] = accepted
+					else:
+						state[-1] = ignored
 				elif token == 'ifndef':
 					m = re_mac.match(line)
-					if m and m.group(0) in self.defs: state[-1] = ignored
-					else: state[-1] = accepted
+					if m and m.group() in self.defs:
+						state[-1] = ignored
+					else:
+						state[-1] = accepted
 				elif token == 'include' or token == 'import':
 					(kind, inc) = extract_include(line, self.defs)
-					if ve: debug('preproc: include found %s    (%s) ', inc, kind)
-					if kind == '"' or not strict_quotes:
-						self.current_file = self.tryfind(inc)
-						if token == 'import':
-							self.ban_includes.add(self.current_file)
+					self.current_file = self.tryfind(inc, kind, env)
+					if token == 'import':
+						self.ban_includes.add(self.current_file)
 				elif token == 'elif':
 					if state[-1] == accepted:
 						state[-1] = skipped
@@ -981,24 +1038,35 @@ class c_parser(object):
 						if eval_macro(tokenize(line), self.defs):
 							state[-1] = accepted
 				elif token == 'else':
-					if state[-1] == accepted: state[-1] = skipped
-					elif state[-1] == ignored: state[-1] = accepted
+					if state[-1] == accepted:
+						state[-1] = skipped
+					elif state[-1] == ignored:
+						state[-1] = accepted
 				elif token == 'define':
 					try:
-						self.defs[define_name(line)] = line
-					except Exception:
-						raise PreprocError("Invalid define line %s" % line)
+						self.defs[self.define_name(line)] = line
+					except AttributeError:
+						raise PreprocError('Invalid define line %r' % line)
 				elif token == 'undef':
 					m = re_mac.match(line)
-					if m and m.group(0) in self.defs:
-						self.defs.__delitem__(m.group(0))
+					if m and m.group() in self.defs:
+						self.defs.__delitem__(m.group())
 						#print "undef %s" % name
 				elif token == 'pragma':
 					if re_pragma_once.match(line.lower()):
 						self.ban_includes.add(self.current_file)
 			except Exception as e:
 				if Logs.verbose:
-					debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
+					Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc())
+
+	def define_name(self, line):
+		"""
+		:param line: define line
+		:type line: string
+		:rtype: string
+		:return: the define name
+		"""
+		return re_mac.match(line).group()
 
 def scan(task):
 	"""
@@ -1008,9 +1076,6 @@ def scan(task):
 
 	This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
 	"""
-
-	global go_absolute
-
 	try:
 		incn = task.generator.includes_nodes
 	except AttributeError:
@@ -1023,7 +1088,4 @@ def scan(task):
 
 	tmp = c_parser(nodepaths)
 	tmp.start(task.inputs[0], task.env)
-	if Logs.verbose:
-		debug('deps: deps for %r: %r; unresolved %r' % (task.inputs, tmp.nodes, tmp.names))
 	return (tmp.nodes, tmp.names)
-

+ 27 - 16
sdk/waf/waflib/Tools/c_tests.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2010 (ita)
+# Thomas Nagy, 2016-2018 (ita)
 
 """
 Various configuration tests.
@@ -9,7 +9,6 @@ Various configuration tests.
 from waflib import Task
 from waflib.Configure import conf
 from waflib.TaskGen import feature, before_method, after_method
-import sys
 
 LIB_CODE = '''
 #ifdef _MSC_VER
@@ -37,7 +36,7 @@ int main(int argc, char **argv) {
 @before_method('process_source')
 def link_lib_test_fun(self):
 	"""
-	The configuration test :py:func:`waflib.Tools.ccroot.run_c_code` declares a unique task generator,
+	The configuration test :py:func:`waflib.Configure.run_build` declares a unique task generator,
 	so we need to create other task generators from here to check if the linker is able to link libraries.
 	"""
 	def write_test_file(task):
@@ -59,7 +58,7 @@ def link_lib_test_fun(self):
 @conf
 def check_library(self, mode=None, test_exec=True):
 	"""
-	Check if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
+	Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
 
 	:param mode: c or cxx or d
 	:type mode: string
@@ -73,8 +72,7 @@ def check_library(self, mode=None, test_exec=True):
 		features = 'link_lib_test',
 		msg = 'Checking for libraries',
 		mode = mode,
-		test_exec = test_exec,
-		)
+		test_exec = test_exec)
 
 ########################################################################################
 
@@ -90,7 +88,7 @@ INLINE_VALUES = ['inline', '__inline__', '__inline']
 @conf
 def check_inline(self, **kw):
 	"""
-	Check for the right value for inline macro.
+	Checks for the right value for inline macro.
 	Define INLINE_MACRO to 1 if the define is found.
 	If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)
 
@@ -99,7 +97,6 @@ def check_inline(self, **kw):
 	:param features: by default *c* or *cxx* depending on the compiler present
 	:type features: list of string
 	"""
-
 	self.start_msg('Checking for inline')
 
 	if not 'define_name' in kw:
@@ -136,7 +133,7 @@ int main(int argc, char **argv) {
 @conf
 def check_large_file(self, **kw):
 	"""
-	Check for large file support and define the macro HAVE_LARGEFILE
+	Checks for large file support and define the macro HAVE_LARGEFILE
 	The test is skipped on win32 systems (DEST_BINFMT == pe).
 
 	:param define_name: define to set, by default *HAVE_LARGEFILE*
@@ -144,7 +141,6 @@ def check_large_file(self, **kw):
 	:param execute: execute the test (yes by default)
 	:type execute: bool
 	"""
-
 	if not 'define_name' in kw:
 		kw['define_name'] = 'HAVE_LARGEFILE'
 	if not 'execute' in kw:
@@ -184,9 +180,15 @@ def check_large_file(self, **kw):
 ########################################################################################
 
 ENDIAN_FRAGMENT = '''
+#ifdef _MSC_VER
+#define testshlib_EXPORT __declspec(dllexport)
+#else
+#define testshlib_EXPORT
+#endif
+
 short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
 short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
-int use_ascii (int i) {
+int testshlib_EXPORT use_ascii (int i) {
 	return ascii_mm[i] + ascii_ii[i];
 }
 short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
@@ -198,9 +200,12 @@ extern int foo;
 '''
 
 class grep_for_endianness(Task.Task):
+	"""
+	Task that reads a binary and tries to determine the endianness
+	"""
 	color = 'PINK'
 	def run(self):
-		txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
+		txt = self.inputs[0].read(flags='rb').decode('latin-1')
 		if txt.find('LiTTleEnDian') > -1:
 			self.generator.tmp.append('little')
 		elif txt.find('BIGenDianSyS') > -1:
@@ -209,18 +214,24 @@ class grep_for_endianness(Task.Task):
 			return -1
 
 @feature('grep_for_endianness')
-@after_method('process_source')
+@after_method('apply_link')
 def grep_for_endianness_fun(self):
-	self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
+	"""
+	Used by the endianness configuration test
+	"""
+	self.create_task('grep_for_endianness', self.link_task.outputs[0])
 
 @conf
 def check_endianness(self):
 	"""
-	Execute a configuration test to determine the endianness
+	Executes a configuration test to determine the endianness
 	"""
 	tmp = []
 	def check_msg(self):
 		return tmp[0]
-	self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', msg="Checking for endianness", define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
+
+	self.check(fragment=ENDIAN_FRAGMENT, features='c cstlib grep_for_endianness',
+		msg='Checking for endianness', define='ENDIANNESS', tmp=tmp,
+		okmsg=check_msg, confcache=None)
 	return tmp[0]
 

+ 239 - 87
sdk/waf/waflib/Tools/ccroot.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)
 
 """
 Classes and methods shared by tools providing support for C-like language such
@@ -8,7 +8,7 @@ as C/C++/D/Assembly/Go (this support module is almost never used alone).
 """
 
 import os, re
-from waflib import Task, Utils, Node, Errors
+from waflib import Task, Utils, Node, Errors, Logs
 from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
 from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
 from waflib.Configure import conf
@@ -25,8 +25,8 @@ USELIB_VARS['cxx']      = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS
 USELIB_VARS['d']        = set(['INCLUDES', 'DFLAGS'])
 USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH'])
 
-USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
-USELIB_VARS['cshlib']   = USELIB_VARS['cxxshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
+USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
+USELIB_VARS['cshlib']   = USELIB_VARS['cxxshlib']   = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
 USELIB_VARS['cstlib']   = USELIB_VARS['cxxstlib']   = set(['ARFLAGS', 'LINKDEPS'])
 
 USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
@@ -77,12 +77,13 @@ def to_incnodes(self, inlst):
 	:return: list of include folders as nodes
 	"""
 	lst = []
-	seen = set([])
+	seen = set()
 	for x in self.to_list(inlst):
 		if x in seen or not x:
 			continue
 		seen.add(x)
 
+		# with a real lot of targets, it is sometimes interesting to cache the results below
 		if isinstance(x, Node.Node):
 			lst.append(x)
 		else:
@@ -110,25 +111,34 @@ def apply_incpaths(self):
 		tg = bld(features='includes', includes='.')
 
 	The folders only need to be relative to the current directory, the equivalent build directory is
-	added automatically (for headers created in the build directory). This enable using a build directory
+	added automatically (for headers created in the build directory). This enables using a build directory
 	or not (``top == out``).
 
 	This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``,
 	and the list of include paths in ``tg.env.INCLUDES``.
 	"""
 
-	lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
+	lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES)
 	self.includes_nodes = lst
-	self.env['INCPATHS'] = [x.abspath() for x in lst]
+	cwd = self.get_cwd()
+	if Utils.is_win32:
+		# Visual Studio limitations
+		self.env.INCPATHS = [x.path_from(cwd) if x.is_child_of(self.bld.srcnode) else x.abspath() for x in lst]
+	else:
+		self.env.INCPATHS = [x.path_from(cwd) for x in lst]
 
 class link_task(Task.Task):
 	"""
 	Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`.
 
 	.. inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib  waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib
+	  :top-classes: waflib.Tools.ccroot.link_task
 	"""
 	color   = 'YELLOW'
 
+	weight  = 3
+	"""Try to process link tasks as early as possible"""
+
 	inst_to = None
 	"""Default installation path for the link task outputs, or None to disable"""
 
@@ -141,36 +151,117 @@ class link_task(Task.Task):
 		The settings are retrieved from ``env.clsname_PATTERN``
 		"""
 		if isinstance(target, str):
+			base = self.generator.path
+			if target.startswith('#'):
+				# for those who like flat structures
+				target = target[1:]
+				base = self.generator.bld.bldnode
+
 			pattern = self.env[self.__class__.__name__ + '_PATTERN']
 			if not pattern:
 				pattern = '%s'
 			folder, name = os.path.split(target)
 
-			if self.__class__.__name__.find('shlib') > 0:
-				if self.env.DEST_BINFMT == 'pe' and getattr(self.generator, 'vnum', None):
+			if self.__class__.__name__.find('shlib') > 0 and getattr(self.generator, 'vnum', None):
+				nums = self.generator.vnum.split('.')
+				if self.env.DEST_BINFMT == 'pe':
 					# include the version in the dll file name,
-					# the import lib file name stays unversionned.
-					name = name + '-' + self.generator.vnum.split('.')[0]
-
-			tmp = folder + os.sep + pattern % name
-			target = self.generator.path.find_or_declare(tmp)
+					# the import lib file name stays unversioned.
+					name = name + '-' + nums[0]
+				elif self.env.DEST_OS == 'openbsd':
+					pattern = '%s.%s' % (pattern, nums[0])
+					if len(nums) >= 2:
+						pattern += '.%s' % nums[1]
+
+			if folder:
+				tmp = folder + os.sep + pattern % name
+			else:
+				tmp = pattern % name
+			target = base.find_or_declare(tmp)
 		self.set_outputs(target)
 
+	def exec_command(self, *k, **kw):
+		ret = super(link_task, self).exec_command(*k, **kw)
+		if not ret and self.env.DO_MANIFEST:
+			ret = self.exec_mf()
+		return ret
+
+	def exec_mf(self):
+		"""
+		Create manifest files for VS-like compilers (msvc, ifort, ...)
+		"""
+		if not self.env.MT:
+			return 0
+
+		manifest = None
+		for out_node in self.outputs:
+			if out_node.name.endswith('.manifest'):
+				manifest = out_node.abspath()
+				break
+		else:
+			# Should never get here.  If we do, it means the manifest file was
+			# never added to the outputs list, thus we don't have a manifest file
+			# to embed, so we just return.
+			return 0
+
+		# embedding mode. Different for EXE's and DLL's.
+		# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
+		mode = ''
+		for x in Utils.to_list(self.generator.features):
+			if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
+				mode = 1
+			elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
+				mode = 2
+
+		Logs.debug('msvc: embedding manifest in mode %r', mode)
+
+		lst = [] + self.env.MT
+		lst.extend(Utils.to_list(self.env.MTFLAGS))
+		lst.extend(['-manifest', manifest])
+		lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))
+
+		return super(link_task, self).exec_command(lst)
+
 class stlink_task(link_task):
 	"""
 	Base for static link tasks, which use *ar* most of the time.
-	The target is always removed before being written.
 	"""
-	run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+	run_str = [
+		lambda task: task.remove_before_build(),
+		'${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+	]
+
+	chmod   = Utils.O644
+	"""Default installation mode for the static libraries"""
+
+	def remove_before_build(self):
+		"Remove the library before building it"
+		try:
+			os.remove(self.outputs[0].abspath())
+		except OSError:
+			pass
 
 def rm_tgt(cls):
+	# TODO obsolete code, remove in waf 2.2
 	old = cls.run
 	def wrap(self):
-		try: os.remove(self.outputs[0].abspath())
-		except OSError: pass
+		try:
+			os.remove(self.outputs[0].abspath())
+		except OSError:
+			pass
 		return old(self)
 	setattr(cls, 'run', wrap)
-rm_tgt(stlink_task)
+
+@feature('skip_stlib_link_deps')
+@before_method('process_use')
+def apply_skip_stlib_link_deps(self):
+	"""
	This enables an optimization in the :py:func:`waflib.Tools.ccroot.process_use` method that skips dependency and
	link flag optimizations for targets that generate static libraries (via the :py:class:`waflib.Tools.ccroot.stlink_task` task).
	The actual behavior is implemented in the :py:func:`waflib.Tools.ccroot.process_use` method, so this feature only tells waf
	to enable the new behavior.
+	"""
+	self.env.SKIP_STLIB_LINK_DEPS = True
 
 @feature('c', 'cxx', 'd', 'fc', 'asm')
 @after_method('process_source')
@@ -207,10 +298,12 @@ def apply_link(self):
 	try:
 		inst_to = self.install_path
 	except AttributeError:
-		inst_to = self.link_task.__class__.inst_to
+		inst_to = self.link_task.inst_to
 	if inst_to:
 		# install a copy of the node list we have at this moment (implib not added)
-		self.install_task = self.bld.install_files(inst_to, self.link_task.outputs[:], env=self.env, chmod=self.link_task.chmod)
+		self.install_task = self.add_install_files(
+			install_to=inst_to, install_from=self.link_task.outputs[:],
+			chmod=self.link_task.chmod, task=self.link_task)
 
 @taskgen_method
 def use_rec(self, name, **kw):
@@ -248,6 +341,8 @@ def use_rec(self, name, **kw):
 
 	p = self.tmp_use_prec
 	for x in self.to_list(getattr(y, 'use', [])):
+		if self.env["STLIB_" + x]:
+			continue
 		try:
 			p[x].append(name)
 		except KeyError:
@@ -268,7 +363,7 @@ def process_use(self):
 	See :py:func:`waflib.Tools.ccroot.use_rec`.
 	"""
 
-	use_not = self.tmp_use_not = set([])
+	use_not = self.tmp_use_not = set()
 	self.tmp_use_seen = [] # we would like an ordered set
 	use_prec = self.tmp_use_prec = {}
 	self.uselib = self.to_list(getattr(self, 'uselib', []))
@@ -283,7 +378,7 @@ def process_use(self):
 			del use_prec[x]
 
 	# topological sort
-	out = []
+	out = self.tmp_use_sorted = []
 	tmp = []
 	for x in self.tmp_use_seen:
 		for k in use_prec.values():
@@ -316,31 +411,36 @@ def process_use(self):
 		y = self.bld.get_tgen_by_name(x)
 		var = y.tmp_use_var
 		if var and link_task:
-			if var == 'LIB' or y.tmp_use_stlib:
+			if self.env.SKIP_STLIB_LINK_DEPS and isinstance(link_task, stlink_task):
+				# If the skip_stlib_link_deps feature is enabled then we should
+				# avoid adding lib deps to the stlink_task instance.
+				pass
+			elif var == 'LIB' or y.tmp_use_stlib or x in names:
 				self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
 				self.link_task.dep_nodes.extend(y.link_task.outputs)
-				tmp_path = y.link_task.outputs[0].parent.path_from(self.bld.bldnode)
-				self.env.append_value(var + 'PATH', [tmp_path])
+				tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd())
+				self.env.append_unique(var + 'PATH', [tmp_path])
 		else:
 			if y.tmp_use_objects:
 				self.add_objects_from_tgen(y)
 
 		if getattr(y, 'export_includes', None):
-			self.includes.extend(y.to_incnodes(y.export_includes))
+			# self.includes may come from a global variable #2035
+			self.includes = self.includes + y.to_incnodes(y.export_includes)
 
 		if getattr(y, 'export_defines', None):
 			self.env.append_value('DEFINES', self.to_list(y.export_defines))
 
 
-	# and finally, add the uselib variables (no recursion needed)
+	# and finally, add the use variables (no recursion needed)
 	for x in names:
 		try:
 			y = self.bld.get_tgen_by_name(x)
-		except Exception:
+		except Errors.WafError:
 			if not self.env['STLIB_' + x] and not x in self.uselib:
 				self.uselib.append(x)
 		else:
-			for k in self.to_list(getattr(y, 'uselib', [])):
+			for k in self.to_list(getattr(y, 'use', [])):
 				if not self.env['STLIB_' + k] and not k in self.uselib:
 					self.uselib.append(k)
 
@@ -376,7 +476,7 @@ def get_uselib_vars(self):
 	:return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
 	:rtype: list of string
 	"""
-	_vars = set([])
+	_vars = set()
 	for x in self.features:
 		if x in USELIB_VARS:
 			_vars |= USELIB_VARS[x]
@@ -391,7 +491,7 @@ def propagate_uselib_vars(self):
 		def build(bld):
 			bld.env.AFLAGS_aaa = ['bar']
 			from waflib.Tools.ccroot import USELIB_VARS
-			USELIB_VARS['aaa'] = set('AFLAGS')
+			USELIB_VARS['aaa'] = ['AFLAGS']
 
 			tg = bld(features='aaa', aflags='test')
 
@@ -401,19 +501,18 @@ def propagate_uselib_vars(self):
 	"""
 	_vars = self.get_uselib_vars()
 	env = self.env
-
-	for x in _vars:
-		y = x.lower()
-		env.append_unique(x, self.to_list(getattr(self, y, [])))
-
-	for x in self.features:
-		for var in _vars:
-			compvar = '%s_%s' % (var, x)
-			env.append_value(var, env[compvar])
-
-	for x in self.to_list(getattr(self, 'uselib', [])):
-		for v in _vars:
-			env.append_value(v, env[v + '_' + x])
+	app = env.append_value
+	feature_uselib = self.features + self.to_list(getattr(self, 'uselib', []))
+	for var in _vars:
+		y = var.lower()
+		val = getattr(self, y, [])
+		if val:
+			app(var, self.to_list(val))
+
+		for x in feature_uselib:
+			val = env['%s_%s' % (var, x)]
+			if val:
+				app(var, val)
 
 # ============ the code above must not know anything about import libs ==========
 
@@ -434,64 +533,87 @@ def apply_implib(self):
 		name = self.target.name
 	else:
 		name = os.path.split(self.target)[1]
-	implib = self.env['implib_PATTERN'] % name
+	implib = self.env.implib_PATTERN % name
 	implib = dll.parent.find_or_declare(implib)
-	self.env.append_value('LINKFLAGS', self.env['IMPLIB_ST'] % implib.bldpath())
+	self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath())
 	self.link_task.outputs.append(implib)
 
 	if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
 		node = self.path.find_resource(self.defs)
 		if not node:
 			raise Errors.WafError('invalid def file %r' % self.defs)
-		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
-			self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.bld.bldnode))
+		if self.env.def_PATTERN:
+			self.env.append_value('LINKFLAGS', self.env.def_PATTERN % node.path_from(self.get_cwd()))
 			self.link_task.dep_nodes.append(node)
 		else:
-			#gcc for windows takes *.def file a an input without any special flag
+			# gcc for windows takes *.def file as input without any special flag
 			self.link_task.inputs.append(node)
 
-	try:
-		inst_to = self.install_path
-	except AttributeError:
-		inst_to = self.link_task.__class__.inst_to
-	if not inst_to:
-		return
-
-	self.implib_install_task = self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)
+	# where to put the import library
+	if getattr(self, 'install_task', None):
+		try:
+			# user has given a specific installation path for the import library
+			inst_to = self.install_path_implib
+		except AttributeError:
+			try:
+				# user has given an installation path for the main library, put the import library in it
+				inst_to = self.install_path
+			except AttributeError:
+				# else, put the library in BINDIR and the import library in LIBDIR
+				inst_to = '${IMPLIBDIR}'
+				self.install_task.install_to = '${BINDIR}'
+				if not self.env.IMPLIBDIR:
+					self.env.IMPLIBDIR = self.env.LIBDIR
+		self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib,
+			chmod=self.link_task.chmod, task=self.link_task)
 
 # ============ the code above must not know anything about vnum processing on unix platforms =========
 
-re_vnum = re.compile('^([1-9]\\d*|0)[.]([1-9]\\d*|0)[.]([1-9]\\d*|0)$')
+re_vnum = re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$')
 @feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum')
 @after_method('apply_link', 'propagate_uselib_vars')
 def apply_vnum(self):
 	"""
-	Enforce version numbering on shared libraries. The valid version numbers must have at most two dots::
+	Enforce version numbering on shared libraries. The valid version numbers must have zero to two dots::
 
 		def build(bld):
 			bld.shlib(source='a.c', target='foo', vnum='14.15.16')
 
-	In this example, ``libfoo.so`` is installed as ``libfoo.so.1.2.3``, and the following symbolic links are created:
+	In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:
+
+	* ``libfoo.so    → libfoo.so.14.15.16``
+	* ``libfoo.so.14 → libfoo.so.14.15.16``
+
+	By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library.  When necessary, the compatibility can be explicitly defined using `cnum` parameter:
+
+		def build(bld):
+			bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')
 
-	* ``libfoo.so   → libfoo.so.1.2.3``
-	* ``libfoo.so.1 → libfoo.so.1.2.3``
+	In this case, the assigned SONAME will be ``libfoo.so.14.15`` with ABI compatibility only between patch releases for a specific major and minor version of the library.
+
+	On the OS X platform, the install-name parameter follows the above logic for SONAME, with the exception that it also specifies an absolute path (based on install_path) of the library.
 	"""
 	if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
 		return
 
 	link = self.link_task
 	if not re_vnum.match(self.vnum):
-		raise Errors.WafError('Invalid version %r for %r' % (self.vnum, self))
+		raise Errors.WafError('Invalid vnum %r for target %r' % (self.vnum, getattr(self, 'name', self)))
 	nums = self.vnum.split('.')
 	node = link.outputs[0]
 
+	cnum = getattr(self, 'cnum', str(nums[0]))
+	cnums = cnum.split('.')
+	if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums:
+		raise Errors.WafError('invalid compatibility version %s' % cnum)
+
 	libname = node.name
 	if libname.endswith('.dylib'):
 		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
-		name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
+		name2 = libname.replace('.dylib', '.%s.dylib' % cnum)
 	else:
 		name3 = libname + '.' + self.vnum
-		name2 = libname + '.' + nums[0]
+		name2 = libname + '.' + cnum
 
 	# add the so name for the ld linker - to disable, just unset env.SONAME_ST
 	if self.env.SONAME_ST:
@@ -499,35 +621,50 @@ def apply_vnum(self):
 		self.env.append_value('LINKFLAGS', v.split())
 
 	# the following task is just to enable execution from the build dir :-/
-	self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)])
+	if self.env.DEST_OS != 'openbsd':
+		outs = [node.parent.make_node(name3)]
+		if name2 != name3:
+			outs.append(node.parent.make_node(name2))
+		self.create_task('vnum', node, outs)
 
 	if getattr(self, 'install_task', None):
-		self.install_task.hasrun = Task.SKIP_ME
-		bld = self.bld
-		path = self.install_task.dest
-		t1 = bld.install_as(path + os.sep + name3, node, env=self.env, chmod=self.link_task.chmod)
-		t2 = bld.symlink_as(path + os.sep + name2, name3)
-		t3 = bld.symlink_as(path + os.sep + libname, name3)
-		self.vnum_install_task = (t1, t2, t3)
-
-	if '-dynamiclib' in self.env['LINKFLAGS']:
+		self.install_task.hasrun = Task.SKIPPED
+		self.install_task.no_errcheck_out = True
+		path = self.install_task.install_to
+		if self.env.DEST_OS == 'openbsd':
+			libname = self.link_task.outputs[0].name
+			t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod)
+			self.vnum_install_task = (t1,)
+		else:
+			t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod)
+			t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3)
+			if name2 != name3:
+				t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3)
+				self.vnum_install_task = (t1, t2, t3)
+			else:
+				self.vnum_install_task = (t1, t3)
+
+	if '-dynamiclib' in self.env.LINKFLAGS:
 		# this requires after(propagate_uselib_vars)
 		try:
 			inst_to = self.install_path
 		except AttributeError:
-			inst_to = self.link_task.__class__.inst_to
+			inst_to = self.link_task.inst_to
 		if inst_to:
 			p = Utils.subst_vars(inst_to, self.env)
-			path = os.path.join(p, self.link_task.outputs[0].name)
+			path = os.path.join(p, name2)
 			self.env.append_value('LINKFLAGS', ['-install_name', path])
+			self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum)
+			self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum)
 
 class vnum(Task.Task):
 	"""
 	Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
 	"""
 	color = 'CYAN'
-	quient = True
 	ext_in = ['.bin']
+	def keyword(self):
+		return 'Symlinking'
 	def run(self):
 		for x in self.outputs:
 			path = x.abspath()
@@ -549,9 +686,6 @@ class fake_shlib(link_task):
 		for t in self.run_after:
 			if not t.hasrun:
 				return Task.ASK_LATER
-
-		for x in self.outputs:
-			x.sig = Utils.h_file(x.abspath())
 		return Task.SKIP_ME
 
 class fake_stlib(stlink_task):
@@ -562,9 +696,6 @@ class fake_stlib(stlink_task):
 		for t in self.run_after:
 			if not t.hasrun:
 				return Task.ASK_LATER
-
-		for x in self.outputs:
-			x.sig = Utils.h_file(x.abspath())
 		return Task.SKIP_ME
 
 @conf
@@ -607,7 +738,10 @@ def process_lib(self):
 		for y in names:
 			node = x.find_node(y)
 			if node:
-				node.sig = Utils.h_file(node.abspath())
+				try:
+					Utils.h_file(node.abspath())
+				except EnvironmentError:
+					raise ValueError('Could not read %r' % y)
 				break
 		else:
 			continue
@@ -651,3 +785,21 @@ def read_object(self, obj):
 		obj = self.path.find_resource(obj)
 	return self(features='fake_obj', source=obj, name=obj.name)
 
+@feature('cxxprogram', 'cprogram')
+@after_method('apply_link', 'process_use')
+def set_full_paths_hpux(self):
+	"""
+	On hp-ux, extend the libpaths and static library paths to absolute paths
+	"""
+	if self.env.DEST_OS != 'hp-ux':
+		return
+	base = self.bld.bldnode.abspath()
+	for var in ['LIBPATH', 'STLIBPATH']:
+		lst = []
+		for x in self.env[var]:
+			if x.startswith('/'):
+				lst.append(x)
+			else:
+				lst.append(os.path.normpath(os.path.join(base, x)))
+		self.env[var] = lst
+

+ 29 - 0
sdk/waf/waflib/Tools/clang.py

@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Krzysztof Kosiński 2014
+
+"""
+Detect the Clang C compiler
+"""
+
+from waflib.Tools import ccroot, ar, gcc
+from waflib.Configure import conf
+
+@conf
+def find_clang(conf):
+	"""
+	Finds the program clang and executes it to ensure it really is clang
+	"""
+	cc = conf.find_program('clang', var='CC')
+	conf.get_cc_version(cc, clang=True)
+	conf.env.CC_NAME = 'clang'
+
+def configure(conf):
+	conf.find_clang()
+	conf.find_program(['llvm-ar', 'ar'], var='AR')
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.gcc_modifier_platform()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()

+ 30 - 0
sdk/waf/waflib/Tools/clangxx.py

@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2009-2018 (ita)
+
+"""
+Detect the Clang++ C++ compiler
+"""
+
+from waflib.Tools import ccroot, ar, gxx
+from waflib.Configure import conf
+
+@conf
+def find_clangxx(conf):
+	"""
+	Finds the program clang++, and executes it to ensure it really is clang++
+	"""
+	cxx = conf.find_program('clang++', var='CXX')
+	conf.get_cc_version(cxx, clang=True)
+	conf.env.CXX_NAME = 'clang'
+
+def configure(conf):
+	conf.find_clangxx()
+	conf.find_program(['llvm-ar', 'ar'], var='AR')
+	conf.find_ar()
+	conf.gxx_common_flags()
+	conf.gxx_modifier_platform()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+

+ 46 - 33
sdk/waf/waflib/Tools/compiler_c.py

@@ -15,11 +15,12 @@ Try to detect a C compiler from the list of supported compilers (gcc, msvc, etc)
 The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_c.c_compiler`. To register
 a new C compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
 
+	from waflib.Tools.compiler_c import c_compiler
+	c_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
+
 	def options(opt):
 		opt.load('compiler_c')
 	def configure(cnf):
-		from waflib.Tools.compiler_c import c_compiler
-		c_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
 		cnf.load('compiler_c')
 	def build(bld):
 		bld.program(source='main.c', target='app')
@@ -29,70 +30,82 @@ Not all compilers need to have a specific tool. For example, the clang compilers
 	$ CC=clang waf configure
 """
 
-import os, sys, imp, types
+import re
 from waflib.Tools import ccroot
-from waflib import Utils, Configure
+from waflib import Utils
 from waflib.Logs import debug
 
 c_compiler = {
-'win32':  ['msvc', 'gcc'],
-'cygwin': ['gcc'],
-'darwin': ['gcc'],
-'aix':    ['xlc', 'gcc'],
-'linux':  ['gcc', 'icc'],
-'sunos':  ['suncc', 'gcc'],
-'irix':   ['gcc', 'irixcc'],
-'hpux':   ['gcc'],
-'gnu':    ['gcc'],
-'java':   ['gcc', 'msvc', 'icc'],
-'default':['gcc'],
+'win32':       ['msvc', 'gcc', 'clang'],
+'cygwin':      ['gcc', 'clang'],
+'darwin':      ['clang', 'gcc'],
+'aix':         ['xlc', 'gcc', 'clang'],
+'linux':       ['gcc', 'clang', 'icc'],
+'sunos':       ['suncc', 'gcc'],
+'irix':        ['gcc', 'irixcc'],
+'hpux':        ['gcc'],
+'osf1V':       ['gcc'],
+'gnu':         ['gcc', 'clang'],
+'java':        ['gcc', 'msvc', 'clang', 'icc'],
+'gnukfreebsd': ['gcc', 'clang'],
+'default':     ['clang', 'gcc'],
 }
 """
-Dict mapping the platform names to waf tools finding specific compilers::
+Dict mapping platform names to Waf tools finding specific C compilers::
 
 	from waflib.Tools.compiler_c import c_compiler
 	c_compiler['linux'] = ['gcc', 'icc', 'suncc']
 """
 
+def default_compilers():
+	build_platform = Utils.unversioned_sys_platform()
+	possible_compiler_list = c_compiler.get(build_platform, c_compiler['default'])
+	return ' '.join(possible_compiler_list)
+
 def configure(conf):
 	"""
-	Try to find a suitable C compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
+	Detects a suitable C compiler
+
+	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
 	"""
-	try: test_for_compiler = conf.options.check_c_compiler
-	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_c')")
-	for compiler in test_for_compiler.split():
+	try:
+		test_for_compiler = conf.options.check_c_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_c')")
+
+	for compiler in re.split('[ ,]+', test_for_compiler):
 		conf.env.stash()
-		conf.start_msg('Checking for %r (c compiler)' % compiler)
+		conf.start_msg('Checking for %r (C compiler)' % compiler)
 		try:
 			conf.load(compiler)
 		except conf.errors.ConfigurationError as e:
 			conf.env.revert()
 			conf.end_msg(False)
-			debug('compiler_c: %r' % e)
+			debug('compiler_c: %r', e)
 		else:
-			if conf.env['CC']:
+			if conf.env.CC:
 				conf.end_msg(conf.env.get_flat('CC'))
-				conf.env['COMPILER_CC'] = compiler
+				conf.env.COMPILER_CC = compiler
+				conf.env.commit()
 				break
+			conf.env.revert()
 			conf.end_msg(False)
 	else:
-		conf.fatal('could not configure a c compiler!')
+		conf.fatal('could not configure a C compiler!')
 
 def options(opt):
 	"""
-	Restrict the compiler detection from the command-line::
+	This is how to provide compiler preferences on the command-line::
 
 		$ waf configure --check-c-compiler=gcc
 	"""
+	test_for_compiler = default_compilers()
 	opt.load_special_tools('c_*.py', ban=['c_dumbpreproc.py'])
-	global c_compiler
-	build_platform = Utils.unversioned_sys_platform()
-	possible_compiler_list = c_compiler[build_platform in c_compiler and build_platform or 'default']
-	test_for_compiler = ' '.join(possible_compiler_list)
-	cc_compiler_opts = opt.add_option_group("C Compiler Options")
-	cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler,
-		help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
+	cc_compiler_opts = opt.add_option_group('Configuration options')
+	cc_compiler_opts.add_option('--check-c-compiler', default=None,
+		help='list of C compilers to try [%s]' % test_for_compiler,
 		dest="check_c_compiler")
+
 	for x in test_for_compiler.split():
 		opt.load('%s' % x)
 

+ 43 - 33
sdk/waf/waflib/Tools/compiler_cxx.py

@@ -15,11 +15,12 @@ Try to detect a C++ compiler from the list of supported compilers (g++, msvc, et
 The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_cxx.cxx_compiler`. To register
 a new C++ compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
 
+	from waflib.Tools.compiler_cxx import cxx_compiler
+	cxx_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
+
 	def options(opt):
 		opt.load('compiler_cxx')
 	def configure(cnf):
-		from waflib.Tools.compiler_cxx import cxx_compiler
-		cxx_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
 		cnf.load('compiler_cxx')
 	def build(bld):
 		bld.program(source='main.c', target='app')
@@ -30,71 +31,80 @@ Not all compilers need to have a specific tool. For example, the clang compilers
 """
 
 
-import os, sys, imp, types
+import re
 from waflib.Tools import ccroot
-from waflib import Utils, Configure
+from waflib import Utils
 from waflib.Logs import debug
 
 cxx_compiler = {
-'win32':  ['msvc', 'g++'],
-'cygwin': ['g++'],
-'darwin': ['g++'],
-'aix':    ['xlc++', 'g++'],
-'linux':  ['g++', 'icpc'],
-'sunos':  ['sunc++', 'g++'],
-'irix':   ['g++'],
-'hpux':   ['g++'],
-'gnu':    ['g++'],
-'java':   ['g++', 'msvc', 'icpc'],
-'default': ['g++']
+'win32':       ['msvc', 'g++', 'clang++'],
+'cygwin':      ['g++', 'clang++'],
+'darwin':      ['clang++', 'g++'],
+'aix':         ['xlc++', 'g++', 'clang++'],
+'linux':       ['g++', 'clang++', 'icpc'],
+'sunos':       ['sunc++', 'g++'],
+'irix':        ['g++'],
+'hpux':        ['g++'],
+'osf1V':       ['g++'],
+'gnu':         ['g++', 'clang++'],
+'java':        ['g++', 'msvc', 'clang++', 'icpc'],
+'gnukfreebsd': ['g++', 'clang++'],
+'default':     ['clang++', 'g++']
 }
 """
-Dict mapping the platform names to waf tools finding specific compilers::
+Dict mapping the platform names to Waf tools finding specific C++ compilers::
 
 	from waflib.Tools.compiler_cxx import cxx_compiler
 	cxx_compiler['linux'] = ['gxx', 'icpc', 'suncxx']
 """
 
+def default_compilers():
+	build_platform = Utils.unversioned_sys_platform()
+	possible_compiler_list = cxx_compiler.get(build_platform, cxx_compiler['default'])
+	return ' '.join(possible_compiler_list)
 
 def configure(conf):
 	"""
-	Try to find a suitable C++ compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
+	Detects a suitable C++ compiler
+
+	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
 	"""
-	try: test_for_compiler = conf.options.check_cxx_compiler
-	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_cxx')")
+	try:
+		test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_cxx')")
 
-	for compiler in test_for_compiler.split():
+	for compiler in re.split('[ ,]+', test_for_compiler):
 		conf.env.stash()
-		conf.start_msg('Checking for %r (c++ compiler)' % compiler)
+		conf.start_msg('Checking for %r (C++ compiler)' % compiler)
 		try:
 			conf.load(compiler)
 		except conf.errors.ConfigurationError as e:
 			conf.env.revert()
 			conf.end_msg(False)
-			debug('compiler_cxx: %r' % e)
+			debug('compiler_cxx: %r', e)
 		else:
-			if conf.env['CXX']:
+			if conf.env.CXX:
 				conf.end_msg(conf.env.get_flat('CXX'))
-				conf.env['COMPILER_CXX'] = compiler
+				conf.env.COMPILER_CXX = compiler
+				conf.env.commit()
 				break
+			conf.env.revert()
 			conf.end_msg(False)
 	else:
-		conf.fatal('could not configure a c++ compiler!')
+		conf.fatal('could not configure a C++ compiler!')
 
 def options(opt):
 	"""
-	Restrict the compiler detection from the command-line::
+	This is how to provide compiler preferences on the command-line::
 
 		$ waf configure --check-cxx-compiler=gxx
 	"""
+	test_for_compiler = default_compilers()
 	opt.load_special_tools('cxx_*.py')
-	global cxx_compiler
-	build_platform = Utils.unversioned_sys_platform()
-	possible_compiler_list = cxx_compiler[build_platform in cxx_compiler and build_platform or 'default']
-	test_for_compiler = ' '.join(possible_compiler_list)
-	cxx_compiler_opts = opt.add_option_group('C++ Compiler Options')
-	cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler,
-		help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
+	cxx_compiler_opts = opt.add_option_group('Configuration options')
+	cxx_compiler_opts.add_option('--check-cxx-compiler', default=None,
+		help='list of C++ compilers to try [%s]' % test_for_compiler,
 		dest="check_cxx_compiler")
 
 	for x in test_for_compiler.split():

+ 41 - 15
sdk/waf/waflib/Tools/compiler_d.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2010 (ita)
+# Thomas Nagy, 2016-2018 (ita)
 
 """
 Try to detect a D compiler from the list of supported compilers::
@@ -20,40 +20,66 @@ Only three D compilers are really present at the moment:
 * ldc2
 """
 
-import os, sys, imp, types
-from waflib import Utils, Configure, Options, Logs
+import re
+from waflib import Utils, Logs
+
+d_compiler = {
+'default' : ['gdc', 'dmd', 'ldc2']
+}
+"""
+Dict mapping the platform names to lists of names of D compilers to try, in order of preference::
+
+	from waflib.Tools.compiler_d import d_compiler
+	d_compiler['default'] = ['gdc', 'dmd', 'ldc2']
+"""
+
+def default_compilers():
+	build_platform = Utils.unversioned_sys_platform()
+	possible_compiler_list = d_compiler.get(build_platform, d_compiler['default'])
+	return ' '.join(possible_compiler_list)
 
 def configure(conf):
 	"""
-	Try to find a suitable D compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
+	Detects a suitable D compiler
+
+	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
 	"""
-	for compiler in conf.options.dcheck.split(','):
+	try:
+		test_for_compiler = conf.options.check_d_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_d')")
+
+	for compiler in re.split('[ ,]+', test_for_compiler):
 		conf.env.stash()
-		conf.start_msg('Checking for %r (d compiler)' % compiler)
+		conf.start_msg('Checking for %r (D compiler)' % compiler)
 		try:
 			conf.load(compiler)
 		except conf.errors.ConfigurationError as e:
 			conf.env.revert()
 			conf.end_msg(False)
-			Logs.debug('compiler_d: %r' % e)
+			Logs.debug('compiler_d: %r', e)
 		else:
 			if conf.env.D:
 				conf.end_msg(conf.env.get_flat('D'))
-				conf.env['COMPILER_D'] = compiler
+				conf.env.COMPILER_D = compiler
+				conf.env.commit()
 				break
+			conf.env.revert()
 			conf.end_msg(False)
 	else:
-		conf.fatal('no suitable d compiler was found')
+		conf.fatal('could not configure a D compiler!')
 
 def options(opt):
 	"""
-	Restrict the compiler detection from the command-line::
+	This is how to provide compiler preferences on the command-line::
 
 		$ waf configure --check-d-compiler=dmd
 	"""
-	d_compiler_opts = opt.add_option_group('D Compiler Options')
-	d_compiler_opts.add_option('--check-d-compiler', default='gdc,dmd,ldc2', action='store',
-		help='check for the compiler [Default:gdc,dmd,ldc2]', dest='dcheck')
-	for d_compiler in ['gdc', 'dmd', 'ldc2']:
-		opt.load('%s' % d_compiler)
+	test_for_compiler = default_compilers()
+	d_compiler_opts = opt.add_option_group('Configuration options')
+	d_compiler_opts.add_option('--check-d-compiler', default=None,
+		help='list of D compilers to try [%s]' % test_for_compiler, dest='check_d_compiler')
+
+	for x in test_for_compiler.split():
+		opt.load('%s' % x)
 

+ 34 - 27
sdk/waf/waflib/Tools/compiler_fc.py

@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 # encoding: utf-8
 
-import os, sys, imp, types
-from waflib import Utils, Configure, Options, Logs, Errors
+import re
+from waflib import Utils, Logs
 from waflib.Tools import fc
 
 fc_compiler = {
@@ -13,54 +13,61 @@ fc_compiler = {
 	'default': ['gfortran'],
 	'aix'    : ['gfortran']
 }
+"""
+Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference::
 
-def __list_possible_compiler(platform):
-	try:
-		return fc_compiler[platform]
-	except KeyError:
-		return fc_compiler["default"]
+	from waflib.Tools.compiler_fc import fc_compiler
+	fc_compiler['linux'] = ['gfortran', 'g95', 'ifort']
+"""
+
+def default_compilers():
+	build_platform = Utils.unversioned_sys_platform()
+	possible_compiler_list = fc_compiler.get(build_platform, fc_compiler['default'])
+	return ' '.join(possible_compiler_list)
 
 def configure(conf):
 	"""
-	Try to find a suitable Fortran compiler or raise a :py:class:`waflib.Errors.ConfigurationError`.
+	Detects a suitable Fortran compiler
+
+	:raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
 	"""
-	try: test_for_compiler = conf.options.check_fc
-	except AttributeError: conf.fatal("Add options(opt): opt.load('compiler_fc')")
-	for compiler in test_for_compiler.split():
+	try:
+		test_for_compiler = conf.options.check_fortran_compiler or default_compilers()
+	except AttributeError:
+		conf.fatal("Add options(opt): opt.load('compiler_fc')")
+	for compiler in re.split('[ ,]+', test_for_compiler):
 		conf.env.stash()
-		conf.start_msg('Checking for %r (fortran compiler)' % compiler)
+		conf.start_msg('Checking for %r (Fortran compiler)' % compiler)
 		try:
 			conf.load(compiler)
 		except conf.errors.ConfigurationError as e:
 			conf.env.revert()
 			conf.end_msg(False)
-			Logs.debug('compiler_fortran: %r' % e)
+			Logs.debug('compiler_fortran: %r', e)
 		else:
-			if conf.env['FC']:
+			if conf.env.FC:
 				conf.end_msg(conf.env.get_flat('FC'))
 				conf.env.COMPILER_FORTRAN = compiler
+				conf.env.commit()
 				break
+			conf.env.revert()
 			conf.end_msg(False)
 	else:
-		conf.fatal('could not configure a fortran compiler!')
+		conf.fatal('could not configure a Fortran compiler!')
 
 def options(opt):
 	"""
-	Restrict the compiler detection from the command-line::
+	This is how to provide compiler preferences on the command-line::
 
 		$ waf configure --check-fortran-compiler=ifort
 	"""
+	test_for_compiler = default_compilers()
 	opt.load_special_tools('fc_*.py')
-	build_platform = Utils.unversioned_sys_platform()
-	detected_platform = Options.platform
-	possible_compiler_list = __list_possible_compiler(detected_platform)
-	test_for_compiler = ' '.join(possible_compiler_list)
-	fortran_compiler_opts = opt.add_option_group("Fortran Compiler Options")
-	fortran_compiler_opts.add_option('--check-fortran-compiler',
-			default="%s" % test_for_compiler,
-			help='On this platform (%s) the following Fortran Compiler will be checked by default: "%s"' % (detected_platform, test_for_compiler),
-		dest="check_fc")
+	fortran_compiler_opts = opt.add_option_group('Configuration options')
+	fortran_compiler_opts.add_option('--check-fortran-compiler', default=None,
+			help='list of Fortran compiler to try [%s]' % test_for_compiler,
+		dest="check_fortran_compiler")
 
-	for compiler in test_for_compiler.split():
-		opt.load('%s' % compiler)
+	for x in test_for_compiler.split():
+		opt.load('%s' % x)
 

+ 42 - 53
sdk/waf/waflib/Tools/cs.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 
 """
 C# support. A simple example::
@@ -21,11 +21,10 @@ Note that the configuration may compile C# snippets::
 			bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support')
 """
 
-from waflib import Utils, Task, Options, Logs, Errors
+from waflib import Utils, Task, Options, Errors
 from waflib.TaskGen import before_method, after_method, feature
 from waflib.Tools import ccroot
 from waflib.Configure import conf
-import os, tempfile
 
 ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
 ccroot.lib_patterns['csshlib'] = ['%s']
@@ -55,7 +54,7 @@ def apply_cs(self):
 	if inst_to:
 		# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
 		mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
-		self.install_task = self.bld.install_files(inst_to, self.cs_task.outputs[:], env=self.env, chmod=mod)
+		self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
 
 @feature('cs')
 @after_method('apply_cs')
@@ -81,7 +80,7 @@ def use_cs(self):
 		if not tsk:
 			self.bld.fatal('cs task has no link task for use %r' % self)
 		self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
-		self.cs_task.set_run_after(tsk) # order (redundant, the order is infered from the nodes inputs/outputs)
+		self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs)
 		self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath())
 
 @feature('cs')
@@ -92,7 +91,7 @@ def debug_cs(self):
 
 		def build(bld):
 			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full')
-			# csdebug is a value in [True, 'full', 'pdbonly']
+			# csdebug is a value in (True, 'full', 'pdbonly')
 	"""
 	csdebug = getattr(self, 'csdebug', self.env.CSDEBUG)
 	if not csdebug:
@@ -104,10 +103,10 @@ def debug_cs(self):
 	else:
 		out = node.change_ext('.pdb')
 	self.cs_task.outputs.append(out)
-	try:
-		self.install_task.source.append(out)
-	except AttributeError:
-		pass
+
+	if getattr(self, 'install_task', None):
+		self.pdb_install_task = self.add_install_files(
+			install_to=self.install_task.install_to, install_from=out)
 
 	if csdebug == 'pdbonly':
 		val = ['/debug+', '/debug:pdbonly']
@@ -117,6 +116,29 @@ def debug_cs(self):
 		val = ['/debug-']
 	self.env.append_value('CSFLAGS', val)
 
+@feature('cs')
+@after_method('debug_cs')
+def doc_cs(self):
+	"""
+	The C# targets may create .xml documentation files::
+
+		def build(bld):
+			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdoc=True)
+			# csdoc is a boolean value
+	"""
+	csdoc = getattr(self, 'csdoc', self.env.CSDOC)
+	if not csdoc:
+		return
+
+	node = self.cs_task.outputs[0]
+	out = node.change_ext('.xml')
+	self.cs_task.outputs.append(out)
+
+	if getattr(self, 'install_task', None):
+		self.doc_install_task = self.add_install_files(
+			install_to=self.install_task.install_to, install_from=out)
+
+	self.env.append_value('CSFLAGS', '/doc:%s' % out.abspath())
 
 class mcs(Task.Task):
 	"""
@@ -125,47 +147,16 @@ class mcs(Task.Task):
 	color   = 'YELLOW'
 	run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
 
-	def exec_command(self, cmd, **kw):
-		bld = self.generator.bld
-
-		try:
-			if not kw.get('cwd', None):
-				kw['cwd'] = bld.cwd
-		except AttributeError:
-			bld.cwd = kw['cwd'] = bld.variant_dir
-
-		try:
-			tmp = None
-			if isinstance(cmd, list) and len(' '.join(cmd)) >= 8192:
-				program = cmd[0] #unquoted program name, otherwise exec_command will fail
-				cmd = [self.quote_response_command(x) for x in cmd]
-				(fd, tmp) = tempfile.mkstemp()
-				os.write(fd, '\r\n'.join(i.replace('\\', '\\\\') for i in cmd[1:]).encode())
-				os.close(fd)
-				cmd = [program, '@' + tmp]
-			# no return here, that's on purpose
-			ret = self.generator.bld.exec_command(cmd, **kw)
-		finally:
-			if tmp:
-				try:
-					os.remove(tmp)
-				except OSError:
-					pass # anti-virus and indexers can keep the files open -_-
-		return ret
-
-	def quote_response_command(self, flag):
-		# /noconfig is not allowed when using response files
-		if flag.lower() == '/noconfig':
-			return ''
-
-		if flag.find(' ') > -1:
-			for x in ('/r:', '/reference:', '/resource:', '/lib:', '/out:'):
-				if flag.startswith(x):
-					flag = '%s"%s"' % (x, flag[len(x):])
-					break
+	def split_argfile(self, cmd):
+		inline = [cmd[0]]
+		infile = []
+		for x in cmd[1:]:
+			# csc doesn't want /noconfig in @file
+			if x.lower() == '/noconfig':
+				inline.append(x)
 			else:
-				flag = '"%s"' % flag
-		return flag
+				infile.append(self.quote_flag(x))
+		return (inline, infile)
 
 def configure(conf):
 	"""
@@ -188,7 +179,7 @@ def options(opt):
 
 		$ waf configure --with-csc-binary=/foo/bar/mcs
 	"""
-	opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
+	opt.add_option('--with-csc-binary', type=str, dest='cscbinary')
 
 class fake_csshlib(Task.Task):
 	"""
@@ -198,8 +189,6 @@ class fake_csshlib(Task.Task):
 	inst_to = None
 
 	def runnable_status(self):
-		for x in self.outputs:
-			x.sig = Utils.h_file(x.abspath())
 		return Task.SKIP_ME
 
 @conf

+ 9 - 9
sdk/waf/waflib/Tools/cxx.py

@@ -1,40 +1,40 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)
 
 "Base for c++ programs and libraries"
 
-from waflib import TaskGen, Task, Utils
+from waflib import TaskGen, Task
 from waflib.Tools import c_preproc
 from waflib.Tools.ccroot import link_task, stlink_task
 
 @TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
 def cxx_hook(self, node):
-	"Bind the c++ file extensions to the creation of a :py:class:`waflib.Tools.cxx.cxx` instance"
+	"Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances"
 	return self.create_compiled_task('cxx', node)
 
 if not '.c' in TaskGen.task_gen.mappings:
 	TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
 
 class cxx(Task.Task):
-	"Compile C++ files into object files"
-	run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
+	"Compiles C++ files into object files"
+	run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
 	vars    = ['CXXDEPS'] # unused variable to depend on, just in case
 	ext_in  = ['.h'] # set the build order easily by using ext_out=['.h']
 	scan    = c_preproc.scan
 
 class cxxprogram(link_task):
-	"Link object files into a c++ program"
-	run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
+	"Links object files into c++ programs"
+	run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
 	vars    = ['LINKDEPS']
 	ext_out = ['.bin']
 	inst_to = '${BINDIR}'
 
 class cxxshlib(cxxprogram):
-	"Link object files into a c++ shared library"
+	"Links object files into c++ shared libraries"
 	inst_to = '${LIBDIR}'
 
 class cxxstlib(stlink_task):
-	"Link object files into a c++ static library"
+	"Links object files into c++ static libraries"
 	pass # do not remove
 

+ 2 - 2
sdk/waf/waflib/Tools/d.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2007-2010 (ita)
+# Thomas Nagy, 2007-2018 (ita)
 
 from waflib import Utils, Task, Errors
 from waflib.TaskGen import taskgen_method, feature, extension
@@ -57,7 +57,7 @@ def d_hook(self, node):
 
 	if getattr(self, 'generate_headers', None):
 		tsk = create_compiled_task(self, 'd_with_header', node)
-		tsk.outputs.append(node.change_ext(self.env['DHEADER_ext']))
+		tsk.outputs.append(node.change_ext(self.env.DHEADER_ext))
 	else:
 		tsk = create_compiled_task(self, 'd', node)
 	return tsk

+ 13 - 12
sdk/waf/waflib/Tools/d_config.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2010 (ita)
+# Thomas Nagy, 2016-2018 (ita)
 
 from waflib import Utils
 from waflib.Configure import conf
@@ -8,24 +8,24 @@ from waflib.Configure import conf
 @conf
 def d_platform_flags(self):
 	"""
-	Set the extensions dll/so for d programs and libraries
+	Sets the extensions dll/so for d programs and libraries
 	"""
 	v = self.env
 	if not v.DEST_OS:
 		v.DEST_OS = Utils.unversioned_sys_platform()
 	binfmt = Utils.destos_to_binfmt(self.env.DEST_OS)
 	if binfmt == 'pe':
-		v['dprogram_PATTERN'] = '%s.exe'
-		v['dshlib_PATTERN']   = 'lib%s.dll'
-		v['dstlib_PATTERN']   = 'lib%s.a'
+		v.dprogram_PATTERN = '%s.exe'
+		v.dshlib_PATTERN   = 'lib%s.dll'
+		v.dstlib_PATTERN   = 'lib%s.a'
 	elif binfmt == 'mac-o':
-		v['dprogram_PATTERN'] = '%s'
-		v['dshlib_PATTERN']   = 'lib%s.dylib'
-		v['dstlib_PATTERN']   = 'lib%s.a'
+		v.dprogram_PATTERN = '%s'
+		v.dshlib_PATTERN   = 'lib%s.dylib'
+		v.dstlib_PATTERN   = 'lib%s.a'
 	else:
-		v['dprogram_PATTERN'] = '%s'
-		v['dshlib_PATTERN']   = 'lib%s.so'
-		v['dstlib_PATTERN']   = 'lib%s.a'
+		v.dprogram_PATTERN = '%s'
+		v.dshlib_PATTERN   = 'lib%s.so'
+		v.dstlib_PATTERN   = 'lib%s.a'
 
 DLIB = '''
 version(D_Version2) {
@@ -55,7 +55,8 @@ version(D_Version2) {
 @conf
 def check_dlibrary(self, execute=True):
 	"""
-	Detect the kind of standard library that comes with the compiler, will set conf.env.DLIBRARY to tango, phobos1 or phobos2.
+	Detects the kind of standard library that comes with the compiler,
+	and sets conf.env.DLIBRARY to tango, phobos1 or phobos2
 	"""
 	ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
 	if execute:

+ 16 - 14
sdk/waf/waflib/Tools/d_scan.py

@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2010 (ita)
+# Thomas Nagy, 2016-2018 (ita)
 
 """
 Provide a scanner for finding dependencies on d files
 """
 
 import re
-from waflib import Utils, Logs
+from waflib import Utils
 
 def filter_comments(filename):
 	"""
@@ -29,7 +29,8 @@ def filter_comments(filename):
 			i += 1
 			while i < max:
 				c = txt[i]
-				if c == delim: break
+				if c == delim:
+					break
 				elif c == '\\':  # skip the character following backslash
 					i += 1
 				i += 1
@@ -38,7 +39,8 @@ def filter_comments(filename):
 		elif c == '/':  # try to replace a comment with whitespace
 			buf.append(txt[begin:i])
 			i += 1
-			if i == max: break
+			if i == max:
+				break
 			c = txt[i]
 			if c == '+':  # eat nesting /+ +/ comment
 				i += 1
@@ -52,7 +54,8 @@ def filter_comments(filename):
 						c = None
 					elif prev == '+' and c == '/':
 						nesting -= 1
-						if nesting == 0: break
+						if nesting == 0:
+							break
 						c = None
 					i += 1
 			elif c == '*':  # eat /* */ comment
@@ -61,7 +64,8 @@ def filter_comments(filename):
 				while i < max:
 					prev = c
 					c = txt[i]
-					if prev == '*' and c == '/': break
+					if prev == '*' and c == '/':
+						break
 					i += 1
 			elif c == '/':  # eat // comment
 				i += 1
@@ -89,8 +93,8 @@ class d_parser(object):
 
 		self.allnames = []
 
-		self.re_module = re.compile("module\s+([^;]+)")
-		self.re_import = re.compile("import\s+([^;]+)")
+		self.re_module = re.compile(r"module\s+([^;]+)")
+		self.re_import = re.compile(r"import\s+([^;]+)")
 		self.re_import_bindings = re.compile("([^:]+):(.*)")
 		self.re_import_alias = re.compile("[^=]+=(.+)")
 
@@ -134,7 +138,7 @@ class d_parser(object):
 
 		mod_name = self.re_module.search(code)
 		if mod_name:
-			self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces
+			self.module = re.sub(r'\s+', '', mod_name.group(1)) # strip all whitespaces
 
 		# go through the code, have a look at all import occurrences
 
@@ -142,7 +146,7 @@ class d_parser(object):
 		import_iterator = self.re_import.finditer(code)
 		if import_iterator:
 			for import_match in import_iterator:
-				import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces
+				import_match_str = re.sub(r'\s+', '', import_match.group(1)) # strip all whitespaces
 
 				# does this end with an import bindings declaration?
 				# (import bindings always terminate the list of imports)
@@ -188,7 +192,8 @@ class d_parser(object):
 		names = self.get_strings(code) # obtain the import strings
 		for x in names:
 			# optimization
-			if x in self.allnames: continue
+			if x in self.allnames:
+				continue
 			self.allnames.append(x)
 
 			# for each name, see if it is like a node or not
@@ -202,8 +207,5 @@ def scan(self):
 	gruik.start(node)
 	nodes = gruik.nodes
 	names = gruik.names
-
-	if Logs.verbose:
-		Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(node), nodes, names))
 	return (nodes, names)
 

+ 7 - 7
sdk/waf/waflib/Tools/dbus.py

@@ -3,7 +3,7 @@
 # Ali Sabil, 2007
 
 """
-Compile dbus files with **dbus-binding-tool**
+Compiles dbus files with **dbus-binding-tool**
 
 Typical usage::
 
@@ -25,7 +25,7 @@ from waflib.TaskGen import taskgen_method, before_method
 @taskgen_method
 def add_dbus_file(self, filename, prefix, mode):
 	"""
-	Add a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.
+	Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.
 
 	:param filename: xml file to compile
 	:type filename: string
@@ -40,10 +40,10 @@ def add_dbus_file(self, filename, prefix, mode):
 		self.meths.append('process_dbus')
 	self.dbus_lst.append([filename, prefix, mode])
 
-@before_method('apply_core')
+@before_method('process_source')
 def process_dbus(self):
 	"""
-	Process the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
+	Processes the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
 	"""
 	for filename, prefix, mode in getattr(self, 'dbus_lst', []):
 		node = self.path.find_resource(filename)
@@ -55,7 +55,7 @@ def process_dbus(self):
 
 class dbus_binding_tool(Task.Task):
 	"""
-	Compile a dbus file
+	Compiles a dbus file
 	"""
 	color   = 'BLUE'
 	ext_out = ['.h']
@@ -64,7 +64,7 @@ class dbus_binding_tool(Task.Task):
 
 def configure(conf):
 	"""
-	Detect the program dbus-binding-tool and set the *conf.env.DBUS_BINDING_TOOL*
+	Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL``
 	"""
-	dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
+	conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
 

+ 24 - 32
sdk/waf/waflib/Tools/dmd.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2008-2010 (ita)
+# Thomas Nagy, 2008-2018 (ita)
 
 import sys
 from waflib.Tools import ar, d
@@ -10,62 +10,54 @@ from waflib.Configure import conf
 @conf
 def find_dmd(conf):
 	"""
-	Find the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
+	Finds the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
 	"""
 	conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')
 
 	# make sure that we're dealing with dmd1, dmd2, or ldc(1)
-	out = conf.cmd_and_log([conf.env.D, '--help'])
+	out = conf.cmd_and_log(conf.env.D + ['--help'])
 	if out.find("D Compiler v") == -1:
-		out = conf.cmd_and_log([conf.env.D, '-version'])
+		out = conf.cmd_and_log(conf.env.D + ['-version'])
 		if out.find("based on DMD v1.") == -1:
 			conf.fatal("detected compiler is not dmd/ldc")
 
 @conf
 def common_flags_ldc(conf):
 	"""
-	Set the D flags required by *ldc*
+	Sets the D flags required by *ldc*
 	"""
 	v = conf.env
-	v['DFLAGS']        = ['-d-version=Posix']
-	v['LINKFLAGS']     = []
-	v['DFLAGS_dshlib'] = ['-relocation-model=pic']
+	v.DFLAGS        = ['-d-version=Posix']
+	v.LINKFLAGS     = []
+	v.DFLAGS_dshlib = ['-relocation-model=pic']
 
 @conf
 def common_flags_dmd(conf):
 	"""
 	Set the flags required by *dmd* or *dmd2*
 	"""
-
 	v = conf.env
 
-	# _DFLAGS _DIMPORTFLAGS
-
-	# Compiler is dmd so 'gdc' part will be ignored, just
-	# ensure key is there, so wscript can append flags to it
-	#v['DFLAGS']            = ['-version=Posix']
-
-	v['D_SRC_F']           = ['-c']
-	v['D_TGT_F']           = '-of%s'
+	v.D_SRC_F           = ['-c']
+	v.D_TGT_F           = '-of%s'
 
-	# linker
-	v['D_LINKER']          = v['D']
-	v['DLNK_SRC_F']        = ''
-	v['DLNK_TGT_F']        = '-of%s'
-	v['DINC_ST']           = '-I%s'
+	v.D_LINKER          = v.D
+	v.DLNK_SRC_F        = ''
+	v.DLNK_TGT_F        = '-of%s'
+	v.DINC_ST           = '-I%s'
 
-	v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
-	v['DSTLIB_ST'] = v['DSHLIB_ST']         = '-L-l%s'
-	v['DSTLIBPATH_ST'] = v['DLIBPATH_ST']   = '-L-L%s'
+	v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
+	v.DSTLIB_ST = v.DSHLIB_ST         = '-L-l%s'
+	v.DSTLIBPATH_ST = v.DLIBPATH_ST   = '-L-L%s'
 
-	v['LINKFLAGS_dprogram']= ['-quiet']
+	v.LINKFLAGS_dprogram= ['-quiet']
 
-	v['DFLAGS_dshlib']     = ['-fPIC']
-	v['LINKFLAGS_dshlib']  = ['-L-shared']
+	v.DFLAGS_dshlib     = ['-fPIC']
+	v.LINKFLAGS_dshlib  = ['-L-shared']
 
-	v['DHEADER_ext']       = '.di'
+	v.DHEADER_ext       = '.di'
 	v.DFLAGS_d_with_header = ['-H', '-Hf']
-	v['D_HDR_F']           = '%s'
+	v.D_HDR_F           = '%s'
 
 def configure(conf):
 	"""
@@ -74,8 +66,8 @@ def configure(conf):
 	conf.find_dmd()
 
 	if sys.platform == 'win32':
-		out = conf.cmd_and_log([conf.env.D, '--help'])
-		if out.find("D Compiler v2.") > -1:
+		out = conf.cmd_and_log(conf.env.D + ['--help'])
+		if out.find('D Compiler v2.') > -1:
 			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
 
 	conf.load('ar')

+ 56 - 39
sdk/waf/waflib/Tools/errcheck.py

@@ -3,9 +3,9 @@
 # Thomas Nagy, 2011 (ita)
 
 """
-errcheck: highlight common mistakes
+Common mistakes highlighting.
 
-There is a performance hit, so this tool is only loaded when running "waf -v"
+There is a performance impact, so this tool is only loaded when running ``waf -v``
 """
 
 typos = {
@@ -18,12 +18,14 @@ typos = {
 'importpath':'includes',
 'installpath':'install_path',
 'iscopy':'is_copy',
+'uses':'use',
 }
 
 meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
 
+import sys
 from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils
-import waflib.Tools.ccroot
+from waflib.Tools import ccroot
 
 def check_same_targets(self):
 	mp = Utils.defaultdict(list)
@@ -32,6 +34,8 @@ def check_same_targets(self):
 	def check_task(tsk):
 		if not isinstance(tsk, Task.Task):
 			return
+		if hasattr(tsk, 'no_errcheck_out'):
+			return
 
 		for node in tsk.outputs:
 			mp[node].append(tsk)
@@ -57,44 +61,51 @@ def check_same_targets(self):
 			Logs.error(msg)
 			for x in v:
 				if Logs.verbose > 1:
-					Logs.error('  %d. %r' % (1 + v.index(x), x.generator))
+					Logs.error('  %d. %r', 1 + v.index(x), x.generator)
 				else:
-					Logs.error('  %d. %r in %r' % (1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None)))
+					Logs.error('  %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
+			Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')
 
 	if not dupe:
 		for (k, v) in uids.items():
 			if len(v) > 1:
-				Logs.error('* Several tasks use the same identifier. Please check the information on\n   http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid')
+				Logs.error('* Several tasks use the same identifier. Please check the information on\n   https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
+				tg_details = tsk.generator.name
+				if Logs.verbose > 2:
+					tg_details = tsk.generator
 				for tsk in v:
-					Logs.error('  - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
+					Logs.error('  - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)
 
 def check_invalid_constraints(self):
-	feat = set([])
+	feat = set()
 	for x in list(TaskGen.feats.values()):
 		feat.union(set(x))
 	for (x, y) in TaskGen.task_gen.prec.items():
 		feat.add(x)
 		feat.union(set(y))
-	ext = set([])
+	ext = set()
 	for x in TaskGen.task_gen.mappings.values():
 		ext.add(x.__name__)
 	invalid = ext & feat
 	if invalid:
-		Logs.error('The methods %r have invalid annotations:  @extension <-> @feature/@before_method/@after_method' % list(invalid))
+		Logs.error('The methods %r have invalid annotations:  @extension <-> @feature/@before_method/@after_method', list(invalid))
 
 	# the build scripts have been read, so we can check for invalid after/before attributes on task classes
 	for cls in list(Task.classes.values()):
+		if sys.hexversion > 0x3000000 and issubclass(cls, Task.Task) and isinstance(cls.hcode, str):
+			raise Errors.WafError('Class %r has hcode value %r of type <str>, expecting <bytes> (use Utils.h_cmd() ?)' % (cls, cls.hcode))
+
 		for x in ('before', 'after'):
 			for y in Utils.to_list(getattr(cls, x, [])):
-				if not Task.classes.get(y, None):
-					Logs.error('Erroneous order constraint %r=%r on task class %r' % (x, y, cls.__name__))
+				if not Task.classes.get(y):
+					Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__)
 		if getattr(cls, 'rule', None):
-			Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")' % cls.__name__)
+			Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__)
 
 def replace(m):
 	"""
-	We could add properties, but they would not work in some cases:
-	bld.program(...) requires 'source' in the attributes
+	Replaces existing BuildContext methods to verify parameter names,
+	for example ``bld(source=)`` has no ending *s*
 	"""
 	oldcall = getattr(Build.BuildContext, m)
 	def call(self, *k, **kw):
@@ -103,14 +114,13 @@ def replace(m):
 			if x in kw:
 				if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
 					continue
-				err = True
-				Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret))
+				Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret)
 		return ret
 	setattr(Build.BuildContext, m, call)
 
 def enhance_lib():
 	"""
-	modify existing classes and methods
+	Modifies existing classes and methods to enable error verification
 	"""
 	for m in meths_typos:
 		replace(m)
@@ -118,26 +128,36 @@ def enhance_lib():
 	# catch '..' in ant_glob patterns
 	def ant_glob(self, *k, **kw):
 		if k:
-			lst=Utils.to_list(k[0])
+			lst = Utils.to_list(k[0])
 			for pat in lst:
-				if '..' in pat.split('/'):
-					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0])
-		if kw.get('remove', True):
-			try:
-				if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
-					Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)' % self)
-			except AttributeError:
-				pass
+				sp = pat.split('/')
+				if '..' in sp:
+					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
+				if '.' in sp:
+					Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
 		return self.old_ant_glob(*k, **kw)
 	Node.Node.old_ant_glob = Node.Node.ant_glob
 	Node.Node.ant_glob = ant_glob
 
+	# catch ant_glob on build folders
+	def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
+		if remove:
+			try:
+				if self.is_child_of(self.ctx.bldnode) and not quiet:
+					quiet = True
+					Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self)
+			except AttributeError:
+				pass
+		return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet)
+	Node.Node.old_ant_iter = Node.Node.ant_iter
+	Node.Node.ant_iter = ant_iter
+
 	# catch conflicting ext_in/ext_out/before/after declarations
 	old = Task.is_before
 	def is_before(t1, t2):
 		ret = old(t1, t2)
 		if ret and old(t2, t1):
-			Logs.error('Contradictory order constraints in classes %r %r' % (t1, t2))
+			Logs.error('Contradictory order constraints in classes %r %r', t1, t2)
 		return ret
 	Task.is_before = is_before
 
@@ -149,7 +169,7 @@ def enhance_lib():
 			Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
 		for x in ('c', 'cxx', 'd', 'fc'):
 			if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
-				Logs.error('%r features is probably missing %r' % (self, x))
+				Logs.error('%r features is probably missing %r', self, x)
 	TaskGen.feature('*')(check_err_features)
 
 	# check for erroneous order constraints
@@ -157,12 +177,12 @@ def enhance_lib():
 		if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
 			for x in ('before', 'after', 'ext_in', 'ext_out'):
 				if hasattr(self, x):
-					Logs.warn('Erroneous order constraint %r on non-rule based task generator %r' % (x, self))
+					Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self)
 		else:
 			for x in ('before', 'after'):
 				for y in self.to_list(getattr(self, x, [])):
-					if not Task.classes.get(y, None):
-						Logs.error('Erroneous order constraint %s=%r on %r (no such class)' % (x, y, self))
+					if not Task.classes.get(y):
+						Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
 	TaskGen.feature('*')(check_err_order)
 
 	# check for @extension used with @feature/@before_method/@after_method
@@ -197,24 +217,21 @@ def enhance_lib():
 	TaskGen.task_gen.use_rec = use_rec
 
 	# check for env.append
-	def getattri(self, name, default=None):
+	def _getattr(self, name, default=None):
 		if name == 'append' or name == 'add':
 			raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
 		elif name == 'prepend':
 			raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
 		if name in self.__slots__:
-			return object.__getattr__(self, name, default)
+			return super(ConfigSet.ConfigSet, self).__getattr__(name, default)
 		else:
 			return self[name]
-	ConfigSet.ConfigSet.__getattr__ = getattri
+	ConfigSet.ConfigSet.__getattr__ = _getattr
 
 
 def options(opt):
 	"""
-	Add a few methods
+	Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options
 	"""
 	enhance_lib()
 
-def configure(conf):
-	pass
-

+ 55 - 51
sdk/waf/waflib/Tools/fc.py

@@ -1,48 +1,58 @@
 #! /usr/bin/env python
 # encoding: utf-8
 # DC 2008
-# Thomas Nagy 2010 (ita)
+# Thomas Nagy 2016-2018 (ita)
 
 """
-fortran support
+Fortran support
 """
 
-import re
-
-from waflib import Utils, Task, TaskGen, Logs
+from waflib import Utils, Task, Errors
 from waflib.Tools import ccroot, fc_config, fc_scan
-from waflib.TaskGen import feature, before_method, after_method, extension
+from waflib.TaskGen import extension
 from waflib.Configure import conf
 
-ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES'])
-ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
-ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
+ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS'])
+ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS'])
+ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS'])
 ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])
 
-@feature('fcprogram', 'fcshlib', 'fcstlib', 'fcprogram_test')
-def dummy(self):
-	pass
-
-@extension('.f', '.f90', '.F', '.F90', '.for', '.FOR')
+@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')
 def fc_hook(self, node):
-	"Bind the typical Fortran file extensions to the creation of a :py:class:`waflib.Tools.fc.fc` instance"
+	"Binds the Fortran file extensions create :py:class:`waflib.Tools.fc.fc` instances"
 	return self.create_compiled_task('fc', node)
 
 @conf
 def modfile(conf, name):
 	"""
-	Turn a module name into the right module file name.
+	Turns a module name into the right module file name.
 	Defaults to all lower case.
 	"""
-	return {'lower'     :name.lower() + '.mod',
-		'lower.MOD' :name.upper() + '.MOD',
-		'UPPER.mod' :name.upper() + '.mod',
-		'UPPER'     :name.upper() + '.MOD'}[conf.env.FC_MOD_CAPITALIZATION or 'lower']
+	if name.find(':') >= 0:
+		# Depending on a submodule!
+		separator = conf.env.FC_SUBMOD_SEPARATOR or '@'
+		# Ancestors of the submodule will be prefixed to the
+		# submodule name, separated by a colon.
+		modpath = name.split(':')
+		# Only the ancestor (actual) module and the submodule name
+		# will be used for the filename.
+		modname = modpath[0] + separator + modpath[-1]
+		suffix = conf.env.FC_SUBMOD_SUFFIX or '.smod'
+	else:
+		modname = name
+		suffix = '.mod'
+
+	return {'lower'     :modname.lower() + suffix.lower(),
+		'lower.MOD' :modname.lower() + suffix.upper(),
+		'UPPER.mod' :modname.upper() + suffix.lower(),
+		'UPPER'     :modname.upper() + suffix.upper()}[conf.env.FC_MOD_CAPITALIZATION or 'lower']
 
 def get_fortran_tasks(tsk):
 	"""
-	Obtain all other fortran tasks from the same build group. Those tasks must not have
+	Obtains all fortran tasks from the same build group. Those tasks must not have
 	the attribute 'nomod' or 'mod_fortran_done'
+
+	:return: a list of :py:class:`waflib.Tools.fc.fc` instances
 	"""
 	bld = tsk.generator.bld
 	tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
@@ -50,27 +60,24 @@ def get_fortran_tasks(tsk):
 
 class fc(Task.Task):
 	"""
-	The fortran tasks can only run when all fortran tasks in the current group are ready to be executed
-	This may cause a deadlock if another fortran task is waiting for something that cannot happen (circular dependency)
-	in this case, set the 'nomod=True' on those tasks instances to break the loop
+	Fortran tasks can only run when all fortran tasks in a current task group are ready to be executed
+	This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency)
+	Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop
 	"""
-
 	color = 'GREEN'
-	run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}'
+	run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}'
 	vars = ["FORTRANMODPATHFLAG"]
 
 	def scan(self):
-		"""scanner for fortran dependencies"""
+		"""Fortran dependency scanner"""
 		tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
 		tmp.task = self
 		tmp.start(self.inputs[0])
-		if Logs.verbose:
-			Logs.debug('deps: deps for %r: %r; unresolved %r' % (self.inputs, tmp.nodes, tmp.names))
 		return (tmp.nodes, tmp.names)
 
 	def runnable_status(self):
 		"""
-		Set the mod file outputs and the dependencies on the mod files over all the fortran tasks
+		Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks
 		executed by the main thread so there are no concurrency issues
 		"""
 		if getattr(self, 'mod_fortran_done', None):
@@ -92,12 +99,11 @@ class fc(Task.Task):
 			ret = tsk.runnable_status()
 			if ret == Task.ASK_LATER:
 				# we have to wait for one of the other fortran tasks to be ready
-				# this may deadlock if there are dependencies between the fortran tasks
+				# this may deadlock if there are dependencies between fortran tasks
 				# but this should not happen (we are setting them here!)
 				for x in lst:
 					x.mod_fortran_done = None
 
-				# TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end
 				return Task.ASK_LATER
 
 		ins = Utils.defaultdict(set)
@@ -111,7 +117,7 @@ class fc(Task.Task):
 					name = bld.modfile(x.replace('MOD@', ''))
 					node = bld.srcnode.find_or_declare(name)
 					tsk.set_outputs(node)
-					outs[id(node)].add(tsk)
+					outs[node].add(tsk)
 
 		# the .mod files to use
 		for tsk in lst:
@@ -123,12 +129,14 @@ class fc(Task.Task):
 					if node and node not in tsk.outputs:
 						if not node in bld.node_deps[key]:
 							bld.node_deps[key].append(node)
-						ins[id(node)].add(tsk)
+						ins[node].add(tsk)
 
 		# if the intersection matches, set the order
 		for k in ins.keys():
 			for a in ins[k]:
 				a.run_after.update(outs[k])
+				for x in outs[k]:
+					self.generator.bld.producer.revdeps[x].add(a)
 
 				# the scanner cannot output nodes, so we have to set them
 				# ourselves as task.dep_nodes (additional input nodes)
@@ -148,21 +156,21 @@ class fc(Task.Task):
 		return super(fc, self).runnable_status()
 
 class fcprogram(ccroot.link_task):
-	"""Link fortran programs"""
+	"""Links Fortran programs"""
 	color = 'YELLOW'
-	run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB}'
+	run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
 	inst_to = '${BINDIR}'
 
 class fcshlib(fcprogram):
-	"""Link fortran libraries"""
+	"""Links Fortran libraries"""
 	inst_to = '${LIBDIR}'
 
-class fcprogram_test(fcprogram):
-	"""Custom link task to obtain the compiler outputs for fortran configuration tests"""
+class fcstlib(ccroot.stlink_task):
+	"""Links Fortran static libraries (uses ar by default)"""
+	pass # do not remove the pass statement
 
-	def can_retrieve_cache(self):
-		"""This task is always executed"""
-		return False
+class fcprogram_test(fcprogram):
+	"""Custom link task to obtain compiler outputs for Fortran configuration tests"""
 
 	def runnable_status(self):
 		"""This task is always executed"""
@@ -172,12 +180,12 @@ class fcprogram_test(fcprogram):
 		return ret
 
 	def exec_command(self, cmd, **kw):
-		"""Store the compiler std our/err onto the build context, to bld.out + bld.err"""
+		"""Stores the compiler std our/err onto the build context, to bld.out + bld.err"""
 		bld = self.generator.bld
 
 		kw['shell'] = isinstance(cmd, str)
 		kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
-		kw['cwd'] = bld.variant_dir
+		kw['cwd'] = self.get_cwd()
 		bld.out = bld.err = ''
 
 		bld.to_log('command: %s\n' % cmd)
@@ -185,15 +193,11 @@ class fcprogram_test(fcprogram):
 		kw['output'] = 0
 		try:
 			(bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
-		except Exception as e:
+		except Errors.WafError:
 			return -1
 
 		if bld.out:
-			bld.to_log("out: %s\n" % bld.out)
+			bld.to_log('out: %s\n' % bld.out)
 		if bld.err:
-			bld.to_log("err: %s\n" % bld.err)
-
-class fcstlib(ccroot.stlink_task):
-	"""Link fortran static libraries (uses ar by default)"""
-	pass # do not remove the pass statement
+			bld.to_log('err: %s\n' % bld.err)
 

+ 161 - 141
sdk/waf/waflib/Tools/fc_config.py

@@ -1,16 +1,15 @@
 #! /usr/bin/env python
 # encoding: utf-8
 # DC 2008
-# Thomas Nagy 2010 (ita)
+# Thomas Nagy 2016-2018 (ita)
 
 """
 Fortran configuration helpers
 """
 
-import re, shutil, os, sys, string, shlex
+import re, os, sys, shlex
 from waflib.Configure import conf
-from waflib.TaskGen import feature, after_method, before_method
-from waflib import Build, Utils
+from waflib.TaskGen import feature, before_method
 
 FC_FRAGMENT = '        program main\n        end     program main\n'
 FC_FRAGMENT2 = '        PROGRAM MAIN\n        END\n' # what's the actual difference between these?
@@ -18,45 +17,51 @@ FC_FRAGMENT2 = '        PROGRAM MAIN\n        END\n' # what's the actual differe
 @conf
 def fc_flags(conf):
 	"""
-	Define common fortran configuration flags and file extensions
+	Defines common fortran configuration flags and file extensions
 	"""
 	v = conf.env
 
-	v['FC_SRC_F']    = []
-	v['FC_TGT_F']    = ['-c', '-o']
-	v['FCINCPATH_ST']  = '-I%s'
-	v['FCDEFINES_ST']  = '-D%s'
+	v.FC_SRC_F    = []
+	v.FC_TGT_F    = ['-c', '-o']
+	v.FCINCPATH_ST  = '-I%s'
+	v.FCDEFINES_ST  = '-D%s'
 
-	if not v['LINK_FC']: v['LINK_FC'] = v['FC']
-	v['FCLNK_SRC_F'] = []
-	v['FCLNK_TGT_F'] = ['-o']
+	if not v.LINK_FC:
+		v.LINK_FC = v.FC
 
-	v['FCFLAGS_fcshlib']   = ['-fpic']
-	v['LINKFLAGS_fcshlib'] = ['-shared']
-	v['fcshlib_PATTERN']   = 'lib%s.so'
+	v.FCLNK_SRC_F = []
+	v.FCLNK_TGT_F = ['-o']
 
-	v['fcstlib_PATTERN']   = 'lib%s.a'
+	v.FCFLAGS_fcshlib   = ['-fpic']
+	v.LINKFLAGS_fcshlib = ['-shared']
+	v.fcshlib_PATTERN   = 'lib%s.so'
 
-	v['FCLIB_ST']       = '-l%s'
-	v['FCLIBPATH_ST']   = '-L%s'
-	v['FCSTLIB_ST']     = '-l%s'
-	v['FCSTLIBPATH_ST'] = '-L%s'
-	v['FCSTLIB_MARKER'] = '-Wl,-Bstatic'
-	v['FCSHLIB_MARKER'] = '-Wl,-Bdynamic'
+	v.fcstlib_PATTERN   = 'lib%s.a'
 
-	v['SONAME_ST']           = '-Wl,-h,%s'
+	v.FCLIB_ST       = '-l%s'
+	v.FCLIBPATH_ST   = '-L%s'
+	v.FCSTLIB_ST     = '-l%s'
+	v.FCSTLIBPATH_ST = '-L%s'
+	v.FCSTLIB_MARKER = '-Wl,-Bstatic'
+	v.FCSHLIB_MARKER = '-Wl,-Bdynamic'
+
+	v.SONAME_ST      = '-Wl,-h,%s'
 
 @conf
 def fc_add_flags(conf):
 	"""
-	FCFLAGS?
+	Adds FCFLAGS / LDFLAGS / LINKFLAGS from os.environ to conf.env
 	"""
-	conf.add_os_flags('FCFLAGS')
-	conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
+	conf.add_os_flags('FCPPFLAGS', dup=False)
+	conf.add_os_flags('FCFLAGS', dup=False)
+	conf.add_os_flags('LINKFLAGS', dup=False)
+	conf.add_os_flags('LDFLAGS', dup=False)
 
 @conf
 def check_fortran(self, *k, **kw):
-	"""See if the fortran compiler works by compiling a simple fortran program"""
+	"""
+	Compiles a Fortran program to ensure that the settings are correct
+	"""
 	self.check_cc(
 		fragment         = FC_FRAGMENT,
 		compile_filename = 'test.f',
@@ -66,8 +71,8 @@ def check_fortran(self, *k, **kw):
 @conf
 def check_fc(self, *k, **kw):
 	"""
-	Same as :py:func:`waflib.Tools.c_config.check` but default to the *Fortran* programming language
-	(Overriding the C defaults in :py:func:`waflib.Tools.c_config.validate_c` here)
+	Same as :py:func:`waflib.Tools.c_config.check` but defaults to the *Fortran* programming language
+	(this overrides the C defaults in :py:func:`waflib.Tools.c_config.validate_c`)
 	"""
 	kw['compiler'] = 'fc'
 	if not 'compile_mode' in kw:
@@ -88,35 +93,34 @@ def check_fc(self, *k, **kw):
 @conf
 def fortran_modifier_darwin(conf):
 	"""
-	Define fortran flags and extensions for the OSX systems
+	Defines Fortran flags and extensions for OSX systems
 	"""
 	v = conf.env
-	v['FCFLAGS_fcshlib']   = ['-fPIC']
-	v['LINKFLAGS_fcshlib'] = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
-	v['fcshlib_PATTERN']   = 'lib%s.dylib'
-	v['FRAMEWORKPATH_ST']  = '-F%s'
-	v['FRAMEWORK_ST']      = '-framework %s'
-
-	v['LINKFLAGS_fcstlib'] = []
+	v.FCFLAGS_fcshlib   = ['-fPIC']
+	v.LINKFLAGS_fcshlib = ['-dynamiclib']
+	v.fcshlib_PATTERN   = 'lib%s.dylib'
+	v.FRAMEWORKPATH_ST  = '-F%s'
+	v.FRAMEWORK_ST      = ['-framework']
 
-	v['FCSHLIB_MARKER']    = ''
-	v['FCSTLIB_MARKER']    = ''
-	v['SONAME_ST']         = ''
+	v.LINKFLAGS_fcstlib = []
 
+	v.FCSHLIB_MARKER    = ''
+	v.FCSTLIB_MARKER    = ''
+	v.SONAME_ST         = ''
 
 @conf
 def fortran_modifier_win32(conf):
-	"""Define fortran flags for the windows platforms"""
+	"""
+	Defines Fortran flags for Windows platforms
+	"""
 	v = conf.env
-	v['fcprogram_PATTERN'] = v['fcprogram_test_PATTERN']  = '%s.exe'
-
-	v['fcshlib_PATTERN']   = '%s.dll'
-	v['implib_PATTERN']    = 'lib%s.dll.a'
-	v['IMPLIB_ST']         = '-Wl,--out-implib,%s'
+	v.fcprogram_PATTERN = v.fcprogram_test_PATTERN  = '%s.exe'
 
-	v['FCFLAGS_fcshlib']   = []
+	v.fcshlib_PATTERN   = '%s.dll'
+	v.implib_PATTERN    = '%s.dll.a'
+	v.IMPLIB_ST         = '-Wl,--out-implib,%s'
 
-	v.append_value('FCFLAGS_fcshlib', ['-DDLL_EXPORT']) # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
+	v.FCFLAGS_fcshlib   = []
 
 	# Auto-import is enabled by default even without this option,
 	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
@@ -125,27 +129,23 @@ def fortran_modifier_win32(conf):
 
 @conf
 def fortran_modifier_cygwin(conf):
-	"""Define fortran flags for use on cygwin"""
+	"""
+	Defines Fortran flags for use on cygwin
+	"""
 	fortran_modifier_win32(conf)
 	v = conf.env
-	v['fcshlib_PATTERN'] = 'cyg%s.dll'
+	v.fcshlib_PATTERN = 'cyg%s.dll'
 	v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base'])
-	v['FCFLAGS_fcshlib'] = []
+	v.FCFLAGS_fcshlib = []
+
 # ------------------------------------------------------------------------
 
 @conf
 def check_fortran_dummy_main(self, *k, **kw):
 	"""
-	Guess if a main function is needed by compiling a code snippet with
-	the C compiler and link with the Fortran compiler
-
-	TODO: (DC)
-	- handling dialects (F77, F90, etc... -> needs core support first)
-	- fix dummy main check (AC_FC_DUMMY_MAIN vs AC_FC_MAIN)
-
-	TODO: what does the above mean? (ita)
+	Determines if a main function is needed by compiling a code snippet with
+	the C compiler and linking it with the Fortran compiler (useful on unix-like systems)
 	"""
-
 	if not self.env.CC:
 		self.fatal('A c compiler is required for check_fortran_dummy_main')
 
@@ -178,12 +178,12 @@ def check_fortran_dummy_main(self, *k, **kw):
 # ------------------------------------------------------------------------
 
 GCC_DRIVER_LINE = re.compile('^Driving:')
-POSIX_STATIC_EXT = re.compile('\S+\.a')
-POSIX_LIB_FLAGS = re.compile('-l\S+')
+POSIX_STATIC_EXT = re.compile(r'\S+\.a')
+POSIX_LIB_FLAGS = re.compile(r'-l\S+')
 
 @conf
 def is_link_verbose(self, txt):
-	"""Return True if 'useful' link options can be found in txt"""
+	"""Returns True if 'useful' link options can be found in txt"""
 	assert isinstance(txt, str)
 	for line in txt.splitlines():
 		if not GCC_DRIVER_LINE.search(line):
@@ -194,18 +194,17 @@ def is_link_verbose(self, txt):
 @conf
 def check_fortran_verbose_flag(self, *k, **kw):
 	"""
-	Check what kind of verbose (-v) flag works, then set it to env.FC_VERBOSE_FLAG
+	Checks what kind of verbose (-v) flag works, then sets it to env.FC_VERBOSE_FLAG
 	"""
 	self.start_msg('fortran link verbose flag')
-	for x in ['-v', '--verbose', '-verbose', '-V']:
+	for x in ('-v', '--verbose', '-verbose', '-V'):
 		try:
 			self.check_cc(
 				features = 'fc fcprogram_test',
 				fragment = FC_FRAGMENT2,
 				compile_filename = 'test.f',
 				linkflags = [x],
-				mandatory=True
-				)
+				mandatory=True)
 		except self.errors.ConfigurationError:
 			pass
 		else:
@@ -231,7 +230,7 @@ else:
 RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED]
 
 def _match_ignore(line):
-	"""Returns True if the line should be ignored (fortran test for verbosity)."""
+	"""Returns True if the line should be ignored (Fortran verbose flag test)"""
 	for i in RLINKFLAGS_IGNORED:
 		if i.match(line):
 			return True
@@ -240,7 +239,6 @@ def _match_ignore(line):
 def parse_fortran_link(lines):
 	"""Given the output of verbose link of Fortran compiler, this returns a
 	list of flags necessary for linking using the standard linker."""
-	# TODO: On windows ?
 	final_flags = []
 	for line in lines:
 		if not GCC_DRIVER_LINE.match(line):
@@ -250,6 +248,45 @@ def parse_fortran_link(lines):
 SPACE_OPTS = re.compile('^-[LRuYz]$')
 NOSPACE_OPTS = re.compile('^-[RL]')
 
+def _parse_flink_token(lexer, token, tmp_flags):
+	# Here we go (convention for wildcard is shell, not regex !)
+	#   1 TODO: we first get some root .a libraries
+	#   2 TODO: take everything starting by -bI:*
+	#   3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
+	#   -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
+	#   4 take into account -lkernel32
+	#   5 For options of the kind -[[LRuYz]], as they take one argument
+	#   after, the actual option is the next token
+	#   6 For -YP,*: take and replace by -Larg where arg is the old
+	#   argument
+	#   7 For -[lLR]*: take
+
+	# step 3
+	if _match_ignore(token):
+		pass
+	# step 4
+	elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
+		tmp_flags.append(token)
+	# step 5
+	elif SPACE_OPTS.match(token):
+		t = lexer.get_token()
+		if t.startswith('P,'):
+			t = t[2:]
+		for opt in t.split(os.pathsep):
+			tmp_flags.append('-L%s' % opt)
+	# step 6
+	elif NOSPACE_OPTS.match(token):
+		tmp_flags.append(token)
+	# step 7
+	elif POSIX_LIB_FLAGS.match(token):
+		tmp_flags.append(token)
+	else:
+		# ignore anything not explicitly taken into account
+		pass
+
+	t = lexer.get_token()
+	return t
+
 def _parse_flink_line(line, final_flags):
 	"""private"""
 	lexer = shlex.shlex(line, posix = True)
@@ -258,45 +295,7 @@ def _parse_flink_line(line, final_flags):
 	t = lexer.get_token()
 	tmp_flags = []
 	while t:
-		def parse(token):
-			# Here we go (convention for wildcard is shell, not regex !)
-			#   1 TODO: we first get some root .a libraries
-			#   2 TODO: take everything starting by -bI:*
-			#   3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
-			#   -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
-			#   4 take into account -lkernel32
-			#   5 For options of the kind -[[LRuYz]], as they take one argument
-			#   after, the actual option is the next token
-			#   6 For -YP,*: take and replace by -Larg where arg is the old
-			#   argument
-			#   7 For -[lLR]*: take
-
-			# step 3
-			if _match_ignore(token):
-				pass
-			# step 4
-			elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
-				tmp_flags.append(token)
-			# step 5
-			elif SPACE_OPTS.match(token):
-				t = lexer.get_token()
-				if t.startswith('P,'):
-					t = t[2:]
-				for opt in t.split(os.pathsep):
-					tmp_flags.append('-L%s' % opt)
-			# step 6
-			elif NOSPACE_OPTS.match(token):
-				tmp_flags.append(token)
-			# step 7
-			elif POSIX_LIB_FLAGS.match(token):
-				tmp_flags.append(token)
-			else:
-				# ignore anything not explicitely taken into account
-				pass
-
-			t = lexer.get_token()
-			return t
-		t = parse(t)
+		t = _parse_flink_token(lexer, t, tmp_flags)
 
 	final_flags.extend(tmp_flags)
 	return final_flags
@@ -304,7 +303,7 @@ def _parse_flink_line(line, final_flags):
 @conf
 def check_fortran_clib(self, autoadd=True, *k, **kw):
 	"""
-	Obtain the flags for linking with the C library
+	Obtains the flags for linking with the C library
 	if this check works, add uselib='CLIB' to your task generators
 	"""
 	if not self.env.FC_VERBOSE_FLAG:
@@ -332,24 +331,30 @@ def check_fortran_clib(self, autoadd=True, *k, **kw):
 
 def getoutput(conf, cmd, stdin=False):
 	"""
-	TODO a bit redundant, can be removed anytime
+	Obtains Fortran command outputs
 	"""
-	if stdin:
-		stdin = Utils.subprocess.PIPE
+	from waflib import Errors
+	if conf.env.env:
+		env = conf.env.env
 	else:
-		stdin = None
-	env = conf.env.env or None
+		env = dict(os.environ)
+		env['LANG'] = 'C'
+	input = stdin and '\n'.encode() or None
 	try:
-		p = Utils.subprocess.Popen(cmd, stdin=stdin, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=env)
-		if stdin:
-			p.stdin.write('\n'.encode())
-		out, err = p.communicate()
+		out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input)
+	except Errors.WafError as e:
+		# An WafError might indicate an error code during the command
+		# execution, in this case we still obtain the stderr and stdout,
+		# which we can use to find the version string.
+		if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')):
+			raise e
+		else:
+			# Ignore the return code and return the original
+			# stdout and stderr.
+			out = e.stdout
+			err = e.stderr
 	except Exception:
 		conf.fatal('could not determine the compiler version %r' % cmd)
-	if not isinstance(out, str):
-		out = out.decode(sys.stdout.encoding or 'iso8859-1')
-	if not isinstance(err, str):
-		err = err.decode(sys.stdout.encoding or 'iso8859-1')
 	return (out, err)
 
 # ------------------------------------------------------------------------
@@ -394,9 +399,9 @@ def mangling_schemes():
 	(used in check_fortran_mangling)
 	the order is tuned for gfortan
 	"""
-	for u in ['_', '']:
-		for du in ['', '_']:
-			for c in ["lower", "upper"]:
+	for u in ('_', ''):
+		for du in ('', '_'):
+			for c in ("lower", "upper"):
 				yield (u, du, c)
 
 def mangle_name(u, du, c, name):
@@ -421,13 +426,12 @@ def check_fortran_mangling(self, *k, **kw):
 	for (u, du, c) in mangling_schemes():
 		try:
 			self.check_cc(
-				compile_filename = [],
-				features         = 'link_main_routines_func',
-				msg = 'nomsg',
-				errmsg = 'nomsg',
-				mandatory=True,
-				dummy_func_nounder = mangle_name(u, du, c, "foobar"),
-				dummy_func_under   = mangle_name(u, du, c, "foo_bar"),
+				compile_filename   = [],
+				features           = 'link_main_routines_func',
+				msg                = 'nomsg',
+				errmsg             = 'nomsg',
+				dummy_func_nounder = mangle_name(u, du, c, 'foobar'),
+				dummy_func_under   = mangle_name(u, du, c, 'foo_bar'),
 				main_func_name     = self.env.FC_MAIN
 			)
 		except self.errors.ConfigurationError:
@@ -439,25 +443,27 @@ def check_fortran_mangling(self, *k, **kw):
 	else:
 		self.end_msg(False)
 		self.fatal('mangler not found')
-
 	return (u, du, c)
 
 @feature('pyext')
 @before_method('propagate_uselib_vars', 'apply_link')
 def set_lib_pat(self):
-	"""Set the fortran flags for linking with the python library"""
-	self.env['fcshlib_PATTERN'] = self.env['pyext_PATTERN']
+	"""Sets the Fortran flags for linking with Python"""
+	self.env.fcshlib_PATTERN = self.env.pyext_PATTERN
 
 @conf
 def detect_openmp(self):
-	for x in ['-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp']:
+	"""
+	Detects openmp flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS``
+	"""
+	for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
 		try:
 			self.check_fc(
-				msg='Checking for OpenMP flag %s' % x,
-				fragment='program main\n  call omp_get_num_threads()\nend program main',
-				fcflags=x,
-				linkflags=x,
-				uselib_store='OPENMP'
+				msg          = 'Checking for OpenMP flag %s' % x,
+				fragment     = 'program main\n  call omp_get_num_threads()\nend program main',
+				fcflags      = x,
+				linkflags    = x,
+				uselib_store = 'OPENMP'
 			)
 		except self.errors.ConfigurationError:
 			pass
@@ -466,3 +472,17 @@ def detect_openmp(self):
 	else:
 		self.fatal('Could not find OpenMP')
 
+@conf
+def check_gfortran_o_space(self):
+	if self.env.FC_NAME != 'GFORTRAN' or int(self.env.FC_VERSION[0]) > 4:
+		# This is for old compilers and only for gfortran.
+		# No idea how other implementations handle this. Be safe and bail out.
+		return
+	self.env.stash()
+	self.env.FCLNK_TGT_F = ['-o', '']
+	try:
+		self.check_fc(msg='Checking if the -o link must be split from arguments', fragment=FC_FRAGMENT, features='fc fcshlib')
+	except self.errors.ConfigurationError:
+		self.env.revert()
+	else:
+		self.env.commit()

+ 18 - 19
sdk/waf/waflib/Tools/fc_scan.py

@@ -1,31 +1,28 @@
 #! /usr/bin/env python
 # encoding: utf-8
 # DC 2008
-# Thomas Nagy 2010 (ita)
+# Thomas Nagy 2016-2018 (ita)
 
 import re
 
-from waflib import Utils, Task, TaskGen, Logs
-from waflib.TaskGen import feature, before_method, after_method, extension
-from waflib.Configure import conf
-
-INC_REGEX = """(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
-USE_REGEX = """(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
-MOD_REGEX = """(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
+INC_REGEX = r"""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
+USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
+MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s+(?:PROCEDURE|SUBROUTINE|FUNCTION))\s+(\w+)"""
+SMD_REGEX = r"""(?:^|;)\s*SUBMODULE\s*\(([\w:]+)\)\s*(\w+)"""
 
 re_inc = re.compile(INC_REGEX, re.I)
 re_use = re.compile(USE_REGEX, re.I)
 re_mod = re.compile(MOD_REGEX, re.I)
+re_smd = re.compile(SMD_REGEX, re.I)
 
 class fortran_parser(object):
 	"""
-	This parser will return:
+	This parser returns:
 
-	* the nodes corresponding to the module names that will be produced
+	* the nodes corresponding to the module names to produce
 	* the nodes corresponding to the include files used
-	* the module names used by the fortran file
+	* the module names used by the fortran files
 	"""
-
 	def __init__(self, incpaths):
 		self.seen = []
 		"""Files already parsed"""
@@ -41,7 +38,7 @@ class fortran_parser(object):
 
 	def find_deps(self, node):
 		"""
-		Parse a fortran file to read the dependencies used and provided
+		Parses a Fortran file to obtain the dependencies used/provided
 
 		:param node: fortran file to read
 		:type node: :py:class:`waflib.Node.Node`
@@ -63,11 +60,15 @@ class fortran_parser(object):
 			m = re_mod.search(line)
 			if m:
 				mods.append(m.group(1))
+			m = re_smd.search(line)
+			if m:
+				uses.append(m.group(1))
+				mods.append('{0}:{1}'.format(m.group(1),m.group(2)))
 		return (incs, uses, mods)
 
 	def start(self, node):
 		"""
-		Start the parsing. Use the stack self.waiting to hold the nodes to iterate on
+		Start parsing. Use the stack ``self.waiting`` to hold nodes to iterate on
 
 		:param node: fortran file
 		:type node: :py:class:`waflib.Node.Node`
@@ -79,10 +80,9 @@ class fortran_parser(object):
 
 	def iter(self, node):
 		"""
-		Process a single file in the search for dependencies, extract the files used
-		the modules used, and the modules provided.
+		Processes a single file during dependency parsing. Extracts files used
+		modules used and modules provided.
 		"""
-		path = node.abspath()
 		incs, uses, mods = self.find_deps(node)
 		for x in incs:
 			if x in self.seen:
@@ -102,7 +102,7 @@ class fortran_parser(object):
 
 	def tryfind_header(self, filename):
 		"""
-		Try to find an include and add it the nodes to process
+		Adds an include file to the list of nodes to process
 
 		:param filename: file name
 		:type filename: string
@@ -118,4 +118,3 @@ class fortran_parser(object):
 			if not filename in self.names:
 				self.names.append(filename)
 
-

+ 18 - 8
sdk/waf/waflib/Tools/flex.py

@@ -1,14 +1,16 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # John O'Meara, 2006
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 
 """
 The **flex** program is a code generator which creates C or C++ files.
 The generated files are compiled into object files.
 """
 
-import waflib.TaskGen, os, re
+import os, re
+from waflib import Task, TaskGen
+from waflib.Tools import ccroot
 
 def decide_ext(self, node):
 	if 'cxx' in self.features:
@@ -20,12 +22,13 @@ def flexfun(tsk):
 	bld = tsk.generator.bld
 	wd = bld.variant_dir
 	def to_list(xx):
-		if isinstance(xx, str): return [xx]
+		if isinstance(xx, str):
+			return [xx]
 		return xx
 	tsk.last_cmd = lst = []
-	lst.extend(to_list(env['FLEX']))
-	lst.extend(to_list(env['FLEXFLAGS']))
-	inputs = [a.path_from(bld.bldnode) for a in tsk.inputs]
+	lst.extend(to_list(env.FLEX))
+	lst.extend(to_list(env.FLEXFLAGS))
+	inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs]
 	if env.FLEX_MSYS:
 		inputs = [x.replace(os.sep, '/') for x in inputs]
 	lst.extend(inputs)
@@ -33,13 +36,19 @@ def flexfun(tsk):
 	txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
 	tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207
 
-waflib.TaskGen.declare_chain(
+TaskGen.declare_chain(
 	name = 'flex',
 	rule = flexfun, # issue #854
 	ext_in = '.l',
 	decider = decide_ext,
 )
 
+# To support the following:
+# bld(features='c', flexflags='-P/foo')
+Task.classes['flex'].vars = ['FLEXFLAGS', 'FLEX']
+ccroot.USELIB_VARS['c'].add('FLEXFLAGS')
+ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS')
+
 def configure(conf):
 	"""
 	Detect the *flex* program
@@ -47,6 +56,7 @@ def configure(conf):
 	conf.find_program('flex', var='FLEX')
 	conf.env.FLEXFLAGS = ['-t']
 
-	if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX):
+	if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]):
 		# this is the flex shipped with MSYS
 		conf.env.FLEX_MSYS = True
+

+ 6 - 7
sdk/waf/waflib/Tools/g95.py

@@ -1,7 +1,7 @@
 #! /usr/bin/env python
 # encoding: utf-8
 # KWS 2010
-# Thomas Nagy 2010 (ita)
+# Thomas Nagy 2016-2018 (ita)
 
 import re
 from waflib import Utils
@@ -11,16 +11,15 @@ from waflib.Configure import conf
 @conf
 def find_g95(conf):
 	fc = conf.find_program('g95', var='FC')
-	fc = conf.cmd_to_list(fc)
 	conf.get_g95_version(fc)
 	conf.env.FC_NAME = 'G95'
 
 @conf
 def g95_flags(conf):
 	v = conf.env
-	v['FCFLAGS_fcshlib']   = ['-fPIC']
-	v['FORTRANMODFLAG']  = ['-fmod=', ''] # template for module path
-	v['FCFLAGS_DEBUG'] = ['-Werror'] # why not
+	v.FCFLAGS_fcshlib   = ['-fPIC']
+	v.FORTRANMODFLAG  = ['-fmod=', ''] # template for module path
+	v.FCFLAGS_DEBUG = ['-Werror'] # why not
 
 @conf
 def g95_modifier_win32(conf):
@@ -36,7 +35,7 @@ def g95_modifier_darwin(conf):
 
 @conf
 def g95_modifier_platform(conf):
-	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
 	g95_modifier_func = getattr(conf, 'g95_modifier_' + dest_os, None)
 	if g95_modifier_func:
 		g95_modifier_func()
@@ -55,7 +54,7 @@ def get_g95_version(conf, fc):
 	if not match:
 		conf.fatal('cannot determine g95 version')
 	k = match.groupdict()
-	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+	conf.env.FC_VERSION = (k['major'], k['minor'])
 
 def configure(conf):
 	conf.find_g95()

+ 2 - 1
sdk/waf/waflib/Tools/gas.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2008-2010 (ita)
+# Thomas Nagy, 2008-2018 (ita)
 
 "Detect as/gas/gcc for compiling assembly files"
 
@@ -16,3 +16,4 @@ def configure(conf):
 	conf.env.ASLNK_TGT_F = ['-o']
 	conf.find_ar()
 	conf.load('asm')
+	conf.env.ASM_NAME = 'gas'

+ 63 - 59
sdk/waf/waflib/Tools/gcc.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 # Ralf Habacker, 2006 (rh)
 # Yinon Ehrlich, 2009
 
@@ -8,8 +8,6 @@
 gcc/llvm detection.
 """
 
-import os, sys
-from waflib import Configure, Options, Utils
 from waflib.Tools import ccroot, ar
 from waflib.Configure import conf
 
@@ -19,10 +17,8 @@ def find_gcc(conf):
 	Find the program gcc, and if present, try to detect its version number
 	"""
 	cc = conf.find_program(['gcc', 'cc'], var='CC')
-	cc = conf.cmd_to_list(cc)
 	conf.get_cc_version(cc, gcc=True)
 	conf.env.CC_NAME = 'gcc'
-	conf.env.CC      = cc
 
 @conf
 def gcc_common_flags(conf):
@@ -31,54 +27,51 @@ def gcc_common_flags(conf):
 	"""
 	v = conf.env
 
-	v['CC_SRC_F']            = []
-	v['CC_TGT_F']            = ['-c', '-o']
+	v.CC_SRC_F            = []
+	v.CC_TGT_F            = ['-c', '-o']
 
-	# linker
-	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
-	v['CCLNK_SRC_F']         = []
-	v['CCLNK_TGT_F']         = ['-o']
-	v['CPPPATH_ST']          = '-I%s'
-	v['DEFINES_ST']          = '-D%s'
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
 
-	v['LIB_ST']              = '-l%s' # template for adding libs
-	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-	v['STLIB_ST']            = '-l%s'
-	v['STLIBPATH_ST']        = '-L%s'
-	v['RPATH_ST']            = '-Wl,-rpath,%s'
+	v.CCLNK_SRC_F         = []
+	v.CCLNK_TGT_F         = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
 
-	v['SONAME_ST']           = '-Wl,-h,%s'
-	v['SHLIB_MARKER']        = '-Wl,-Bdynamic'
-	v['STLIB_MARKER']        = '-Wl,-Bstatic'
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
 
-	# program
-	v['cprogram_PATTERN']    = '%s'
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Wl,-Bdynamic'
+	v.STLIB_MARKER        = '-Wl,-Bstatic'
 
-	# shared librar
-	v['CFLAGS_cshlib']       = ['-fPIC']
-	v['LINKFLAGS_cshlib']    = ['-shared']
-	v['cshlib_PATTERN']      = 'lib%s.so'
+	v.cprogram_PATTERN    = '%s'
 
-	# static lib
-	v['LINKFLAGS_cstlib']    = ['-Wl,-Bstatic']
-	v['cstlib_PATTERN']      = 'lib%s.a'
+	v.CFLAGS_cshlib       = ['-fPIC']
+	v.LINKFLAGS_cshlib    = ['-shared']
+	v.cshlib_PATTERN      = 'lib%s.so'
 
-	# osx stuff
-	v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
-	v['CFLAGS_MACBUNDLE']    = ['-fPIC']
-	v['macbundle_PATTERN']   = '%s.bundle'
+	v.LINKFLAGS_cstlib    = ['-Wl,-Bstatic']
+	v.cstlib_PATTERN      = 'lib%s.a'
+
+	v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
+	v.CFLAGS_MACBUNDLE    = ['-fPIC']
+	v.macbundle_PATTERN   = '%s.bundle'
 
 @conf
 def gcc_modifier_win32(conf):
 	"""Configuration flags for executing gcc on Windows"""
 	v = conf.env
-	v['cprogram_PATTERN']    = '%s.exe'
+	v.cprogram_PATTERN    = '%s.exe'
 
-	v['cshlib_PATTERN']      = '%s.dll'
-	v['implib_PATTERN']      = 'lib%s.dll.a'
-	v['IMPLIB_ST']           = '-Wl,--out-implib,%s'
+	v.cshlib_PATTERN      = '%s.dll'
+	v.implib_PATTERN      = '%s.dll.a'
+	v.IMPLIB_ST           = '-Wl,--out-implib,%s'
 
-	v['CFLAGS_cshlib']       = []
+	v.CFLAGS_cshlib       = []
 
 	# Auto-import is enabled by default even without this option,
 	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
@@ -90,42 +83,53 @@ def gcc_modifier_cygwin(conf):
 	"""Configuration flags for executing gcc on Cygwin"""
 	gcc_modifier_win32(conf)
 	v = conf.env
-	v['cshlib_PATTERN'] = 'cyg%s.dll'
+	v.cshlib_PATTERN = 'cyg%s.dll'
 	v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base'])
-	v['CFLAGS_cshlib'] = []
+	v.CFLAGS_cshlib = []
 
 @conf
 def gcc_modifier_darwin(conf):
 	"""Configuration flags for executing gcc on MacOS"""
 	v = conf.env
-	v['CFLAGS_cshlib']       = ['-fPIC']
-	v['LINKFLAGS_cshlib']    = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
-	v['cshlib_PATTERN']      = 'lib%s.dylib'
-	v['FRAMEWORKPATH_ST']    = '-F%s'
-	v['FRAMEWORK_ST']        = ['-framework']
-	v['ARCH_ST']             = ['-arch']
+	v.CFLAGS_cshlib       = ['-fPIC']
+	v.LINKFLAGS_cshlib    = ['-dynamiclib']
+	v.cshlib_PATTERN      = 'lib%s.dylib'
+	v.FRAMEWORKPATH_ST    = '-F%s'
+	v.FRAMEWORK_ST        = ['-framework']
+	v.ARCH_ST             = ['-arch']
 
-	v['LINKFLAGS_cstlib']    = []
+	v.LINKFLAGS_cstlib    = []
 
-	v['SHLIB_MARKER']        = []
-	v['STLIB_MARKER']        = []
-	v['SONAME_ST']           = []
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.SONAME_ST           = []
 
 @conf
 def gcc_modifier_aix(conf):
 	"""Configuration flags for executing gcc on AIX"""
 	v = conf.env
-	v['LINKFLAGS_cprogram']  = ['-Wl,-brtl']
-	v['LINKFLAGS_cshlib']    = ['-shared','-Wl,-brtl,-bexpfull']
-	v['SHLIB_MARKER']        = []
+	v.LINKFLAGS_cprogram  = ['-Wl,-brtl']
+	v.LINKFLAGS_cshlib    = ['-shared','-Wl,-brtl,-bexpfull']
+	v.SHLIB_MARKER        = []
 
 @conf
 def gcc_modifier_hpux(conf):
 	v = conf.env
-	v['SHLIB_MARKER']        = []
-	v['STLIB_MARKER']        = '-Bstatic'
-	v['CFLAGS_cshlib']       = ['-fPIC','-DPIC']
-	v['cshlib_PATTERN']      = 'lib%s.sl'
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.CFLAGS_cshlib       = ['-fPIC','-DPIC']
+	v.cshlib_PATTERN      = 'lib%s.sl'
+
+@conf
+def gcc_modifier_openbsd(conf):
+	conf.env.SONAME_ST = []
+
+@conf
+def gcc_modifier_osf1V(conf):
+	v = conf.env
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.SONAME_ST           = []
 
 @conf
 def gcc_modifier_platform(conf):
@@ -148,5 +152,5 @@ def configure(conf):
 	conf.cc_load_tools()
 	conf.cc_add_flags()
 	conf.link_add_flags()
-
+	conf.check_gcc_o_space()
 

+ 17 - 22
sdk/waf/waflib/Tools/gdc.py

@@ -2,51 +2,46 @@
 # encoding: utf-8
 # Carlos Rafael Giani, 2007 (dv)
 
-import sys
 from waflib.Tools import ar, d
 from waflib.Configure import conf
 
 @conf
 def find_gdc(conf):
 	"""
-	Find the program gdc and set the variable *D*
+	Finds the program gdc and set the variable *D*
 	"""
 	conf.find_program('gdc', var='D')
 
-	out = conf.cmd_and_log([conf.env.D, '--version'])
-	if out.find("gdc ") == -1:
+	out = conf.cmd_and_log(conf.env.D + ['--version'])
+	if out.find("gdc") == -1:
 		conf.fatal("detected compiler is not gdc")
 
 @conf
 def common_flags_gdc(conf):
 	"""
-	Set the flags required by *gdc*
+	Sets the flags required by *gdc*
 	"""
 	v = conf.env
 
-	# _DFLAGS _DIMPORTFLAGS
+	v.DFLAGS            = []
 
-	# for mory info about the meaning of this dict see dmd.py
-	v['DFLAGS']            = []
+	v.D_SRC_F           = ['-c']
+	v.D_TGT_F           = '-o%s'
 
-	v['D_SRC_F']           = ['-c']
-	v['D_TGT_F']           = '-o%s'
+	v.D_LINKER          = v.D
+	v.DLNK_SRC_F        = ''
+	v.DLNK_TGT_F        = '-o%s'
+	v.DINC_ST           = '-I%s'
 
-	# linker
-	v['D_LINKER']          = v['D']
-	v['DLNK_SRC_F']        = ''
-	v['DLNK_TGT_F']        = '-o%s'
-	v['DINC_ST']           = '-I%s'
+	v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
+	v.DSTLIB_ST = v.DSHLIB_ST         = '-l%s'
+	v.DSTLIBPATH_ST = v.DLIBPATH_ST   = '-L%s'
 
-	v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
-	v['DSTLIB_ST'] = v['DSHLIB_ST']         = '-l%s'
-	v['DSTLIBPATH_ST'] = v['DLIBPATH_ST']   = '-L%s'
+	v.LINKFLAGS_dshlib  = ['-shared']
 
-	v['LINKFLAGS_dshlib']  = ['-shared']
-
-	v['DHEADER_ext']       = '.di'
+	v.DHEADER_ext       = '.di'
 	v.DFLAGS_d_with_header = '-fintfc'
-	v['D_HDR_F']           = '-fintfc-file=%s'
+	v.D_HDR_F           = '-fintfc-file=%s'
 
 def configure(conf):
 	"""

+ 12 - 10
sdk/waf/waflib/Tools/gfortran.py

@@ -1,7 +1,7 @@
 #! /usr/bin/env python
 # encoding: utf-8
 # DC 2008
-# Thomas Nagy 2010 (ita)
+# Thomas Nagy 2016-2018 (ita)
 
 import re
 from waflib import Utils
@@ -13,16 +13,15 @@ def find_gfortran(conf):
 	"""Find the gfortran program (will look in the environment variable 'FC')"""
 	fc = conf.find_program(['gfortran','g77'], var='FC')
 	# (fallback to g77 for systems, where no gfortran is available)
-	fc = conf.cmd_to_list(fc)
 	conf.get_gfortran_version(fc)
 	conf.env.FC_NAME = 'GFORTRAN'
 
 @conf
 def gfortran_flags(conf):
 	v = conf.env
-	v['FCFLAGS_fcshlib']   = ['-fPIC']
-	v['FORTRANMODFLAG']  = ['-J', ''] # template for module path
-	v['FCFLAGS_DEBUG'] = ['-Werror'] # why not
+	v.FCFLAGS_fcshlib = ['-fPIC']
+	v.FORTRANMODFLAG = ['-J', ''] # template for module path
+	v.FCFLAGS_DEBUG = ['-Werror'] # why not
 
 @conf
 def gfortran_modifier_win32(conf):
@@ -38,7 +37,7 @@ def gfortran_modifier_darwin(conf):
 
 @conf
 def gfortran_modifier_platform(conf):
-	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
 	gfortran_modifier_func = getattr(conf, 'gfortran_modifier_' + dest_os, None)
 	if gfortran_modifier_func:
 		gfortran_modifier_func()
@@ -51,8 +50,10 @@ def get_gfortran_version(conf, fc):
 	version_re = re.compile(r"GNU\s*Fortran", re.I).search
 	cmd = fc + ['--version']
 	out, err = fc_config.getoutput(conf, cmd, stdin=False)
-	if out: match = version_re(out)
-	else: match = version_re(err)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
 	if not match:
 		conf.fatal('Could not determine the compiler type')
 
@@ -64,7 +65,7 @@ def get_gfortran_version(conf, fc):
 		conf.fatal('Could not determine the compiler type')
 
 	k = {}
-	out = out.split('\n')
+	out = out.splitlines()
 	import shlex
 
 	for line in out:
@@ -80,7 +81,7 @@ def get_gfortran_version(conf, fc):
 	def isT(var):
 		return var in k and k[var] != '0'
 
-	conf.env['FC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
+	conf.env.FC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
 
 def configure(conf):
 	conf.find_gfortran()
@@ -89,3 +90,4 @@ def configure(conf):
 	conf.fc_add_flags()
 	conf.gfortran_flags()
 	conf.gfortran_modifier_platform()
+	conf.check_gfortran_o_space()

+ 182 - 74
sdk/waf/waflib/Tools/glib2.py

@@ -1,6 +1,6 @@
 #! /usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 
 """
 Support for GLib2 tools:
@@ -8,18 +8,21 @@ Support for GLib2 tools:
 * marshal
 * enums
 * gsettings
+* gresource
 """
 
 import os
-from waflib import Task, Utils, Options, Errors, Logs
-from waflib.TaskGen import taskgen_method, before_method, after_method, feature
+import functools
+from waflib import Context, Task, Utils, Options, Errors, Logs
+from waflib.TaskGen import taskgen_method, before_method, feature, extension
+from waflib.Configure import conf
 
 ################## marshal files
 
 @taskgen_method
 def add_marshal_file(self, filename, prefix):
 	"""
-	Add a file to the list of marshal files to process. Store them in the attribute *marshal_list*.
+	Adds a file to the list of marshal files to process. Store them in the attribute *marshal_list*.
 
 	:param filename: xml file to compile
 	:type filename: string
@@ -34,8 +37,8 @@ def add_marshal_file(self, filename, prefix):
 @before_method('process_source')
 def process_marshal(self):
 	"""
-	Process the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
-	Add the c file created to the list of source to process.
+	Processes the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
+	Adds the c file created to the list of source to process.
 	"""
 	for f, prefix in getattr(self, 'marshal_list', []):
 		node = self.path.find_resource(f)
@@ -52,10 +55,11 @@ def process_marshal(self):
 	self.source.append(c_node)
 
 class glib_genmarshal(Task.Task):
-
+	vars    = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
+	color   = 'BLUE'
+	ext_out = ['.h']
 	def run(self):
-
-		bld = self.inputs[0].__class__.ctx
+		bld = self.generator.bld
 
 		get = self.env.get_flat
 		cmd1 = "%s %s --prefix=%s --header > %s" % (
@@ -66,7 +70,8 @@ class glib_genmarshal(Task.Task):
 		)
 
 		ret = bld.exec_command(cmd1)
-		if ret: return ret
+		if ret:
+			return ret
 
 		#print self.outputs[1].abspath()
 		c = '''#include "%s"\n''' % self.outputs[0].name
@@ -80,16 +85,12 @@ class glib_genmarshal(Task.Task):
 		)
 		return bld.exec_command(cmd2)
 
-	vars    = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
-	color   = 'BLUE'
-	ext_out = ['.h']
-
 ########################## glib-mkenums
 
 @taskgen_method
 def add_enums_from_template(self, source='', target='', template='', comments=''):
 	"""
-	Add a file to the list of enum files to process. Store them in the attribute *enums_list*.
+	Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
 
 	:param source: enum file to process
 	:type source: string
@@ -120,7 +121,7 @@ def add_enums(self, source='', target='',
               file_head='', file_prod='', file_tail='', enum_prod='',
               value_head='', value_prod='', value_tail='', comments=''):
 	"""
-	Add a file to the list of enum files to process. Store them in the attribute *enums_list*.
+	Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
 
 	:param source: enum file to process
 	:type source: string
@@ -154,7 +155,7 @@ def add_enums(self, source='', target='',
 @before_method('process_source')
 def process_enums(self):
 	"""
-	Process the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances.
+	Processes the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances.
 	"""
 	for enum in getattr(self, 'enums_list', []):
 		task = self.create_task('glib_mkenums')
@@ -168,7 +169,7 @@ def process_enums(self):
 			raise Errors.WafError('missing source ' + str(enum))
 		source_list = [self.path.find_resource(k) for k in source_list]
 		inputs += source_list
-		env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list]
+		env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
 
 		# find the target
 		if not enum['target']:
@@ -176,7 +177,7 @@ def process_enums(self):
 		tgt_node = self.path.find_or_declare(enum['target'])
 		if tgt_node.name.endswith('.c'):
 			self.source.append(tgt_node)
-		env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath()
+		env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
 
 
 		options = []
@@ -197,7 +198,7 @@ def process_enums(self):
 			if enum[param]:
 				options.append('%s %r' % (option, enum[param]))
 
-		env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)
+		env.GLIB_MKENUMS_OPTIONS = ' '.join(options)
 
 		# update the task instance
 		task.set_inputs(inputs)
@@ -205,7 +206,7 @@ def process_enums(self):
 
 class glib_mkenums(Task.Task):
 	"""
-	Process enum files
+	Processes enum files
 	"""
 	run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
 	color   = 'PINK'
@@ -216,7 +217,7 @@ class glib_mkenums(Task.Task):
 @taskgen_method
 def add_settings_schemas(self, filename_list):
 	"""
-	Add settings files to process, add them to *settings_schema_files*
+	Adds settings files to process to *settings_schema_files*
 
 	:param filename_list: files
 	:type filename_list: list of string
@@ -232,7 +233,7 @@ def add_settings_schemas(self, filename_list):
 @taskgen_method
 def add_settings_enums(self, namespace, filename_list):
 	"""
-	This function may be called only once by task generator to set the enums namespace.
+	Called only once by task generator to set the enums namespace.
 
 	:param namespace: namespace
 	:type namespace: string
@@ -240,31 +241,17 @@ def add_settings_enums(self, namespace, filename_list):
 	:type filename_list: file list
 	"""
 	if hasattr(self, 'settings_enum_namespace'):
-		raise Errors.WafError("Tried to add gsettings enums to '%s' more than once" % self.name)
+		raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name)
 	self.settings_enum_namespace = namespace
 
-	if type(filename_list) != 'list':
+	if not isinstance(filename_list, list):
 		filename_list = [filename_list]
 	self.settings_enum_files = filename_list
 
-
-def r_change_ext(self, ext):
-	"""
-	Change the extension from the *last* dot in the filename. The gsettings schemas
-	often have names of the form org.gsettings.test.gschema.xml
-	"""
-	name = self.name
-	k = name.rfind('.')
-	if k >= 0:
-		name = name[:k] + ext
-	else:
-		name = name + ext
-	return self.parent.find_or_declare([name])
-
 @feature('glib2')
 def process_settings(self):
 	"""
-	Process the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
+	Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
 	same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.
 
 	"""
@@ -272,7 +259,7 @@ def process_settings(self):
 	install_files = []
 
 	settings_schema_files = getattr(self, 'settings_schema_files', [])
-	if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']:
+	if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
 		raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
 
 	# 1. process gsettings_enum_files (generate .enums.xml)
@@ -283,18 +270,18 @@ def process_settings(self):
 		source_list = self.settings_enum_files
 		source_list = [self.path.find_resource(k) for k in source_list]
 		enums_task.set_inputs(source_list)
-		enums_task.env['GLIB_MKENUMS_SOURCE'] = [k.abspath() for k in source_list]
+		enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
 
 		target = self.settings_enum_namespace + '.enums.xml'
 		tgt_node = self.path.find_or_declare(target)
 		enums_task.set_outputs(tgt_node)
-		enums_task.env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath()
+		enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
 		enums_tgt_node = [tgt_node]
 
-		install_files.append (tgt_node)
+		install_files.append(tgt_node)
 
 		options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead "  <@type@ id=\\"%s.@EnumName@\\">" --vprod "    <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail "  </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
-		enums_task.env['GLIB_MKENUMS_OPTIONS'] = options
+		enums_task.env.GLIB_MKENUMS_OPTIONS = options
 
 	# 2. process gsettings_schema_files (validate .gschema.xml files)
 	#
@@ -303,76 +290,197 @@ def process_settings(self):
 
 		schema_node = self.path.find_resource(schema)
 		if not schema_node:
-			raise Errors.WafError("Cannot find the schema file '%s'" % schema)
+			raise Errors.WafError("Cannot find the schema file %r" % schema)
 		install_files.append(schema_node)
 		source_list = enums_tgt_node + [schema_node]
 
 		schema_task.set_inputs (source_list)
-		schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS'] = [("--schema-file=" + k.abspath()) for k in source_list]
+		schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list]
 
-		target_node = r_change_ext (schema_node, '.xml.valid')
+		target_node = schema_node.change_ext('.xml.valid')
 		schema_task.set_outputs (target_node)
-		schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT'] = target_node.abspath()
+		schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath()
 
 	# 3. schemas install task
 	def compile_schemas_callback(bld):
-		if not bld.is_install: return
-		Logs.pprint ('YELLOW','Updating GSettings schema cache')
-		command = Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}", bld.env)
-		ret = self.bld.exec_command(command)
+		if not bld.is_install:
+			return
+		compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
+		destdir = Options.options.destdir
+		paths = bld._compile_schemas_registered
+		if destdir:
+			paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths)
+		for path in paths:
+			Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path)
+			if self.bld.exec_command(compile_schemas + [path]):
+				Logs.warn('Could not update GSettings schema cache %r' % path)
 
 	if self.bld.is_install:
-		if not self.env['GSETTINGSSCHEMADIR']:
+		schemadir = self.env.GSETTINGSSCHEMADIR
+		if not schemadir:
 			raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
 
 		if install_files:
-			self.bld.install_files (self.env['GSETTINGSSCHEMADIR'], install_files)
-
-			if not hasattr(self.bld, '_compile_schemas_registered'):
-				self.bld.add_post_fun (compile_schemas_callback)
-				self.bld._compile_schemas_registered = True
+			self.add_install_files(install_to=schemadir, install_from=install_files)
+			registered_schemas = getattr(self.bld, '_compile_schemas_registered', None)
+			if not registered_schemas:
+				registered_schemas = self.bld._compile_schemas_registered = set()
+				self.bld.add_post_fun(compile_schemas_callback)
+			registered_schemas.add(schemadir)
 
 class glib_validate_schema(Task.Task):
 	"""
-	Validate schema files
+	Validates schema files
 	"""
 	run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
 	color   = 'PINK'
 
-def configure(conf):
+################## gresource
+
+@extension('.gresource.xml')
+def process_gresource_source(self, node):
+	"""
+	Creates tasks that turn ``.gresource.xml`` files to C code
 	"""
-	Find the following programs:
+	if not self.env.GLIB_COMPILE_RESOURCES:
+		raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure")
 
-	* *glib-genmarshal* and set *GLIB_GENMARSHAL*
-	* *glib-mkenums* and set *GLIB_MKENUMS*
-	* *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
+	if 'gresource' in self.features:
+		return
+
+	h_node = node.change_ext('_xml.h')
+	c_node = node.change_ext('_xml.c')
+	self.create_task('glib_gresource_source', node, [h_node, c_node])
+	self.source.append(c_node)
+
+@feature('gresource')
+def process_gresource_bundle(self):
+	"""
+	Creates tasks to turn ``.gresource`` files from ``.gresource.xml`` files::
+
+		def build(bld):
+			bld(
+				features='gresource',
+				source=['resources1.gresource.xml', 'resources2.gresource.xml'],
+				install_path='${LIBDIR}/${PACKAGE}'
+			)
+
+	:param source: XML files to process
+	:type source: list of string
+	:param install_path: installation path
+	:type install_path: string
+	"""
+	for i in self.to_list(self.source):
+		node = self.path.find_resource(i)
+
+		task = self.create_task('glib_gresource_bundle', node, node.change_ext(''))
+		inst_to = getattr(self, 'install_path', None)
+		if inst_to:
+			self.add_install_files(install_to=inst_to, install_from=task.outputs)
+
+class glib_gresource_base(Task.Task):
+	"""
+	Base class for gresource based tasks
+	"""
+	color    = 'BLUE'
+	base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'
+
+	def scan(self):
+		"""
+		Scans gresource dependencies through ``glib-compile-resources --generate-dependencies command``
+		"""
+		bld = self.generator.bld
+		kw = {}
+		kw['cwd'] = self.get_cwd()
+		kw['quiet'] = Context.BOTH
+
+		cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
+			self.inputs[0].parent.srcpath(),
+			self.inputs[0].bld_dir(),
+			self.inputs[0].bldpath()
+		), self.env)
+
+		output = bld.cmd_and_log(cmd, **kw)
+
+		nodes = []
+		names = []
+		for dep in output.splitlines():
+			if dep:
+				node = bld.bldnode.find_node(dep)
+				if node:
+					nodes.append(node)
+				else:
+					names.append(dep)
+
+		return (nodes, names)
+
+class glib_gresource_source(glib_gresource_base):
+	"""
+	Task to generate C source code (.h and .c files) from a gresource.xml file
+	"""
+	vars    = ['GLIB_COMPILE_RESOURCES']
+	fun_h   = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}')
+	fun_c   = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}')
+	ext_out = ['.h']
 
-	And set the variable *GSETTINGSSCHEMADIR*
+	def run(self):
+		return self.fun_h[0](self) or self.fun_c[0](self)
+
+class glib_gresource_bundle(glib_gresource_base):
+	"""
+	Task to generate a .gresource binary file from a gresource.xml file
 	"""
+	run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}'
+	shell   = True # temporary workaround for #795
+
+@conf
+def find_glib_genmarshal(conf):
 	conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
-	conf.find_perl_program('glib-mkenums', var='GLIB_MKENUMS')
 
+@conf
+def find_glib_mkenums(conf):
+	conf.find_program('glib-mkenums', var='GLIB_MKENUMS')
+
+@conf
+def find_glib_compile_schemas(conf):
 	# when cross-compiling, gsettings.m4 locates the program with the following:
 	#   pkg-config --variable glib_compile_schemas gio-2.0
-	conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS', mandatory=False)
+	conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')
 
 	def getstr(varname):
 		return getattr(Options.options, varname, getattr(conf.env,varname, ''))
 
-	# TODO make this dependent on the gnu_dirs tool?
 	gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
 	if not gsettingsschemadir:
 		datadir = getstr('DATADIR')
 		if not datadir:
-			prefix = conf.env['PREFIX']
+			prefix = conf.env.PREFIX
 			datadir = os.path.join(prefix, 'share')
 		gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas')
 
-	conf.env['GSETTINGSSCHEMADIR'] = gsettingsschemadir
+	conf.env.GSETTINGSSCHEMADIR = gsettingsschemadir
 
-def options(opt):
+@conf
+def find_glib_compile_resources(conf):
+	conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES')
+
+def configure(conf):
 	"""
-	Add the ``--gsettingsschemadir`` command-line option
+	Finds the following programs:
+
+	* *glib-genmarshal* and set *GLIB_GENMARSHAL*
+	* *glib-mkenums* and set *GLIB_MKENUMS*
+	* *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
+	* *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory)
 	"""
-	opt.add_option('--gsettingsschemadir', help='GSettings schema location [Default: ${datadir}/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
+	conf.find_glib_genmarshal()
+	conf.find_glib_mkenums()
+	conf.find_glib_compile_schemas(mandatory=False)
+	conf.find_glib_compile_resources(mandatory=False)
 
+def options(opt):
+	"""
+	Adds the ``--gsettingsschemadir`` command-line option
+	"""
+	gr = opt.add_option_group('Installation directories')
+	gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')

+ 53 - 48
sdk/waf/waflib/Tools/gnu_dirs.py

@@ -16,60 +16,62 @@ found in autotools, and will update the environment with the following
 installation variables:
 
 ============== ========================================= =======================
-Variable       Description                               Value
+Variable       Description                               Default Value
 ============== ========================================= =======================
-PREFIX         architecture-independent files            /usr/local
-EXEC_PREFIX    architecture-dependent files              PREFIX
-BINDIR         user executables                          EXEC_PREFIX/bin
-SBINDIR        user executables                          EXEC_PREFIX/sbin
-LIBEXECDIR     program executables                       EXEC_PREFIX/libexec
-SYSCONFDIR     read-only single-machine data             PREFIX/etc
-SHAREDSTATEDIR modifiable architecture-independent data  PREFIX/com
-LOCALSTATEDIR  modifiable single-machine data            PREFIX/var
+PREFIX         installation prefix                       /usr/local
+EXEC_PREFIX    installation prefix for binaries          PREFIX
+BINDIR         user commands                             EXEC_PREFIX/bin
+SBINDIR        system binaries                           EXEC_PREFIX/sbin
+LIBEXECDIR     program-specific binaries                 EXEC_PREFIX/libexec
+SYSCONFDIR     host-specific configuration               PREFIX/etc
+SHAREDSTATEDIR architecture-independent variable data    PREFIX/com
+LOCALSTATEDIR  variable data                             PREFIX/var
 LIBDIR         object code libraries                     EXEC_PREFIX/lib
-INCLUDEDIR     C header files                            PREFIX/include
-OLDINCLUDEDIR  C header files for non-gcc                /usr/include
-DATAROOTDIR    read-only arch.-independent data root     PREFIX/share
-DATADIR        read-only architecture-independent data   DATAROOTDIR
-INFODIR        info documentation                        DATAROOTDIR/info
+INCLUDEDIR     header files                              PREFIX/include
+OLDINCLUDEDIR  header files for non-GCC compilers        /usr/include
+DATAROOTDIR    architecture-independent data root        PREFIX/share
+DATADIR        architecture-independent data             DATAROOTDIR
+INFODIR        GNU "info" documentation                  DATAROOTDIR/info
 LOCALEDIR      locale-dependent data                     DATAROOTDIR/locale
-MANDIR         man documentation                         DATAROOTDIR/man
+MANDIR         manual pages                              DATAROOTDIR/man
 DOCDIR         documentation root                        DATAROOTDIR/doc/APPNAME
-HTMLDIR        html documentation                        DOCDIR
-DVIDIR         dvi documentation                         DOCDIR
-PDFDIR         pdf documentation                         DOCDIR
-PSDIR          ps documentation                          DOCDIR
+HTMLDIR        HTML documentation                        DOCDIR
+DVIDIR         DVI documentation                         DOCDIR
+PDFDIR         PDF documentation                         DOCDIR
+PSDIR          PostScript documentation                  DOCDIR
 ============== ========================================= =======================
 """
 
-import os
+import os, re
 from waflib import Utils, Options, Context
 
-_options = [x.split(', ') for x in '''
-bindir, user executables, ${EXEC_PREFIX}/bin
-sbindir, system admin executables, ${EXEC_PREFIX}/sbin
-libexecdir, program executables, ${EXEC_PREFIX}/libexec
-sysconfdir, read-only single-machine data, ${PREFIX}/etc
-sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
-localstatedir, modifiable single-machine data, ${PREFIX}/var
-libdir, object code libraries, ${EXEC_PREFIX}/lib
-includedir, C header files, ${PREFIX}/include
-oldincludedir, C header files for non-gcc, /usr/include
-datarootdir, read-only arch.-independent data root, ${PREFIX}/share
-datadir, read-only architecture-independent data, ${DATAROOTDIR}
-infodir, info documentation, ${DATAROOTDIR}/info
+gnuopts = '''
+bindir, user commands, ${EXEC_PREFIX}/bin
+sbindir, system binaries, ${EXEC_PREFIX}/sbin
+libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
+sysconfdir, host-specific configuration, ${PREFIX}/etc
+sharedstatedir, architecture-independent variable data, ${PREFIX}/com
+localstatedir, variable data, ${PREFIX}/var
+libdir, object code libraries, ${EXEC_PREFIX}/lib%s
+includedir, header files, ${PREFIX}/include
+oldincludedir, header files for non-GCC compilers, /usr/include
+datarootdir, architecture-independent data root, ${PREFIX}/share
+datadir, architecture-independent data, ${DATAROOTDIR}
+infodir, GNU "info" documentation, ${DATAROOTDIR}/info
 localedir, locale-dependent data, ${DATAROOTDIR}/locale
-mandir, man documentation, ${DATAROOTDIR}/man
+mandir, manual pages, ${DATAROOTDIR}/man
 docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
-htmldir, html documentation, ${DOCDIR}
-dvidir, dvi documentation, ${DOCDIR}
-pdfdir, pdf documentation, ${DOCDIR}
-psdir, ps documentation, ${DOCDIR}
-'''.split('\n') if x]
+htmldir, HTML documentation, ${DOCDIR}
+dvidir, DVI documentation, ${DOCDIR}
+pdfdir, PDF documentation, ${DOCDIR}
+psdir, PostScript documentation, ${DOCDIR}
+''' % Utils.lib64()
+
+_options = [x.split(', ') for x in gnuopts.splitlines() if x]
 
 def configure(conf):
 	"""
-	Read the command-line options to set lots of variables in *conf.env*. The variables
+	Reads the command-line options to set lots of variables in *conf.env*. The variables
 	BINDIR and LIBDIR will be overwritten.
 	"""
 	def get_param(varname, default):
@@ -94,16 +96,16 @@ def configure(conf):
 					complete = False
 
 	if not complete:
-		lst = [name for name, _, _ in _options if not env[name.upper()]]
+		lst = [x for x, _, _ in _options if not env[x.upper()]]
 		raise conf.errors.WafError('Variable substitution failure %r' % lst)
 
 def options(opt):
 	"""
-	Add lots of command-line options, for example::
+	Adds lots of command-line options, for example::
 
 		--exec-prefix: EXEC_PREFIX
 	"""
-	inst_dir = opt.add_option_group('Installation directories',
+	inst_dir = opt.add_option_group('Installation prefix',
 'By default, "waf install" will put the files in\
  "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
  than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
@@ -112,18 +114,21 @@ def options(opt):
 		option = opt.parser.get_option(k)
 		if option:
 			opt.parser.remove_option(k)
-			inst_dir.add_option(option)
+			inst_dir.add_option(option)
 
 	inst_dir.add_option('--exec-prefix',
-		help = 'installation prefix [Default: ${PREFIX}]',
+		help = 'installation prefix for binaries [PREFIX]',
 		default = '',
 		dest = 'EXEC_PREFIX')
 
-	dirs_options = opt.add_option_group('Pre-defined installation directories', '')
+	dirs_options = opt.add_option_group('Installation directories')
 
 	for name, help, default in _options:
 		option_name = '--' + name
+
+		opt.parser.remove_option(option_name)
+
 		str_default = default
-		str_help = '%s [Default: %s]' % (help, str_default)
-		dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
+		str_help = '%s [%s]' % (help, re.sub(r'\$\{([^}]+)\}', r'\1', str_default))
+		dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
 

+ 64 - 59
sdk/waf/waflib/Tools/gxx.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 # Ralf Habacker, 2006 (rh)
 # Yinon Ehrlich, 2009
 
@@ -8,21 +8,17 @@
 g++/llvm detection.
 """
 
-import os, sys
-from waflib import Configure, Options, Utils
 from waflib.Tools import ccroot, ar
 from waflib.Configure import conf
 
 @conf
 def find_gxx(conf):
 	"""
-	Find the program g++, and if present, try to detect its version number
+	Finds the program g++, and if present, try to detect its version number
 	"""
 	cxx = conf.find_program(['g++', 'c++'], var='CXX')
-	cxx = conf.cmd_to_list(cxx)
 	conf.get_cc_version(cxx, gcc=True)
 	conf.env.CXX_NAME = 'gcc'
-	conf.env.CXX      = cxx
 
 @conf
 def gxx_common_flags(conf):
@@ -31,54 +27,51 @@ def gxx_common_flags(conf):
 	"""
 	v = conf.env
 
-	v['CXX_SRC_F']           = []
-	v['CXX_TGT_F']           = ['-c', '-o']
+	v.CXX_SRC_F           = []
+	v.CXX_TGT_F           = ['-c', '-o']
 
-	# linker
-	if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
-	v['CXXLNK_SRC_F']        = []
-	v['CXXLNK_TGT_F']        = ['-o']
-	v['CPPPATH_ST']          = '-I%s'
-	v['DEFINES_ST']          = '-D%s'
+	if not v.LINK_CXX:
+		v.LINK_CXX = v.CXX
 
-	v['LIB_ST']              = '-l%s' # template for adding libs
-	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-	v['STLIB_ST']            = '-l%s'
-	v['STLIBPATH_ST']        = '-L%s'
-	v['RPATH_ST']            = '-Wl,-rpath,%s'
+	v.CXXLNK_SRC_F        = []
+	v.CXXLNK_TGT_F        = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
 
-	v['SONAME_ST']           = '-Wl,-h,%s'
-	v['SHLIB_MARKER']        = '-Wl,-Bdynamic'
-	v['STLIB_MARKER']        = '-Wl,-Bstatic'
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
 
-	# program
-	v['cxxprogram_PATTERN']  = '%s'
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Wl,-Bdynamic'
+	v.STLIB_MARKER        = '-Wl,-Bstatic'
 
-	# shared library
-	v['CXXFLAGS_cxxshlib']   = ['-fPIC']
-	v['LINKFLAGS_cxxshlib']  = ['-shared']
-	v['cxxshlib_PATTERN']    = 'lib%s.so'
+	v.cxxprogram_PATTERN  = '%s'
 
-	# static lib
-	v['LINKFLAGS_cxxstlib']  = ['-Wl,-Bstatic']
-	v['cxxstlib_PATTERN']    = 'lib%s.a'
+	v.CXXFLAGS_cxxshlib   = ['-fPIC']
+	v.LINKFLAGS_cxxshlib  = ['-shared']
+	v.cxxshlib_PATTERN    = 'lib%s.so'
 
-	# osx stuff
-	v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
-	v['CXXFLAGS_MACBUNDLE']  = ['-fPIC']
-	v['macbundle_PATTERN']   = '%s.bundle'
+	v.LINKFLAGS_cxxstlib  = ['-Wl,-Bstatic']
+	v.cxxstlib_PATTERN    = 'lib%s.a'
+
+	v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
+	v.CXXFLAGS_MACBUNDLE  = ['-fPIC']
+	v.macbundle_PATTERN   = '%s.bundle'
 
 @conf
 def gxx_modifier_win32(conf):
 	"""Configuration flags for executing gcc on Windows"""
 	v = conf.env
-	v['cxxprogram_PATTERN']  = '%s.exe'
+	v.cxxprogram_PATTERN  = '%s.exe'
 
-	v['cxxshlib_PATTERN']    = '%s.dll'
-	v['implib_PATTERN']      = 'lib%s.dll.a'
-	v['IMPLIB_ST']           = '-Wl,--out-implib,%s'
+	v.cxxshlib_PATTERN    = '%s.dll'
+	v.implib_PATTERN      = '%s.dll.a'
+	v.IMPLIB_ST           = '-Wl,--out-implib,%s'
 
-	v['CXXFLAGS_cxxshlib']   = []
+	v.CXXFLAGS_cxxshlib   = []
 
 	# Auto-import is enabled by default even without this option,
 	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
@@ -90,43 +83,54 @@ def gxx_modifier_cygwin(conf):
 	"""Configuration flags for executing g++ on Cygwin"""
 	gxx_modifier_win32(conf)
 	v = conf.env
-	v['cxxshlib_PATTERN']    = 'cyg%s.dll'
+	v.cxxshlib_PATTERN    = 'cyg%s.dll'
 	v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base'])
-	v['CXXFLAGS_cxxshlib']   = []
+	v.CXXFLAGS_cxxshlib   = []
 
 @conf
 def gxx_modifier_darwin(conf):
 	"""Configuration flags for executing g++ on MacOS"""
 	v = conf.env
-	v['CXXFLAGS_cxxshlib']   = ['-fPIC']
-	v['LINKFLAGS_cxxshlib']  = ['-dynamiclib', '-Wl,-compatibility_version,1', '-Wl,-current_version,1']
-	v['cxxshlib_PATTERN']    = 'lib%s.dylib'
-	v['FRAMEWORKPATH_ST']    = '-F%s'
-	v['FRAMEWORK_ST']        = ['-framework']
-	v['ARCH_ST']             = ['-arch']
+	v.CXXFLAGS_cxxshlib   = ['-fPIC']
+	v.LINKFLAGS_cxxshlib  = ['-dynamiclib']
+	v.cxxshlib_PATTERN    = 'lib%s.dylib'
+	v.FRAMEWORKPATH_ST    = '-F%s'
+	v.FRAMEWORK_ST        = ['-framework']
+	v.ARCH_ST             = ['-arch']
 
-	v['LINKFLAGS_cxxstlib']  = []
+	v.LINKFLAGS_cxxstlib  = []
 
-	v['SHLIB_MARKER']        = []
-	v['STLIB_MARKER']        = []
-	v['SONAME_ST']           = []
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.SONAME_ST           = []
 
 @conf
 def gxx_modifier_aix(conf):
 	"""Configuration flags for executing g++ on AIX"""
 	v = conf.env
-	v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl']
+	v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
 
-	v['LINKFLAGS_cxxshlib']  = ['-shared', '-Wl,-brtl,-bexpfull']
-	v['SHLIB_MARKER']        = []
+	v.LINKFLAGS_cxxshlib  = ['-shared', '-Wl,-brtl,-bexpfull']
+	v.SHLIB_MARKER        = []
 
 @conf
 def gxx_modifier_hpux(conf):
 	v = conf.env
-	v['SHLIB_MARKER']        = []
-	v['STLIB_MARKER']        = '-Bstatic'
-	v['CFLAGS_cxxshlib']     = ['-fPIC','-DPIC']
-	v['cxxshlib_PATTERN']    = 'lib%s.sl'
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = '-Bstatic'
+	v.CFLAGS_cxxshlib     = ['-fPIC','-DPIC']
+	v.cxxshlib_PATTERN    = 'lib%s.sl'
+
+@conf
+def gxx_modifier_openbsd(conf):
+	conf.env.SONAME_ST = []
+
+@conf
+def gxx_modifier_osf1V(conf):
+	v = conf.env
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
+	v.SONAME_ST           = []
 
 @conf
 def gxx_modifier_platform(conf):
@@ -149,4 +153,5 @@ def configure(conf):
 	conf.cxx_load_tools()
 	conf.cxx_add_flags()
 	conf.link_add_flags()
+	conf.check_gcc_o_space('cxx')
 

+ 6 - 19
sdk/waf/waflib/Tools/icc.py

@@ -1,36 +1,23 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Stian Selnes, 2008
-# Thomas Nagy 2009-2010 (ita)
+# Stian Selnes 2008
+# Thomas Nagy 2009-2018 (ita)
 
 """
-Detect the Intel C compiler
+Detects the Intel C compiler
 """
 
-import os, sys
 from waflib.Tools import ccroot, ar, gcc
 from waflib.Configure import conf
 
 @conf
 def find_icc(conf):
 	"""
-	Find the program icc and execute it to ensure it really is icc
+	Finds the program icc and execute it to ensure it really is icc
 	"""
-	if sys.platform == 'cygwin':
-		conf.fatal('The Intel compiler does not work on Cygwin')
-
-	v = conf.env
-	cc = None
-	if v['CC']: cc = v['CC']
-	elif 'CC' in conf.environ: cc = conf.environ['CC']
-	if not cc: cc = conf.find_program('icc', var='CC')
-	if not cc: cc = conf.find_program('ICL', var='CC')
-	if not cc: conf.fatal('Intel C Compiler (icc) was not found')
-	cc = conf.cmd_to_list(cc)
-
+	cc = conf.find_program(['icx', 'icc', 'ICL'], var='CC')
 	conf.get_cc_version(cc, icc=True)
-	v['CC'] = cc
-	v['CC_NAME'] = 'icc'
+	conf.env.CC_NAME = 'icc'
 
 def configure(conf):
 	conf.find_icc()

+ 5 - 17
sdk/waf/waflib/Tools/icpc.py

@@ -1,34 +1,22 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy 2009-2010 (ita)
+# Thomas Nagy 2009-2018 (ita)
 
 """
-Detect the Intel C++ compiler
+Detects the Intel C++ compiler
 """
 
-import os, sys
 from waflib.Tools import ccroot, ar, gxx
 from waflib.Configure import conf
 
 @conf
 def find_icpc(conf):
 	"""
-	Find the program icpc, and execute it to ensure it really is icpc
+	Finds the program icpc, and execute it to ensure it really is icpc
 	"""
-	if sys.platform == 'cygwin':
-		conf.fatal('The Intel compiler does not work on Cygwin')
-
-	v = conf.env
-	cxx = None
-	if v['CXX']: cxx = v['CXX']
-	elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
-	if not cxx: cxx = conf.find_program('icpc', var='CXX')
-	if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found')
-	cxx = conf.cmd_to_list(cxx)
-
+	cxx = conf.find_program(['icpx', 'icpc'], var='CXX')
 	conf.get_cc_version(cxx, icc=True)
-	v['CXX'] = cxx
-	v['CXX_NAME'] = 'icc'
+	conf.env.CXX_NAME = 'icc'
 
 def configure(conf):
 	conf.find_icpc()

+ 387 - 26
sdk/waf/waflib/Tools/ifort.py

@@ -1,27 +1,46 @@
 #! /usr/bin/env python
 # encoding: utf-8
 # DC 2008
-# Thomas Nagy 2010 (ita)
+# Thomas Nagy 2016-2018 (ita)
 
-import re
-from waflib import Utils
-from waflib.Tools import fc, fc_config, fc_scan, ar
+import os, re, traceback
+from waflib import Utils, Logs, Errors
+from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot
 from waflib.Configure import conf
+from waflib.TaskGen import after_method, feature
 
 @conf
 def find_ifort(conf):
-	fc = conf.find_program('ifort', var='FC')
-	fc = conf.cmd_to_list(fc)
+	fc = conf.find_program(['ifx', 'ifort'], var='FC')
 	conf.get_ifort_version(fc)
 	conf.env.FC_NAME = 'IFORT'
 
 @conf
-def ifort_modifier_cygwin(conf):
-	raise NotImplementedError("Ifort on cygwin not yet implemented")
+def ifort_modifier_win32(self):
+	v = self.env
+	v.IFORT_WIN32 = True
+	v.FCSTLIB_MARKER = ''
+	v.FCSHLIB_MARKER = ''
 
-@conf
-def ifort_modifier_win32(conf):
-	fc_config.fortran_modifier_win32(conf)
+	v.FCLIB_ST = v.FCSTLIB_ST = '%s.lib'
+	v.FCLIBPATH_ST = v.STLIBPATH_ST = '/LIBPATH:%s'
+	v.FCINCPATH_ST = '/I%s'
+	v.FCDEFINES_ST = '/D%s'
+
+	v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
+	v.fcshlib_PATTERN = '%s.dll'
+	v.fcstlib_PATTERN = v.implib_PATTERN = '%s.lib'
+
+	v.FCLNK_TGT_F = '/out:'
+	v.FC_TGT_F = ['/c', '/o', '']
+	v.FCFLAGS_fcshlib = ''
+	v.LINKFLAGS_fcshlib = '/DLL'
+	v.AR_TGT_F = '/out:'
+	v.IMPLIB_ST = '/IMPLIB:%s'
+
+	v.append_value('LINKFLAGS', '/subsystem:console')
+	if v.IFORT_MANIFEST:
+		v.append_value('LINKFLAGS', ['/MANIFEST'])
 
 @conf
 def ifort_modifier_darwin(conf):
@@ -29,32 +48,374 @@ def ifort_modifier_darwin(conf):
 
 @conf
 def ifort_modifier_platform(conf):
-	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
+	dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
 	ifort_modifier_func = getattr(conf, 'ifort_modifier_' + dest_os, None)
 	if ifort_modifier_func:
 		ifort_modifier_func()
 
 @conf
 def get_ifort_version(conf, fc):
-	"""get the compiler version"""
+	"""
+	Detects the compiler version and sets ``conf.env.FC_VERSION``
+	"""
+	version_re = re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
+	if Utils.is_win32:
+		cmd = fc
+	else:
+		cmd = fc + ['-logo']
 
-	version_re = re.compile(r"ifort\s*\(IFORT\)\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
-	cmd = fc + ['--version']
 	out, err = fc_config.getoutput(conf, cmd, stdin=False)
-	if out:
-		match = version_re(out)
-	else:
-		match = version_re(err)
+	match = version_re(out) or version_re(err)
 	if not match:
 		conf.fatal('cannot determine ifort version.')
 	k = match.groupdict()
-	conf.env['FC_VERSION'] = (k['major'], k['minor'])
+	conf.env.FC_VERSION = (k['major'], k['minor'])
 
 def configure(conf):
-	conf.find_ifort()
-	conf.find_program('xiar', var='AR')
-	conf.env.ARFLAGS = 'rcs'
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.ifort_modifier_platform()
+	"""
+	Detects the Intel Fortran compilers
+	"""
+	if Utils.is_win32:
+		compiler, version, path, includes, libdirs, arch = conf.detect_ifort()
+		v = conf.env
+		v.DEST_CPU = arch
+		v.PATH = path
+		v.INCLUDES = includes
+		v.LIBPATH = libdirs
+		v.MSVC_COMPILER = compiler
+		try:
+			v.MSVC_VERSION = float(version)
+		except ValueError:
+			v.MSVC_VERSION = float(version[:-3])
+
+		conf.find_ifort_win32()
+		conf.ifort_modifier_win32()
+	else:
+		conf.find_ifort()
+		conf.find_program('xiar', var='AR')
+		conf.find_ar()
+		conf.fc_flags()
+		conf.fc_add_flags()
+		conf.ifort_modifier_platform()
+
+
+all_ifort_platforms = [('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86')]
+"""List of icl platforms"""
+
+@conf
+def gather_ifort_versions(conf, versions):
+	"""
+	List compiler versions by looking up registry keys
+	"""
+	version_pattern = re.compile(r'^...?.?\....?.?')
+	try:
+		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
+	except OSError:
+		try:
+			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran')
+		except OSError:
+			return
+	index = 0
+	while 1:
+		try:
+			version = Utils.winreg.EnumKey(all_versions, index)
+		except OSError:
+			break
+		index += 1
+		if not version_pattern.match(version):
+			continue
+		targets = {}
+		for target,arch in all_ifort_platforms:
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
+			try:
+				Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
+				icl_version=Utils.winreg.OpenKey(all_versions,version)
+				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				pass
+			else:
+				batch_file = os.path.join(path, 'bin', 'ifortvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+				else:
+					batch_file = os.path.join(path, 'env', 'vars.bat')
+					if os.path.isfile(batch_file):
+						targets[target] = target_compiler(conf, 'oneapi', arch, version, target, batch_file)
+
+		for target,arch in all_ifort_platforms:
+			try:
+				icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
+				path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+			except OSError:
+				continue
+			else:
+				batch_file=os.path.join(path,'bin','ifortvars.bat')
+				if os.path.isfile(batch_file):
+					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
+		major = version[0:2]
+		versions['intel ' + major] = targets
+
+@conf
+def setup_ifort(conf, versiondict):
+	"""
+	Checks installed compilers and targets and returns the first combination from the user's
+	options, env, or the global supported lists that checks.
+
+	:param versiondict: dict(platform -> dict(architecture -> configuration))
+	:type versiondict: dict(string -> dict(string -> target_compiler)
+	:return: the compiler, revision, path, include dirs, library paths and target architecture
+	:rtype: tuple of strings
+	"""
+	platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms]
+	desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
+	for version in desired_versions:
+		try:
+			targets = versiondict[version]
+		except KeyError:
+			continue
+		for arch in platforms:
+			try:
+				cfg = targets[arch]
+			except KeyError:
+				continue
+			cfg.evaluate()
+			if cfg.is_valid:
+				compiler,revision = version.rsplit(' ', 1)
+				return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
+	conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
+
+@conf
+def get_ifort_version_win32(conf, compiler, version, target, vcvars):
+	# FIXME hack
+	try:
+		conf.msvc_cnt += 1
+	except AttributeError:
+		conf.msvc_cnt = 1
+	batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
+	batfile.write("""@echo off
+set INCLUDE=
+set LIB=
+call "%s" %s
+echo PATH=%%PATH%%
+echo INCLUDE=%%INCLUDE%%
+echo LIB=%%LIB%%;%%LIBPATH%%
+""" % (vcvars,target))
+	sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
+	batfile.delete()
+	lines = sout.splitlines()
+
+	if not lines[0]:
+		lines.pop(0)
+
+	MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
+	for line in lines:
+		if line.startswith('PATH='):
+			path = line[5:]
+			MSVC_PATH = path.split(';')
+		elif line.startswith('INCLUDE='):
+			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
+		elif line.startswith('LIB='):
+			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
+	if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
+		conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)')
+
+	# Check if the compiler is usable at all.
+	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
+	env = dict(os.environ)
+	env.update(PATH = path)
+	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+	fc = conf.find_program(compiler_name, path_list=MSVC_PATH)
+
+	# delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
+	if 'CL' in env:
+		del(env['CL'])
+
+	try:
+		conf.cmd_and_log(fc + ['/help'], env=env)
+	except UnicodeError:
+		st = traceback.format_exc()
+		if conf.logger:
+			conf.logger.error(st)
+		conf.fatal('ifort: Unicode error - check the code page?')
+	except Exception as e:
+		Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e))
+		conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
+	else:
+		Logs.debug('ifort: get_ifort_version: %r %r %r -> OK', compiler, version, target)
+	finally:
+		conf.env[compiler_name] = ''
+
+	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
+
+class target_compiler(object):
+	"""
+	Wraps a compiler configuration; call evaluate() to determine
+	whether the configuration is usable.
+	"""
+	def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
+		"""
+		:param ctx: configuration context to use to eventually get the version environment
+		:param compiler: compiler name
+		:param cpu: target cpu
+		:param version: compiler version number
+		:param bat_target: ?
+		:param bat: path to the batch file to run
+		:param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
+		"""
+		self.conf = ctx
+		self.name = None
+		self.is_valid = False
+		self.is_done = False
+
+		self.compiler = compiler
+		self.cpu = cpu
+		self.version = version
+		self.bat_target = bat_target
+		self.bat = bat
+		self.callback = callback
+
+	def evaluate(self):
+		if self.is_done:
+			return
+		self.is_done = True
+		try:
+			vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat)
+		except Errors.ConfigurationError:
+			self.is_valid = False
+			return
+		if self.callback:
+			vs = self.callback(self, vs)
+		self.is_valid = True
+		(self.bindirs, self.incdirs, self.libdirs) = vs
+
+	def __str__(self):
+		return str((self.bindirs, self.incdirs, self.libdirs))
+
+	def __repr__(self):
+		return repr((self.bindirs, self.incdirs, self.libdirs))
+
+@conf
+def detect_ifort(self):
+	return self.setup_ifort(self.get_ifort_versions(False))
+
+@conf
+def get_ifort_versions(self, eval_and_save=True):
+	"""
+	:return: platforms to compiler configurations
+	:rtype: dict
+	"""
+	dct = {}
+	self.gather_ifort_versions(dct)
+	return dct
+
+def _get_prog_names(self, compiler):
+	if compiler == 'oneapi':
+		compiler_name = 'ifx'
+		linker_name = 'XILINK'
+		lib_name = 'XILIB'
+	elif compiler == 'intel':
+		compiler_name = 'ifort'
+		linker_name = 'XILINK'
+		lib_name = 'XILIB'
+	else:
+		# assumes CL.exe
+		compiler_name = 'CL'
+		linker_name = 'LINK'
+		lib_name = 'LIB'
+	return compiler_name, linker_name, lib_name
+
+@conf
+def find_ifort_win32(conf):
+	# the autodetection is supposed to be performed before entering in this method
+	v = conf.env
+	path = v.PATH
+	compiler = v.MSVC_COMPILER
+	version = v.MSVC_VERSION
+
+	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
+	v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11)
+
+	# compiler
+	fc = conf.find_program(compiler_name, var='FC', path_list=path)
+
+	# before setting anything, check if the compiler is really intel fortran
+	env = dict(conf.environ)
+	if path:
+		env.update(PATH = ';'.join(path))
+	if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
+		conf.fatal('not intel fortran compiler could not be identified')
+
+	v.FC_NAME = 'IFORT'
+
+	if not v.LINK_FC:
+		conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True)
+
+	if not v.AR:
+		conf.find_program(lib_name, path_list=path, var='AR', mandatory=True)
+		v.ARFLAGS = ['/nologo']
+
+	# manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
+	if v.IFORT_MANIFEST:
+		conf.find_program('MT', path_list=path, var='MT')
+		v.MTFLAGS = ['/nologo']
+
+	try:
+		conf.load('winres')
+	except Errors.WafError:
+		Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+
+#######################################################################################################
+##### conf above, build below
+
+@after_method('apply_link')
+@feature('fc')
+def apply_flags_ifort(self):
+	"""
+	Adds additional flags implied by msvc, such as subsystems and pdb files::
+
+		def build(bld):
+			bld.stlib(source='main.c', target='bar', subsystem='gruik')
+	"""
+	if not self.env.IFORT_WIN32 or not getattr(self, 'link_task', None):
+		return
+
+	is_static = isinstance(self.link_task, ccroot.stlink_task)
+
+	subsystem = getattr(self, 'subsystem', '')
+	if subsystem:
+		subsystem = '/subsystem:%s' % subsystem
+		flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
+		self.env.append_value(flags, subsystem)
+
+	if not is_static:
+		for f in self.env.LINKFLAGS:
+			d = f.lower()
+			if d[1:] == 'debug':
+				pdbnode = self.link_task.outputs[0].change_ext('.pdb')
+				self.link_task.outputs.append(pdbnode)
+
+				if getattr(self, 'install_task', None):
+					self.pdb_install_task = self.add_install_files(install_to=self.install_task.install_to, install_from=pdbnode)
+
+				break
+
+@feature('fcprogram', 'fcshlib', 'fcprogram_test')
+@after_method('apply_link')
+def apply_manifest_ifort(self):
+	"""
+	Enables manifest embedding in Fortran DLLs when using ifort on Windows
+	See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
+	"""
+	if self.env.IFORT_WIN32 and getattr(self, 'link_task', None):
+		# it seems ifort.exe cannot be called for linking
+		self.link_task.env.FC = self.env.LINK_FC
+
+	if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self, 'link_task', None):
+		out_node = self.link_task.outputs[0]
+		man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
+		self.link_task.outputs.append(man_node)
+		self.env.DO_MANIFEST = True
 

+ 107 - 54
sdk/waf/waflib/Tools/intltool.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 
 """
 Support for translation tools such as msgfmt and intltool
@@ -18,7 +18,8 @@ Usage::
 		bld(
 			features  = "intltool_in",
 			podir     = "../po",
-			flags     = ["-d", "-q", "-u", "-c"],
+			style     = "desktop",
+			flags     = ["-u"],
 			source    = 'kupfer.desktop.in',
 			install_path = "${DATADIR}/applications",
 		)
@@ -27,22 +28,47 @@ Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
 """
 
 import os, re
-from waflib import Configure, TaskGen, Task, Utils, Runner, Options, Build, Logs
+from waflib import Context, Task, Utils, Logs
 import waflib.Tools.ccroot
-from waflib.TaskGen import feature, before_method
+from waflib.TaskGen import feature, before_method, taskgen_method
 from waflib.Logs import error
+from waflib.Configure import conf
+
+_style_flags = {
+	'ba': '-b',
+	'desktop': '-d',
+	'keys': '-k',
+	'quoted': '--quoted-style',
+	'quotedxml': '--quotedxml-style',
+	'rfc822deb': '-r',
+	'schemas': '-s',
+	'xml': '-x',
+}
+
+@taskgen_method
+def ensure_localedir(self):
+	"""
+	Expands LOCALEDIR from DATAROOTDIR/locale if possible, or falls back to PREFIX/share/locale
+	"""
+	# use the tool gnu_dirs to provide options to define this
+	if not self.env.LOCALEDIR:
+		if self.env.DATAROOTDIR:
+			self.env.LOCALEDIR = os.path.join(self.env.DATAROOTDIR, 'locale')
+		else:
+			self.env.LOCALEDIR = os.path.join(self.env.PREFIX, 'share', 'locale')
 
 @before_method('process_source')
 @feature('intltool_in')
 def apply_intltool_in_f(self):
 	"""
-	Create tasks to translate files by intltool-merge::
+	Creates tasks to translate files by intltool-merge::
 
 		def build(bld):
 			bld(
 				features  = "intltool_in",
 				podir     = "../po",
-				flags     = ["-d", "-q", "-u", "-c"],
+				style     = "desktop",
+				flags     = ["-u"],
 				source    = 'kupfer.desktop.in',
 				install_path = "${DATADIR}/applications",
 			)
@@ -51,40 +77,57 @@ def apply_intltool_in_f(self):
 	:type podir: string
 	:param source: source files to process
 	:type source: list of string
+	:param style: the intltool-merge mode of operation, can be one of the following values:
+	  ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
+	  See the ``intltool-merge`` man page for more information about supported modes of operation.
+	:type style: string
 	:param flags: compilation flags ("-quc" by default)
 	:type flags: list of string
 	:param install_path: installation path
 	:type install_path: string
 	"""
-	try: self.meths.remove('process_source')
-	except ValueError: pass
+	try:
+		self.meths.remove('process_source')
+	except ValueError:
+		pass
 
-	if not self.env.LOCALEDIR:
-		self.env.LOCALEDIR = self.env.PREFIX + '/share/locale'
+	self.ensure_localedir()
 
-	for i in self.to_list(self.source):
-		node = self.path.find_resource(i)
+	podir = getattr(self, 'podir', '.')
+	podirnode = self.path.find_dir(podir)
+	if not podirnode:
+		error("could not find the podir %r" % podir)
+		return
 
-		podir = getattr(self, 'podir', 'po')
-		podirnode = self.path.find_dir(podir)
-		if not podirnode:
-			error("could not find the podir %r" % podir)
-			continue
+	cache = getattr(self, 'intlcache', '.intlcache')
+	self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)]
+	self.env.INTLPODIR = podirnode.bldpath()
+	self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))
 
-		cache = getattr(self, 'intlcache', '.intlcache')
-		self.env['INTLCACHE'] = os.path.join(self.path.bldpath(), podir, cache)
-		self.env['INTLPODIR'] = podirnode.bldpath()
-		self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
+	if '-c' in self.env.INTLFLAGS:
+		self.bld.fatal('Redundant -c flag in intltool task %r' % self)
+
+	style = getattr(self, 'style', None)
+	if style:
+		try:
+			style_flag = _style_flags[style]
+		except KeyError:
+			self.bld.fatal('intltool_in style "%s" is not valid' % style)
+
+		self.env.append_unique('INTLFLAGS', [style_flag])
+
+	for i in self.to_list(self.source):
+		node = self.path.find_resource(i)
 
 		task = self.create_task('intltool', node, node.change_ext(''))
-		inst = getattr(self, 'install_path', '${LOCALEDIR}')
+		inst = getattr(self, 'install_path', None)
 		if inst:
-			self.bld.install_files(inst, task.outputs)
+			self.add_install_files(install_to=inst, install_from=task.outputs)
 
 @feature('intltool_po')
 def apply_intltool_po(self):
 	"""
-	Create tasks to process po files::
+	Creates tasks to process po files::
 
 		def build(bld):
 			bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
@@ -100,26 +143,26 @@ def apply_intltool_po(self):
 
 	The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
 	"""
-	try: self.meths.remove('process_source')
-	except ValueError: pass
+	try:
+		self.meths.remove('process_source')
+	except ValueError:
+		pass
 
-	if not self.env.LOCALEDIR:
-		self.env.LOCALEDIR = self.env.PREFIX + '/share/locale'
+	self.ensure_localedir()
 
-	appname = getattr(self, 'appname', 'set_your_app_name')
-	podir = getattr(self, 'podir', '')
+	appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name'))
+	podir = getattr(self, 'podir', '.')
 	inst = getattr(self, 'install_path', '${LOCALEDIR}')
 
 	linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
 	if linguas:
 		# scan LINGUAS file for locales to process
-		file = open(linguas.abspath())
-		langs = []
-		for line in file.readlines():
-			# ignore lines containing comments
-			if not line.startswith('#'):
-				langs += line.split()
-		file.close()
+		with open(linguas.abspath()) as f:
+			langs = []
+			for line in f.readlines():
+				# ignore lines containing comments
+				if not line.startswith('#'):
+					langs += line.split()
 		re_linguas = re.compile('[-a-zA-Z_@.]+')
 		for lang in langs:
 			# Make sure that we only process lines which contain locales
@@ -131,46 +174,56 @@ def apply_intltool_po(self):
 					filename = task.outputs[0].name
 					(langname, ext) = os.path.splitext(filename)
 					inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
-					self.bld.install_as(inst_file, task.outputs[0], chmod=getattr(self, 'chmod', Utils.O644), env=task.env)
+					self.add_install_as(install_to=inst_file, install_from=task.outputs[0],
+						chmod=getattr(self, 'chmod', Utils.O644))
 
 	else:
 		Logs.pprint('RED', "Error no LINGUAS file found in po directory")
 
 class po(Task.Task):
 	"""
-	Compile .po files into .gmo files
+	Compiles .po files into .gmo files
 	"""
 	run_str = '${MSGFMT} -o ${TGT} ${SRC}'
 	color   = 'BLUE'
 
 class intltool(Task.Task):
 	"""
-	Let intltool-merge translate an input file
+	Calls intltool-merge to update translation files
 	"""
-	run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
+	run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
 	color   = 'BLUE'
 
+@conf
+def find_msgfmt(conf):
+	"""
+	Detects msgfmt and sets the ``MSGFMT`` variable
+	"""
+	conf.find_program('msgfmt', var='MSGFMT')
+
+@conf
+def find_intltool_merge(conf):
+	"""
+	Detects intltool-merge
+	"""
+	if not conf.env.PERL:
+		conf.find_program('perl', var='PERL')
+	conf.env.INTLCACHE_ST = '--cache=%s'
+	conf.env.INTLFLAGS_DEFAULT = ['-q', '-u']
+	conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL')
+
 def configure(conf):
 	"""
-	Detect the program *msgfmt* and set *conf.env.MSGFMT*.
-	Detect the program *intltool-merge* and set *conf.env.INTLTOOL*.
+	Detects the program *msgfmt* and set *conf.env.MSGFMT*.
+	Detects the program *intltool-merge* and set *conf.env.INTLTOOL*.
 	It is possible to set INTLTOOL in the environment, but it must not have spaces in it::
 
 		$ INTLTOOL="/path/to/the program/intltool" waf configure
 
 	If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*.
 	"""
-	conf.find_program('msgfmt', var='MSGFMT')
-	conf.find_perl_program('intltool-merge', var='INTLTOOL')
-
-	prefix  = conf.env.PREFIX
-	datadir = conf.env.DATADIR
-	if not datadir:
-		datadir = os.path.join(prefix,'share')
-
-	conf.define('LOCALEDIR', os.path.join(datadir, 'locale').replace('\\', '\\\\'))
-	conf.define('DATADIR', datadir.replace('\\', '\\\\'))
-
+	conf.find_msgfmt()
+	conf.find_intltool_merge()
 	if conf.env.CC or conf.env.CXX:
 		conf.check(header_name='locale.h')
 

+ 22 - 31
sdk/waf/waflib/Tools/irixcc.py

@@ -1,60 +1,51 @@
 #! /usr/bin/env python
+# encoding: utf-8
 # imported from samba
 
 """
-compiler definition for irix/MIPSpro cc compiler
-based on suncc.py from waf
+Compiler definition for irix/MIPSpro cc compiler
 """
 
-import os
-from waflib import Utils
+from waflib import Errors
 from waflib.Tools import ccroot, ar
 from waflib.Configure import conf
 
 @conf
 def find_irixcc(conf):
 	v = conf.env
-	cc = None
-	if v['CC']: cc = v['CC']
-	elif 'CC' in conf.environ: cc = conf.environ['CC']
-	if not cc: cc = conf.find_program('cc', var='CC')
-	if not cc: conf.fatal('irixcc was not found')
-	cc = conf.cmd_to_list(cc)
-
+	cc = conf.find_program('cc', var='CC')
 	try:
 		conf.cmd_and_log(cc + ['-version'])
-	except Exception:
+	except Errors.WafError:
 		conf.fatal('%r -version could not be executed' % cc)
-
-	v['CC']  = cc
-	v['CC_NAME'] = 'irix'
+	v.CC_NAME = 'irix'
 
 @conf
 def irixcc_common_flags(conf):
 	v = conf.env
 
-	v['CC_SRC_F']            = ''
-	v['CC_TGT_F']            = ['-c', '-o']
-	v['CPPPATH_ST']          = '-I%s'
-	v['DEFINES_ST']          = '-D%s'
+	v.CC_SRC_F            = ''
+	v.CC_TGT_F            = ['-c', '-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
+
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
 
-	# linker
-	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
-	v['CCLNK_SRC_F']         = ''
-	v['CCLNK_TGT_F']         = ['-o']
+	v.CCLNK_SRC_F         = ''
+	v.CCLNK_TGT_F         = ['-o']
 
-	v['LIB_ST']              = '-l%s' # template for adding libs
-	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-	v['STLIB_ST']            = '-l%s'
-	v['STLIBPATH_ST']        = '-L%s'
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
 
-	v['cprogram_PATTERN']     = '%s'
-	v['cshlib_PATTERN']       = 'lib%s.so'
-	v['cstlib_PATTERN']      = 'lib%s.a'
+	v.cprogram_PATTERN    = '%s'
+	v.cshlib_PATTERN      = 'lib%s.so'
+	v.cstlib_PATTERN      = 'lib%s.a'
 
 def configure(conf):
 	conf.find_irixcc()
-	conf.find_cpp()
 	conf.find_ar()
 	conf.irixcc_common_flags()
 	conf.cc_load_tools()

+ 235 - 113
sdk/waf/waflib/Tools/javaw.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 
 """
 Java support
@@ -24,17 +24,99 @@ You would have to run::
    java -jar /path/to/jython.jar waf configure
 
 [1] http://www.jython.org/
+
+Usage
+=====
+
+Load the "java" tool.
+
+def configure(conf):
+	conf.load('java')
+
+Java tools will be autodetected and eventually, if present, the quite
+standard JAVA_HOME environment variable will be used. The also standard
+CLASSPATH variable is used for library searching.
+
+In configuration phase checks can be done on the system environment, for
+example to check if a class is known in the classpath::
+
+	conf.check_java_class('java.io.FileOutputStream')
+
+or if the system supports JNI applications building::
+
+	conf.check_jni_headers()
+
+
+The java tool supports compiling java code, creating jar files and
+creating javadoc documentation. This can be either done separately or
+together in a single definition. For example to manage them separately::
+
+	bld(features  = 'javac',
+		srcdir    = 'src',
+		compat    = '1.7',
+		use       = 'animals',
+		name      = 'cats-src',
+	)
+
+	bld(features  = 'jar',
+		basedir   = '.',
+		destfile  = '../cats.jar',
+		name      = 'cats',
+		use       = 'cats-src'
+	)
+
+
+Or together by defining all the needed attributes::
+
+	bld(features   = 'javac jar javadoc',
+		srcdir     = 'src/',  # folder containing the sources to compile
+		outdir     = 'src',   # folder where to output the classes (in the build directory)
+		compat     = '1.6',   # java compatibility version number
+		classpath  = ['.', '..'],
+
+		# jar
+		basedir    = 'src', # folder containing the classes and other files to package (must match outdir)
+		destfile   = 'foo.jar', # do not put the destfile in the folder of the java classes!
+		use        = 'NNN',
+		jaropts    = ['-C', 'default/src/', '.'], # can be used to give files
+		manifest   = 'src/Manifest.mf', # Manifest file to include
+
+		# javadoc
+		javadoc_package = ['com.meow' , 'com.meow.truc.bar', 'com.meow.truc.foo'],
+		javadoc_output  = 'javadoc',
+	)
+
+External jar dependencies can be mapped to a standard waf "use" dependency by
+setting an environment variable with a CLASSPATH prefix in the configuration,
+for example::
+
+	conf.env.CLASSPATH_NNN = ['aaaa.jar', 'bbbb.jar']
+
+and then NNN can be freely used in rules as::
+
+	use        = 'NNN',
+
+In the java tool the dependencies via use are not transitive by default, as
+this necessity depends on the code. To enable recursive dependency scanning
+use on a specific rule:
+
+		recurse_use = True
+
+Or build-wise by setting RECURSE_JAVA:
+
+		bld.env.RECURSE_JAVA = True
+
+Unit tests can be integrated in the waf unit test environment using the javatest extra.
 """
 
-import os, re, tempfile, shutil
-from waflib import TaskGen, Task, Utils, Options, Build, Errors, Node, Logs
+import os, shutil
+from waflib import Task, Utils, Errors, Node
 from waflib.Configure import conf
-from waflib.TaskGen import feature, before_method, after_method
+from waflib.TaskGen import feature, before_method, after_method, taskgen_method
 
 from waflib.Tools import ccroot
 ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS'])
 
-
 SOURCE_RE = '**/*.java'
 JAR_RE = '**/*'
 
@@ -69,8 +151,6 @@ def apply_java(self):
 		sourcepath='.', srcdir='.',
 		jar_mf_attributes={}, jar_mf_classpath=[])
 
-	nodes_lst = []
-
 	outdir = getattr(self, 'outdir', None)
 	if outdir:
 		if not isinstance(outdir, Node.Node):
@@ -79,7 +159,7 @@ def apply_java(self):
 		outdir = self.path.get_bld()
 	outdir.mkdir()
 	self.outdir = outdir
-	self.env['OUTDIR'] = outdir.abspath()
+	self.env.OUTDIR = outdir.abspath()
 
 	self.javac_task = tsk = self.create_task('javac')
 	tmp = []
@@ -95,10 +175,11 @@ def apply_java(self):
 			if not y:
 				self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
 		tmp.append(y)
+
 	tsk.srcdir = tmp
 
 	if getattr(self, 'compat', None):
-		tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
+		tsk.env.append_value('JAVACFLAGS', ['-source', str(self.compat)])
 
 	if hasattr(self, 'sourcepath'):
 		fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
@@ -109,36 +190,86 @@ def apply_java(self):
 	if names:
 		tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
 
+
+@taskgen_method
+def java_use_rec(self, name, **kw):
+	"""
+	Processes recursively the *use* attribute for each referred java compilation
+	"""
+	if name in self.tmp_use_seen:
+		return
+
+	self.tmp_use_seen.append(name)
+
+	try:
+		y = self.bld.get_tgen_by_name(name)
+	except Errors.WafError:
+		self.uselib.append(name)
+		return
+	else:
+		y.post()
+		# Add generated JAR name for CLASSPATH. Task ordering (set_run_after)
+		# is already guaranteed by ordering done between the single tasks
+		if hasattr(y, 'jar_task'):
+			self.use_lst.append(y.jar_task.outputs[0].abspath())
+		else:
+			if hasattr(y,'outdir'):
+				self.use_lst.append(y.outdir.abspath())
+			else:
+				self.use_lst.append(y.path.get_bld().abspath())
+
+	for x in self.to_list(getattr(y, 'use', [])):
+		self.java_use_rec(x)
+
 @feature('javac')
+@before_method('propagate_uselib_vars')
 @after_method('apply_java')
 def use_javac_files(self):
 	"""
-	Process the *use* attribute referring to other java compilations
+	Processes the *use* attribute referring to other java compilations
 	"""
-	lst = []
+	self.use_lst = []
+	self.tmp_use_seen = []
 	self.uselib = self.to_list(getattr(self, 'uselib', []))
 	names = self.to_list(getattr(self, 'use', []))
 	get = self.bld.get_tgen_by_name
 	for x in names:
 		try:
-			y = get(x)
-		except Exception:
+			tg = get(x)
+		except Errors.WafError:
 			self.uselib.append(x)
 		else:
-			y.post()
-			lst.append(y.jar_task.outputs[0].abspath())
-			self.javac_task.set_run_after(y.jar_task)
+			tg.post()
+			if hasattr(tg, 'jar_task'):
+				self.use_lst.append(tg.jar_task.outputs[0].abspath())
+				self.javac_task.set_run_after(tg.jar_task)
+				self.javac_task.dep_nodes.extend(tg.jar_task.outputs)
+			else:
+				if hasattr(tg, 'outdir'):
+					base_node = tg.outdir
+				else:
+					base_node = tg.path.get_bld()
+
+				self.use_lst.append(base_node.abspath())
+				self.javac_task.dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
+
+				for tsk in tg.tasks:
+					self.javac_task.set_run_after(tsk)
 
-	if lst:
-		self.env.append_value('CLASSPATH', lst)
+		# If recurse use scan is enabled recursively add use attribute for each used one
+		if getattr(self, 'recurse_use', False) or self.bld.env.RECURSE_JAVA:
+			self.java_use_rec(x)
+
+	self.env.prepend_value('CLASSPATH', self.use_lst)
 
 @feature('javac')
 @after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
 def set_classpath(self):
 	"""
-	Set the CLASSPATH value on the *javac* task previously created.
+	Sets the CLASSPATH value on the *javac* task previously created.
 	"""
-	self.env.append_value('CLASSPATH', getattr(self, 'classpath', []))
+	if getattr(self, 'classpath', None):
+		self.env.append_unique('CLASSPATH', getattr(self, 'classpath', []))
 	for x in self.tasks:
 		x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
 
@@ -147,7 +278,7 @@ def set_classpath(self):
 @before_method('process_source')
 def jar_files(self):
 	"""
-	Create a jar task. There can be only one jar task by task generator.
+	Creates a jar task (one maximum per task generator)
 	"""
 	destfile = getattr(self, 'destfile', 'test.jar')
 	jaropts = getattr(self, 'jaropts', [])
@@ -165,7 +296,12 @@ def jar_files(self):
 	self.jar_task = tsk = self.create_task('jar_create')
 	if manifest:
 		jarcreate = getattr(self, 'jarcreate', 'cfm')
-		node = self.path.find_node(manifest)
+		if not isinstance(manifest,Node.Node):
+			node = self.path.find_resource(manifest)
+		else:
+			node = manifest
+		if not node:
+			self.bld.fatal('invalid manifest file %r for %r' % (manifest, self))
 		tsk.dep_nodes.append(node)
 		jaropts.insert(0, node.abspath())
 	else:
@@ -181,8 +317,8 @@ def jar_files(self):
 	jaropts.append(basedir.bldpath())
 	jaropts.append('.')
 
-	tsk.env['JAROPTS'] = jaropts
-	tsk.env['JARCREATE'] = jarcreate
+	tsk.env.JAROPTS = jaropts
+	tsk.env.JARCREATE = jarcreate
 
 	if getattr(self, 'javac_task', None):
 		tsk.set_run_after(self.javac_task)
@@ -191,25 +327,39 @@ def jar_files(self):
 @after_method('jar_files')
 def use_jar_files(self):
 	"""
-	Process the *use* attribute to set the build order on the
+	Processes the *use* attribute to set the build order on the
 	tasks created by another task generator.
 	"""
-	lst = []
 	self.uselib = self.to_list(getattr(self, 'uselib', []))
 	names = self.to_list(getattr(self, 'use', []))
 	get = self.bld.get_tgen_by_name
 	for x in names:
 		try:
 			y = get(x)
-		except Exception:
+		except Errors.WafError:
 			self.uselib.append(x)
 		else:
 			y.post()
 			self.jar_task.run_after.update(y.tasks)
 
-class jar_create(Task.Task):
+class JTask(Task.Task):
 	"""
-	Create a jar file
+	Base class for java and jar tasks; provides functionality to run long commands
+	"""
+	def split_argfile(self, cmd):
+		inline = [cmd[0]]
+		infile = []
+		for x in cmd[1:]:
+			# jar and javac do not want -J flags in @file
+			if x.startswith('-J'):
+				inline.append(x)
+			else:
+				infile.append(self.quote_flag(x))
+		return (inline, infile)
+
+class jar_create(JTask):
+	"""
+	Creates a jar file
 	"""
 	color   = 'GREEN'
 	run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
@@ -223,98 +373,58 @@ class jar_create(Task.Task):
 			if not t.hasrun:
 				return Task.ASK_LATER
 		if not self.inputs:
-			global JAR_RE
 			try:
-				self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
+				self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False, quiet=True) if id(x) != id(self.outputs[0])]
 			except Exception:
 				raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self))
 		return super(jar_create, self).runnable_status()
 
-class javac(Task.Task):
+class javac(JTask):
 	"""
-	Compile java files
+	Compiles java files
 	"""
 	color   = 'BLUE'
-
-	nocache = True
-	"""
-	The .class files cannot be put into a cache at the moment
-	"""
-
+	run_str = '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}'
 	vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR']
 	"""
 	The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change.
 	"""
+	def uid(self):
+		"""Identify java tasks by input&output folder"""
+		lst = [self.__class__.__name__, self.generator.outdir.abspath()]
+		for x in self.srcdir:
+			lst.append(x.abspath())
+		return Utils.h_list(lst)
 
 	def runnable_status(self):
 		"""
-		Wait for dependent tasks to be complete, then read the file system to find the input nodes.
+		Waits for dependent tasks to be complete, then read the file system to find the input nodes.
 		"""
 		for t in self.run_after:
 			if not t.hasrun:
 				return Task.ASK_LATER
 
 		if not self.inputs:
-			global SOURCE_RE
 			self.inputs  = []
 			for x in self.srcdir:
-				self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
+				if x.exists():
+					self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False, quiet=True))
 		return super(javac, self).runnable_status()
 
-	def run(self):
-		"""
-		Execute the javac compiler
-		"""
-		env = self.env
-		gen = self.generator
-		bld = gen.bld
-		wd = bld.bldnode.abspath()
-		def to_list(xx):
-			if isinstance(xx, str): return [xx]
-			return xx
-		cmd = []
-		cmd.extend(to_list(env['JAVAC']))
-		cmd.extend(['-classpath'])
-		cmd.extend(to_list(env['CLASSPATH']))
-		cmd.extend(['-d'])
-		cmd.extend(to_list(env['OUTDIR']))
-		cmd.extend(to_list(env['JAVACFLAGS']))
-
-		files = [a.path_from(bld.bldnode) for a in self.inputs]
-
-		# workaround for command line length limit:
-		# http://support.microsoft.com/kb/830473
-		tmp = None
-		try:
-			if len(str(files)) + len(str(cmd)) > 8192:
-				(fd, tmp) = tempfile.mkstemp(dir=bld.bldnode.abspath())
-				try:
-					os.write(fd, '\n'.join(files).encode())
-				finally:
-					if tmp:
-						os.close(fd)
-				if Logs.verbose:
-					Logs.debug('runner: %r' % (cmd + files))
-				cmd.append('@' + tmp)
-			else:
-				cmd += files
-
-			ret = self.exec_command(cmd, cwd=wd, env=env.env or None)
-		finally:
-			if tmp:
-				os.remove(tmp)
-		return ret
-
 	def post_run(self):
 		"""
+		List class files created
 		"""
-		for n in self.generator.outdir.ant_glob('**/*.class'):
-			n.sig = Utils.h_file(n.abspath()) # careful with this
+		for node in self.generator.outdir.ant_glob('**/*.class', quiet=True):
+			self.generator.bld.node_sigs[node] = self.uid()
 		self.generator.bld.task_sigs[self.uid()] = self.cache_sig
 
 @feature('javadoc')
 @after_method('process_rule')
 def create_javadoc(self):
+	"""
+	Creates a javadoc task (feature 'javadoc')
+	"""
 	tsk = self.create_task('javadoc')
 	tsk.classpath = getattr(self, 'classpath', [])
 	self.javadoc_package = Utils.to_list(self.javadoc_package)
@@ -322,6 +432,9 @@ def create_javadoc(self):
 		self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output)
 
 class javadoc(Task.Task):
+	"""
+	Builds java documentation
+	"""
 	color = 'BLUE'
 
 	def __str__(self):
@@ -330,7 +443,7 @@ class javadoc(Task.Task):
 	def run(self):
 		env = self.env
 		bld = self.generator.bld
-		wd = bld.bldnode.abspath()
+		wd = bld.bldnode
 
 		#add src node + bld node (for generated java code)
 		srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir
@@ -343,7 +456,7 @@ class javadoc(Task.Task):
 		classpath = "".join(classpath)
 
 		self.last_cmd = lst = []
-		lst.extend(Utils.to_list(env['JAVADOC']))
+		lst.extend(Utils.to_list(env.JAVADOC))
 		lst.extend(['-d', self.generator.javadoc_output.abspath()])
 		lst.extend(['-sourcepath', srcpath])
 		lst.extend(['-classpath', classpath])
@@ -354,14 +467,14 @@ class javadoc(Task.Task):
 		self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
 
 	def post_run(self):
-		nodes = self.generator.javadoc_output.ant_glob('**')
-		for x in nodes:
-			x.sig = Utils.h_file(x.abspath())
+		nodes = self.generator.javadoc_output.ant_glob('**', quiet=True)
+		for node in nodes:
+			self.generator.bld.node_sigs[node] = self.uid()
 		self.generator.bld.task_sigs[self.uid()] = self.cache_sig
 
 def configure(self):
 	"""
-	Detect the javac, java and jar programs
+	Detects the javac, java and jar programs
 	"""
 	# If JAVA_PATH is set, we prepend it to the path list
 	java_path = self.environ['PATH'].split(os.pathsep)
@@ -369,37 +482,46 @@ def configure(self):
 
 	if 'JAVA_HOME' in self.environ:
 		java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + java_path
-		self.env['JAVA_HOME'] = [self.environ['JAVA_HOME']]
+		self.env.JAVA_HOME = [self.environ['JAVA_HOME']]
 
 	for x in 'javac java jar javadoc'.split():
-		self.find_program(x, var=x.upper(), path_list=java_path)
-		self.env[x.upper()] = self.cmd_to_list(self.env[x.upper()])
+		self.find_program(x, var=x.upper(), path_list=java_path, mandatory=(x not in ('javadoc')))
+
+	if not self.env.JAVA_HOME:
+		# needed for jni
+		if self.env.JAVAC and len(Utils.to_list(self.env.JAVAC)) == 1:
+			# heuristic to find the correct JAVA_HOME
+			javac_path = Utils.to_list(self.env.JAVAC)[0]
+			java_dir = os.path.dirname(os.path.dirname(os.path.realpath(javac_path)))
+			if os.path.exists(os.path.join(java_dir, 'lib')):
+				self.env.JAVA_HOME = [java_dir]
 
 	if 'CLASSPATH' in self.environ:
-		v['CLASSPATH'] = self.environ['CLASSPATH']
+		v.CLASSPATH = self.environ['CLASSPATH']
 
-	if not v['JAR']: self.fatal('jar is required for making java packages')
-	if not v['JAVAC']: self.fatal('javac is required for compiling java classes')
+	if not v.JAR:
+		self.fatal('jar is required for making java packages')
+	if not v.JAVAC:
+		self.fatal('javac is required for compiling java classes')
 
-	v['JARCREATE'] = 'cf' # can use cvf
-	v['JAVACFLAGS'] = []
+	v.JARCREATE = 'cf' # can use cvf
+	v.JAVACFLAGS = []
 
 @conf
 def check_java_class(self, classname, with_classpath=None):
 	"""
-	Check if the specified java class exists
+	Checks if the specified java class exists
 
 	:param classname: class to check, like java.util.HashMap
 	:type classname: string
 	:param with_classpath: additional classpath to give
 	:type with_classpath: string
 	"""
-
 	javatestdir = '.waf-javatest'
 
 	classpath = javatestdir
-	if self.env['CLASSPATH']:
-		classpath += os.pathsep + self.env['CLASSPATH']
+	if self.env.CLASSPATH:
+		classpath += os.pathsep + self.env.CLASSPATH
 	if isinstance(with_classpath, str):
 		classpath += os.pathsep + with_classpath
 
@@ -409,10 +531,10 @@ def check_java_class(self, classname, with_classpath=None):
 	Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source)
 
 	# Compile the source
-	self.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
+	self.exec_command(self.env.JAVAC + [os.path.join(javatestdir, 'Test.java')], shell=False)
 
 	# Try to run the app
-	cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
+	cmd = self.env.JAVA + ['-cp', classpath, 'Test', classname]
 	self.to_log("%s\n" % str(cmd))
 	found = self.exec_command(cmd, shell=False)
 
@@ -425,7 +547,7 @@ def check_java_class(self, classname, with_classpath=None):
 @conf
 def check_jni_headers(conf):
 	"""
-	Check for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::
+	Checks for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::
 
 		def options(opt):
 			opt.load('compiler_c')
@@ -437,7 +559,6 @@ def check_jni_headers(conf):
 		def build(bld):
 			bld.shlib(source='a.c', target='app', use='JAVA')
 	"""
-
 	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
 		conf.fatal('load a compiler first (gcc, g++, ..)')
 
@@ -445,7 +566,7 @@ def check_jni_headers(conf):
 		conf.fatal('set JAVA_HOME in the system environment')
 
 	# jni requires the jvm
-	javaHome = conf.env['JAVA_HOME'][0]
+	javaHome = conf.env.JAVA_HOME[0]
 
 	dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
 	if dir is None:
@@ -460,12 +581,14 @@ def check_jni_headers(conf):
 	f = dir.ant_glob('**/*jvm.(so|dll|dylib)')
 	libDirs = [x.parent.abspath() for x in f] or [javaHome]
 
-	# On windows, we need both the .dll and .lib to link.  On my JDK, they are
+	# On windows, we need both the .dll and .lib to link. On my JDK, they are
 	# in different directories...
 	f = dir.ant_glob('**/*jvm.(lib)')
 	if f:
 		libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f]
 
+	if conf.env.DEST_OS == 'freebsd':
+		conf.env.append_unique('LINKFLAGS_JAVA', '-pthread')
 	for d in libDirs:
 		try:
 			conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
@@ -477,4 +600,3 @@ def check_jni_headers(conf):
 	else:
 		conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
 
-

+ 19 - 22
sdk/waf/waflib/Tools/ldc2.py

@@ -2,58 +2,55 @@
 # encoding: utf-8
 # Alex Rønne Petersen, 2012 (alexrp/Zor)
 
-import sys
 from waflib.Tools import ar, d
 from waflib.Configure import conf
 
 @conf
 def find_ldc2(conf):
 	"""
-	Find the program *ldc2* and set the variable *D*
+	Finds the program *ldc2* and set the variable *D*
 	"""
-
 	conf.find_program(['ldc2'], var='D')
 
-	out = conf.cmd_and_log([conf.env.D, '-version'])
+	out = conf.cmd_and_log(conf.env.D + ['-version'])
 	if out.find("based on DMD v2.") == -1:
 		conf.fatal("detected compiler is not ldc2")
 
 @conf
 def common_flags_ldc2(conf):
 	"""
-	Set the D flags required by *ldc2*
+	Sets the D flags required by *ldc2*
 	"""
-
 	v = conf.env
 
-	v['D_SRC_F']           = ['-c']
-	v['D_TGT_F']           = '-of%s'
+	v.D_SRC_F           = ['-c']
+	v.D_TGT_F           = '-of%s'
 
-	v['D_LINKER']          = v['D']
-	v['DLNK_SRC_F']        = ''
-	v['DLNK_TGT_F']        = '-of%s'
-	v['DINC_ST']           = '-I%s'
+	v.D_LINKER          = v.D
+	v.DLNK_SRC_F        = ''
+	v.DLNK_TGT_F        = '-of%s'
+	v.DINC_ST           = '-I%s'
 
-	v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
-	v['DSTLIB_ST'] = v['DSHLIB_ST']         = '-L-l%s'
-	v['DSTLIBPATH_ST'] = v['DLIBPATH_ST']   = '-L-L%s'
+	v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
+	v.DSTLIB_ST = v.DSHLIB_ST         = '-L-l%s'
+	v.DSTLIBPATH_ST = v.DLIBPATH_ST   = '-L-L%s'
 
-	v['LINKFLAGS_dshlib']  = ['-L-shared']
+	v.LINKFLAGS_dshlib  = ['-L-shared']
 
-	v['DHEADER_ext']       = '.di'
-	v['DFLAGS_d_with_header'] = ['-H', '-Hf']
-	v['D_HDR_F']           = '%s'
+	v.DHEADER_ext       = '.di'
+	v.DFLAGS_d_with_header = ['-H', '-Hf']
+	v.D_HDR_F           = '%s'
 
-	v['LINKFLAGS']     = []
-	v['DFLAGS_dshlib'] = ['-relocation-model=pic']
+	v.LINKFLAGS     = []
+	v.DFLAGS_dshlib = ['-relocation-model=pic']
 
 def configure(conf):
 	"""
 	Configuration for *ldc2*
 	"""
-
 	conf.find_ldc2()
 	conf.load('ar')
 	conf.load('d')
 	conf.common_flags_ldc2()
 	conf.d_platform_flags()
+

+ 3 - 3
sdk/waf/waflib/Tools/lua.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # Sebastian Schlingmann, 2008
-# Thomas Nagy, 2008-2010 (ita)
+# Thomas Nagy, 2008-2018 (ita)
 
 """
 Lua support.
@@ -16,14 +16,14 @@ Compile *.lua* files into *.luac*::
 """
 
 from waflib.TaskGen import extension
-from waflib import Task, Utils
+from waflib import Task
 
 @extension('.lua')
 def add_lua(self, node):
 	tsk = self.create_task('luac', node, node.change_ext('.luac'))
 	inst_to = getattr(self, 'install_path', self.env.LUADIR and '${LUADIR}' or None)
 	if inst_to:
-		self.bld.install_files(inst_to, tsk.outputs)
+		self.add_install_files(install_to=inst_to, install_from=tsk.outputs)
 	return tsk
 
 class luac(Task.Task):

+ 41 - 0
sdk/waf/waflib/Tools/md5_tstamp.py

@@ -0,0 +1,41 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+"""
+Re-calculate md5 hashes of files only when the file time have changed::
+
+	def options(opt):
+		opt.load('md5_tstamp')
+
+The hashes can also reflect either the file contents (STRONGEST=True) or the
+file time and file size.
+
+The performance benefits of this module are usually insignificant.
+"""
+
+import os, stat
+from waflib import Utils, Build, Node
+
+STRONGEST = True
+
+Build.SAVED_ATTRS.append('hashes_md5_tstamp')
+def h_file(self):
+	filename = self.abspath()
+	st = os.stat(filename)
+
+	cache = self.ctx.hashes_md5_tstamp
+	if filename in cache and cache[filename][0] == st.st_mtime:
+		return cache[filename][1]
+
+	if STRONGEST:
+		ret = Utils.h_file(filename)
+	else:
+		if stat.S_ISDIR(st[stat.ST_MODE]):
+			raise IOError('Not a file')
+		ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest()
+
+	cache[filename] = (st.st_mtime, ret)
+	return ret
+h_file.__doc__ = Node.Node.h_file.__doc__
+Node.Node.h_file = h_file
+

File diff suppressed because it is too large
+ 363 - 330
sdk/waf/waflib/Tools/msvc.py


+ 9 - 2
sdk/waf/waflib/Tools/nasm.py

@@ -1,11 +1,12 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2008-2010 (ita)
+# Thomas Nagy, 2008-2018 (ita)
 
 """
 Nasm tool (asm processing)
 """
 
+import os
 import waflib.Tools.asm # leave this
 from waflib.TaskGen import feature
 
@@ -18,7 +19,13 @@ def configure(conf):
 	"""
 	Detect nasm/yasm and set the variable *AS*
 	"""
-	nasm = conf.find_program(['nasm', 'yasm'], var='AS')
+	conf.find_program(['nasm', 'yasm'], var='AS')
 	conf.env.AS_TGT_F = ['-o']
 	conf.env.ASLNK_TGT_F = ['-o']
 	conf.load('asm')
+	conf.env.ASMPATH_ST = '-I%s' + os.sep
+	txt = conf.cmd_and_log(conf.env.AS + ['--version'])
+	if 'yasm' in txt.lower():
+		conf.env.ASM_NAME = 'yasm'
+	else:
+		conf.env.ASM_NAME = 'nasm'

+ 24 - 0
sdk/waf/waflib/Tools/nobuild.py

@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015 (ita)
+
+"""
+Override the build commands to write empty files.
+This is useful for profiling and evaluating the Python overhead.
+
+To use::
+
+    def build(bld):
+        ...
+        bld.load('nobuild')
+
+"""
+
+from waflib import Task
+def build(bld):
+	def run(self):
+		for x in self.outputs:
+			x.write('')
+	for (name, cls) in Task.classes.items():
+		cls.run = run
+

+ 33 - 34
sdk/waf/waflib/Tools/perl.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # andersg at 0x63.nu 2007
-# Thomas Nagy 2010 (ita)
+# Thomas Nagy 2016-2018 (ita)
 
 """
 Support for Perl extensions. A C/C++ compiler is required::
@@ -24,7 +24,7 @@ Support for Perl extensions. A C/C++ compiler is required::
 """
 
 import os
-from waflib import Task, Options, Utils
+from waflib import Task, Options, Utils, Errors
 from waflib.Configure import conf
 from waflib.TaskGen import extension, feature, before_method
 
@@ -36,8 +36,9 @@ def init_perlext(self):
 	*lib* prefix from library names.
 	"""
 	self.uselib = self.to_list(getattr(self, 'uselib', []))
-	if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
-	self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['perlext_PATTERN']
+	if not 'PERLEXT' in self.uselib:
+		self.uselib.append('PERLEXT')
+	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN
 
 @extension('.xs')
 def xsubpp_file(self, node):
@@ -63,7 +64,6 @@ def check_perl_version(self, minver=None):
 	minver is supposed to be a tuple
 	"""
 	res = True
-	
 	if minver:
 		cver = '.'.join(map(str,minver))
 	else:
@@ -71,18 +71,8 @@ def check_perl_version(self, minver=None):
 
 	self.start_msg('Checking for minimum perl version %s' % cver)
 
-	perl = getattr(Options.options, 'perlbinary', None)
-
-	if not perl:
-		perl = self.find_program('perl', var='PERL')
-	
-	if not perl:
-		self.end_msg("Perl not found", color="YELLOW")
-		return False
-	
-	self.env['PERL'] = perl
-
-	version = self.cmd_and_log([perl, "-e", 'printf \"%vd\", $^V'])
+	perl = self.find_program('perl', var='PERL', value=getattr(Options.options, 'perlbinary', None))
+	version = self.cmd_and_log(perl + ["-e", 'printf \"%vd\", $^V'])
 	if not version:
 		res = False
 		version = "Unknown"
@@ -91,7 +81,7 @@ def check_perl_version(self, minver=None):
 		if ver < minver:
 			res = False
 
-	self.end_msg(version, color=res and "GREEN" or "YELLOW")
+	self.end_msg(version, color=res and 'GREEN' or 'YELLOW')
 	return res
 
 @conf
@@ -105,11 +95,11 @@ def check_perl_module(self, module):
 		def configure(conf):
 			conf.check_perl_module("Some::Module 2.92")
 	"""
-	cmd = [self.env['PERL'], '-e', 'use %s' % module]
+	cmd = self.env.PERL + ['-e', 'use %s' % module]
 	self.start_msg('perl module %s' % module)
 	try:
 		r = self.cmd_and_log(cmd)
-	except Exception:
+	except Errors.WafError:
 		self.end_msg(False)
 		return None
 	self.end_msg(r or True)
@@ -131,27 +121,36 @@ def check_perl_ext_devel(self):
 	if not perl:
 		self.fatal('find perl first')
 
-	def read_out(cmd):
-		return Utils.to_list(self.cmd_and_log(perl + cmd))
-
-	env['LINKFLAGS_PERLEXT'] = read_out(" -MConfig -e'print $Config{lddlflags}'")
-	env['INCLUDES_PERLEXT'] = read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'")
-	env['CFLAGS_PERLEXT'] = read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'")
-
-	env['XSUBPP'] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'")
-	env['EXTUTILS_TYPEMAP'] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'")
+	def cmd_perl_config(s):
+		return perl + ['-MConfig', '-e', 'print \"%s\"' % s]
+	def cfg_str(cfg):
+		return self.cmd_and_log(cmd_perl_config(cfg))
+	def cfg_lst(cfg):
+		return Utils.to_list(cfg_str(cfg))
+	def find_xsubpp():
+		for var in ('privlib', 'vendorlib'):
+			xsubpp = cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}' % var)
+			if xsubpp and os.path.isfile(xsubpp[0]):
+				return xsubpp
+		return self.find_program('xsubpp')
+
+	env.LINKFLAGS_PERLEXT = cfg_lst('$Config{lddlflags}')
+	env.INCLUDES_PERLEXT = cfg_lst('$Config{archlib}/CORE')
+	env.CFLAGS_PERLEXT = cfg_lst('$Config{ccflags} $Config{cccdlflags}')
+	env.EXTUTILS_TYPEMAP = cfg_lst('$Config{privlib}/ExtUtils/typemap')
+	env.XSUBPP = find_xsubpp()
 
 	if not getattr(Options.options, 'perlarchdir', None):
-		env['ARCHDIR_PERL'] = self.cmd_and_log(perl + " -MConfig -e'print $Config{sitearch}'")
+		env.ARCHDIR_PERL = cfg_str('$Config{sitearch}')
 	else:
-		env['ARCHDIR_PERL'] = getattr(Options.options, 'perlarchdir')
+		env.ARCHDIR_PERL = getattr(Options.options, 'perlarchdir')
 
-	env['perlext_PATTERN'] = '%s.' + self.cmd_and_log(perl + " -MConfig -e'print $Config{dlext}'")
+	env.perlext_PATTERN = '%s.' + cfg_str('$Config{dlext}')
 
 def options(opt):
 	"""
 	Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
 	"""
-	opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
-	opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
+	opt.add_option('--with-perl-binary', type=str, dest='perlbinary', help = 'Specify alternate perl binary', default=None)
+	opt.add_option('--with-perl-archdir', type=str, dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
 

+ 368 - 250
sdk/waf/waflib/Tools/python.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2007-2010 (ita)
+# Thomas Nagy, 2007-2015 (ita)
 # Gustavo Carneiro (gjc), 2007
 
 """
@@ -19,7 +19,7 @@ Support for Python, detect the headers and libraries and provide
 """
 
 import os, sys
-from waflib import Utils, Options, Errors, Logs
+from waflib import Errors, Logs, Node, Options, Task, Utils
 from waflib.TaskGen import extension, before_method, after_method, feature
 from waflib.Configure import conf
 
@@ -47,92 +47,106 @@ Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
 
 INST = '''
 import sys, py_compile
-py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3])
+py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
 '''
 """
-Piece of Python code used in :py:func:`waflib.Tools.python.install_pyfile` for installing python files
+Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
 """
 
-DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
-
-@extension('.py')
-def process_py(self, node):
+@before_method('process_source')
+@feature('py')
+def feature_py(self):
 	"""
-	Add a callback using :py:func:`waflib.Tools.python.install_pyfile` to install a python file
+	Create tasks to byte-compile .py files and install them, if requested
 	"""
-	try:
-		if not self.bld.is_install:
-			return
-	except AttributeError:
-		return
+	self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
+	install_from = getattr(self, 'install_from', None)
+	if install_from and not isinstance(install_from, Node.Node):
+		install_from = self.path.find_dir(install_from)
+	self.install_from = install_from
 
-	try:
-		if not self.install_path:
-			return
-	except AttributeError:
-		self.install_path = '${PYTHONDIR}'
-
-	# i wonder now why we wanted to do this after the build is over
-	# issue #901: people want to preserve the structure of installed files
-	def inst_py(ctx):
-		install_from = getattr(self, 'install_from', None)
-		if install_from:
-			install_from = self.path.find_dir(install_from)
-		install_pyfile(self, node, install_from)
-	self.bld.add_post_fun(inst_py)
-
-def install_pyfile(self, node, install_from=None):
-	"""
-	Execute the installation of a python file
+	ver = self.env.PYTHON_VERSION
+	if not ver:
+		self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
+
+	if int(ver.replace('.', '')) > 31:
+		self.install_32 = True
 
-	:param node: python file
-	:type node: :py:class:`waflib.Node.Node`
+@extension('.py')
+def process_py(self, node):
 	"""
+	Add signature of .py file, so it will be byte-compiled when necessary
+	"""
+	assert(hasattr(self, 'install_path')), 'add features="py" for target "%s" in "%s/wscript".' % (self.target, self.path.abspath())
+	self.install_from = getattr(self, 'install_from', None)
+	relative_trick = getattr(self, 'relative_trick', True)
+	if self.install_from:
+		assert isinstance(self.install_from, Node.Node), \
+		'add features="py" for target "%s" in "%s/wscript" (%s).' % (self.target, self.path.abspath(), type(self.install_from))
+
+	# where to install the python file
+	if self.install_path:
+		if self.install_from:
+			self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=relative_trick)
+		else:
+			self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=relative_trick)
+
+	lst = []
+	if self.env.PYC:
+		lst.append('pyc')
+	if self.env.PYO:
+		lst.append('pyo')
+
+	if self.install_path:
+		if self.install_from:
+			target_dir = node.path_from(self.install_from) if relative_trick else node.name
+			pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env)
+		else:
+			target_dir = node.path_from(self.path) if relative_trick else node.name
+			pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env)
+	else:
+		pyd = node.abspath()
+
+	for ext in lst:
+		if self.env.PYTAG and not self.env.NOPYCACHE:
+			# __pycache__ installation for python 3.2 - PEP 3147
+			name = node.name[:-3]
+			pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext))
+			pyobj.parent.mkdir()
+		else:
+			pyobj = node.change_ext(".%s" % ext)
 
-	from_node = install_from or node.parent
-	tsk = self.bld.install_as(self.install_path + '/' + node.path_from(from_node), node, postpone=False)
-	path = tsk.get_install_path()
+		tsk = self.create_task(ext, node, pyobj)
+		tsk.pyd = pyd
 
-	if self.bld.is_install < 0:
-		Logs.info("+ removing byte compiled python files")
-		for x in 'co':
-			try:
-				os.remove(path + x)
-			except OSError:
-				pass
-
-	if self.bld.is_install > 0:
-		try:
-			st1 = os.stat(path)
-		except OSError:
-			Logs.error('The python file is missing, this should not happen')
-
-		for x in ['c', 'o']:
-			do_inst = self.env['PY' + x.upper()]
-			try:
-				st2 = os.stat(path + x)
-			except OSError:
-				pass
-			else:
-				if st1.st_mtime <= st2.st_mtime:
-					do_inst = False
-
-			if do_inst:
-				lst = (x == 'o') and [self.env['PYFLAGS_OPT']] or []
-				(a, b, c) = (path, path + x, tsk.get_install_path(destdir=False) + x)
-				argv = self.env['PYTHON'] + lst + ['-c', INST, a, b, c]
-				Logs.info('+ byte compiling %r' % (path + x))
-				env = self.env.env or None
-				ret = Utils.subprocess.Popen(argv, env=env).wait()
-				if ret:
-					raise Errors.WafError('py%s compilation failed %r' % (x, path))
+		if self.install_path:
+			self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=relative_trick)
 
-@feature('py')
-def feature_py(self):
+class pyc(Task.Task):
+	"""
+	Byte-compiling python files
 	"""
-	Dummy feature which does nothing
+	color = 'PINK'
+	def __str__(self):
+		node = self.outputs[0]
+		return node.path_from(node.ctx.launch_node())
+	def run(self):
+		cmd = [Utils.subst_vars('${PYTHON}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
+		ret = self.generator.bld.exec_command(cmd)
+		return ret
+
+class pyo(Task.Task):
 	"""
-	pass
+	Byte-compiling python files
+	"""
+	color = 'PINK'
+	def __str__(self):
+		node = self.outputs[0]
+		return node.path_from(node.ctx.launch_node())
+	def run(self):
+		cmd = [Utils.subst_vars('${PYTHON}', self.env), Utils.subst_vars('${PYFLAGS_OPT}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
+		ret = self.generator.bld.exec_command(cmd)
+		return ret
 
 @feature('pyext')
 @before_method('propagate_uselib_vars', 'apply_link')
@@ -158,6 +172,7 @@ def init_pyext(self):
 @feature('pyext')
 @before_method('apply_link', 'apply_bundle')
 def set_bundle(self):
+	"""Mac-specific pyext extension that enables bundles from c_osx.py"""
 	if Utils.unversioned_sys_platform() == 'darwin':
 		self.mac_bundle = True
 
@@ -172,93 +187,227 @@ def init_pyembed(self):
 		self.uselib.append('PYEMBED')
 
 @conf
-def get_python_variables(self, variables, imports=None):
+def get_sysconfig_variable(self, variable):
+	"""
+	Spawn a new python process to dump configuration variables
+
+	:param variable: variable to print
+	:type variable: string
+	:return: the variable value
+	:rtype: string
+	"""
+
+	env = dict(os.environ)
+	try:
+		del env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
+	except KeyError:
+		pass
+
+	cmd = self.env.PYTHON + ["-c", "import sysconfig; print(sysconfig.get_config_var(%r))" % variable]
+	out = self.cmd_and_log(cmd, env=env).strip()
+
+	if out == "None":
+		return ""
+	else:
+		return out
+
+@conf
+def get_sysconfig_variables(self, variables):
 	"""
 	Spawn a new python process to dump configuration variables
 
 	:param variables: variables to print
 	:type variables: list of string
-	:param imports: one import by element
-	:type imports: list of string
 	:return: the variable values
 	:rtype: list of string
 	"""
-	if not imports:
-		try:
-			imports = self.python_imports
-		except AttributeError:
-			imports = DISTUTILS_IMP
-
-	program = list(imports) # copy
-	program.append('')
-	for v in variables:
-		program.append("print(repr(%s))" % v)
-	os_env = dict(os.environ)
+	return [self.get_sysconfig_variable(variable=v) for v in variables]
+
+@conf
+def get_sysconfig_path(self, name):
+	"""
+	Spawn a new python process to dump configuration paths
+
+	:param name: path to print
+	:type variable: string
+	:return: the path value
+	:rtype: string
+	"""
+
+	env = dict(os.environ)
 	try:
-		del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
+		del env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
 	except KeyError:
 		pass
 
-	try:
-		out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
-	except Errors.WafError:
-		self.fatal('The distutils module is unusable: install "python-devel"?')
-	self.to_log(out)
-	return_values = []
-	for s in out.split('\n'):
-		s = s.strip()
-		if not s:
-			continue
-		if s == 'None':
-			return_values.append(None)
-		elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
-			return_values.append(eval(s))
-		elif s[0].isdigit():
-			return_values.append(int(s))
-		else: break
-	return return_values
+	if self.env.PREFIX:
+		# If project wide PREFIX is set, construct the install directory based on this
+		# Note: we could use sysconfig.get_preferred_scheme('user') but that is Python >= 3.10 only
+		pref_scheme = 'posix_user'  # Default to *nix name
+		if Utils.unversioned_sys_platform() == 'darwin':
+			pref_scheme = 'osx_framework_user'
+		elif Utils.unversioned_sys_platform() == 'win32':
+			pref_scheme = 'nt_user'
+		cmd = self.env.PYTHON + ["-c", "import sysconfig; print(sysconfig.get_path(%r, %r, {'userbase': %r}))" % (name, pref_scheme, self.env.PREFIX)]
+	else:
+		cmd = self.env.PYTHON + ["-c", "import sysconfig; print(sysconfig.get_path(%r))" % name]
+	out = self.cmd_and_log(cmd, env=env).strip()
+
+	if out == "None":
+		return ""
+	else:
+		return out
+
+@conf
+def test_pyembed(self, mode, msg='Testing pyembed configuration'):
+	self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg=msg,
+		fragment=FRAG, errmsg='Could not build a python embedded interpreter',
+		features='%s %sprogram pyembed' % (mode, mode))
+
+@conf
+def test_pyext(self, mode, msg='Testing pyext configuration'):
+	self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg=msg,
+		fragment=FRAG, errmsg='Could not build python extensions',
+		features='%s %sshlib pyext' % (mode, mode))
+
+@conf
+def python_cross_compile(self, features='pyembed pyext'):
+	"""
+	For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want:
+	PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure
+
+	The following variables are used:
+	PYTHON_VERSION    required
+	PYTAG             required
+	PYTHON_LDFLAGS    required
+	pyext_PATTERN     required
+	PYTHON_PYEXT_LDFLAGS
+	PYTHON_PYEMBED_LDFLAGS
+	"""
+	features = Utils.to_list(features)
+	if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ):
+		return False
+
+	for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
+		if not x in self.environ:
+			self.fatal('Please set %s in the os environment' % x)
+		else:
+			self.env[x] = self.environ[x]
+
+	xx = self.env.CXX_NAME and 'cxx' or 'c'
+	if 'pyext' in features:
+		flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
+		if flags is None:
+			self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
+		else:
+			self.parse_flags(flags, 'PYEXT')
+		self.test_pyext(xx)
+	if 'pyembed' in features:
+		flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
+		if flags is None:
+			self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
+		else:
+			self.parse_flags(flags, 'PYEMBED')
+		self.test_pyembed(xx)
+	return True
 
 @conf
-def check_python_headers(conf):
+def check_python_headers(conf, features='pyembed pyext'):
 	"""
-	Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
+	Check for headers and libraries necessary to extend or embed python by using the module *sysconfig*.
 	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:
 
 	* PYEXT: for compiling python extensions
 	* PYEMBED: for embedding a python interpreter
 	"""
-
-	# FIXME rewrite
-
+	features = Utils.to_list(features)
+	assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
 	env = conf.env
-	if not env['CC_NAME'] and not env['CXX_NAME']:
+	if not env.CC_NAME and not env.CXX_NAME:
 		conf.fatal('load a compiler first (gcc, g++, ..)')
 
-	if not env['PYTHON_VERSION']:
+	# bypass all the code below for cross-compilation
+	if conf.python_cross_compile(features):
+		return
+
+	if not env.PYTHON_VERSION:
 		conf.check_python_version()
 
-	pybin = conf.env.PYTHON
+	pybin = env.PYTHON
 	if not pybin:
 		conf.fatal('Could not find the python executable')
 
-	v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS'.split()
+	# so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
+	v = 'prefix SO EXT_SUFFIX LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
 	try:
-		lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
+		lst = conf.get_sysconfig_variables(variables=v)
 	except RuntimeError:
 		conf.fatal("Python development headers not found (-v for details).")
 
 	vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
-	conf.to_log("Configuration returned from %r:\n%r\n" % (pybin, '\n'.join(vals)))
+	conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))
 
 	dct = dict(zip(v, lst))
 	x = 'MACOSX_DEPLOYMENT_TARGET'
 	if dct[x]:
-		conf.env[x] = conf.environ[x] = dct[x]
+		env[x] = conf.environ[x] = str(dct[x])
+	env.pyext_PATTERN = '%s' + (dct['EXT_SUFFIX'] or dct['SO']) # SO is deprecated in 3.5 and removed in 3.11
 
-	env['pyext_PATTERN'] = '%s' + dct['SO'] # not a mistake
 
-	# Check for python libraries for embedding
+	# Try to get pythonX.Y-config
+	num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
+	conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)
 
+	if env.PYTHON_CONFIG:
+		# check python-config output only once
+		if conf.env.HAVE_PYTHON_H:
+			return
+
+		# python2.6-config requires 3 runs
+		all_flags = [['--cflags', '--libs', '--ldflags']]
+		if sys.hexversion < 0x2070000:
+			all_flags = [[k] for k in all_flags[0]]
+
+		xx = env.CXX_NAME and 'cxx' or 'c'
+
+		if 'pyembed' in features:
+			for flags in all_flags:
+				# Python 3.8 has different flags for pyembed, needs --embed
+				embedflags = flags + ['--embed']
+				try:
+					conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(embedflags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=embedflags)
+				except conf.errors.ConfigurationError:
+					# However Python < 3.8 doesn't accept --embed, so we need a fallback
+					conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)
+
+			try:
+				conf.test_pyembed(xx)
+			except conf.errors.ConfigurationError:
+				# python bug 7352
+				if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
+					env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
+					conf.test_pyembed(xx)
+				else:
+					raise
+
+		if 'pyext' in features:
+			for flags in all_flags:
+				conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)
+
+			try:
+				conf.test_pyext(xx)
+			except conf.errors.ConfigurationError:
+				# python bug 7352
+				if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
+					env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
+					conf.test_pyext(xx)
+				else:
+					raise
+
+		conf.define('HAVE_PYTHON_H', 1)
+		return
+
+	# No python-config, do something else on windows systems
 	all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
 	conf.parse_flags(all_flags, 'PYEMBED')
 
@@ -266,14 +415,15 @@ def check_python_headers(conf):
 	conf.parse_flags(all_flags, 'PYEXT')
 
 	result = None
-	#name = 'python' + env['PYTHON_VERSION']
+	if not dct["LDVERSION"]:
+		dct["LDVERSION"] = env.PYTHON_VERSION
 
-	# TODO simplify this
-	for name in ('python' + env['PYTHON_VERSION'], 'python' + env['PYTHON_VERSION'] + 'm', 'python' + env['PYTHON_VERSION'].replace('.', '')):
+	# further simplification will be complicated
+	for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')):
 
 		# LIBPATH_PYEMBED is already set; see if it works.
-		if not result and env['LIBPATH_PYEMBED']:
-			path = env['LIBPATH_PYEMBED']
+		if not result and env.LIBPATH_PYEMBED:
+			path = env.LIBPATH_PYEMBED
 			conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
 			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)
 
@@ -289,87 +439,49 @@ def check_python_headers(conf):
 
 		if not result:
 			path = [os.path.join(dct['prefix'], "libs")]
-			conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
+			conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY rather than pythonX.Y (win32)\n")
 			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)
 
+		if not result:
+			path = [os.path.normpath(os.path.join(dct['INCLUDEPY'], '..', 'libs'))]
+			conf.to_log("\n\n# try again with -L$INCLUDEPY/../libs, and pythonXY rather than pythonX.Y (win32)\n")
+			result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $INCLUDEPY/../libs' % name)
+
 		if result:
 			break # do not forget to set LIBPATH_PYEMBED
 
 	if result:
-		env['LIBPATH_PYEMBED'] = path
+		env.LIBPATH_PYEMBED = path
 		env.append_value('LIB_PYEMBED', [name])
 	else:
 		conf.to_log("\n\n### LIB NOT FOUND\n")
 
 	# under certain conditions, python extensions must link to
 	# python libraries, not just python embedding programs.
-	if (Utils.is_win32 or sys.platform.startswith('os2')
-		or dct['Py_ENABLE_SHARED']):
-		env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
-		env['LIB_PYEXT'] = env['LIB_PYEMBED']
-
-	# We check that pythonX.Y-config exists, and if it exists we
-	# use it to get only the includes, else fall back to distutils.
-	num = '.'.join(env['PYTHON_VERSION'].split('.')[:2])
-	conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', mandatory=False)
-
-	includes = []
-	if conf.env.PYTHON_CONFIG:
-		for incstr in conf.cmd_and_log([ conf.env.PYTHON_CONFIG, '--includes']).strip().split():
-			# strip the -I or /I
-			if (incstr.startswith('-I') or incstr.startswith('/I')):
-				incstr = incstr[2:]
-			# append include path, unless already given
-			if incstr not in includes:
-				includes.append(incstr)
-		conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n" % (includes,))
-		env['INCLUDES_PYEXT'] = includes
-		env['INCLUDES_PYEMBED'] = includes
-	else:
-		conf.to_log("Include path for Python extensions "
-			       "(found via distutils module): %r\n" % (dct['INCLUDEPY'],))
-		env['INCLUDES_PYEXT'] = [dct['INCLUDEPY']]
-		env['INCLUDES_PYEMBED'] = [dct['INCLUDEPY']]
+	if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
+		env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
+		env.LIB_PYEXT = env.LIB_PYEMBED
+
+	conf.to_log("Include path for Python extensions (found via sysconfig module): %r\n" % (dct['INCLUDEPY'],))
+	env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
+	env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]
 
 	# Code using the Python API needs to be compiled with -fno-strict-aliasing
-	if env['CC_NAME'] == 'gcc':
-		env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
-		env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
-	if env['CXX_NAME'] == 'gcc':
-		env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
-		env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
+	if env.CC_NAME == 'gcc':
+		env.append_unique('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
+		env.append_unique('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
+	if env.CXX_NAME == 'gcc':
+		env.append_unique('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
+		env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
 
 	if env.CC_NAME == "msvc":
-		from distutils.msvccompiler import MSVCCompiler
-		dist_compiler = MSVCCompiler()
-		dist_compiler.initialize()
-		env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
-		env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
-		env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
+		# From https://github.com/python/cpython/blob/main/Lib/distutils/msvccompiler.py
+		env.append_value('CFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/EHsc', '/DNDEBUG'])
+		env.append_value('CXXFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/EHsc', '/DNDEBUG'])
+		env.append_value('LINKFLAGS_PYEXT', ['/DLL', '/nologo', '/INCREMENTAL:NO'])
 
 	# See if it compiles
-	try:
-		conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H',
-		   uselib='PYEMBED', fragment=FRAG,
-		   errmsg=':-(')
-	except conf.errors.ConfigurationError:
-		# python3.2, oh yeah
-		xx = conf.env.CXX_NAME and 'cxx' or 'c'
-
-		flags = ['--cflags', '--libs', '--ldflags']
-
-		for f in flags:
-			conf.check_cfg(msg='Asking python-config for pyembed %s flags' % f,
-				path=conf.env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=[f])
-		conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', msg='Getting pyembed flags from python-config',
-			fragment=FRAG, errmsg='Could not build a python embedded interpreter',
-			features='%s %sprogram pyembed' % (xx, xx))
-
-		for f in flags:
-			conf.check_cfg(msg='Asking python-config for pyext %s flags' % f,
-				path=conf.env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=[f])
-		conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', msg='Getting pyext flags from python-config',
-			features='%s %sshlib pyext' % (xx, xx), fragment=FRAG, errmsg='Could not build python extensions')
+	conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Broken python installation? Get python-config now!')
 
 @conf
 def check_python_version(conf, minver=None):
@@ -377,9 +489,9 @@ def check_python_version(conf, minver=None):
 	Check if the python interpreter is found matching a given minimum version.
 	minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
 
-	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
-	(eg. '2.4') of the actual python version found, and PYTHONDIR is
-	defined, pointing to the site-packages directory appropriate for
+	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (eg. '2.4')
+	of the actual python version found, and PYTHONDIR and PYTHONARCHDIR
+	are defined, pointing to the site-packages directories appropriate for
 	this python version, where modules/packages/extensions should be
 	installed.
 
@@ -387,46 +499,44 @@ def check_python_version(conf, minver=None):
 	:type minver: tuple of int
 	"""
 	assert minver is None or isinstance(minver, tuple)
-	pybin = conf.env['PYTHON']
+	pybin = conf.env.PYTHON
 	if not pybin:
 		conf.fatal('could not find the python executable')
 
 	# Get python version string
 	cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
-	Logs.debug('python: Running python command %r' % cmd)
+	Logs.debug('python: Running python command %r', cmd)
 	lines = conf.cmd_and_log(cmd).split()
-	assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
+	assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines)
 	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
 
-	# compare python version with the minimum required
+	# Compare python version with the minimum required
 	result = (minver is None) or (pyver_tuple >= minver)
 
 	if result:
 		# define useful environment variables
 		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
-		conf.env['PYTHON_VERSION'] = pyver
+		conf.env.PYTHON_VERSION = pyver
 
-		if 'PYTHONDIR' in conf.environ:
+		if 'PYTHONDIR' in conf.env:
+			# Check if --pythondir was specified
+			pydir = conf.env.PYTHONDIR
+		elif 'PYTHONDIR' in conf.environ:
+			# Check environment for PYTHONDIR
 			pydir = conf.environ['PYTHONDIR']
 		else:
-			if Utils.is_win32:
-				(python_LIBDEST, pydir) = conf.get_python_variables(
-					  ["get_config_var('LIBDEST') or ''",
-					   "get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
-			else:
-				python_LIBDEST = None
-				(pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
-			if python_LIBDEST is None:
-				if conf.env['LIBDIR']:
-					python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
-				else:
-					python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
-
+			pydir = conf.get_sysconfig_path('purelib')
 
-		if 'PYTHONARCHDIR' in conf.environ:
+		if 'PYTHONARCHDIR' in conf.env:
+			# Check if --pythonarchdir was specified
+			pyarchdir = conf.env.PYTHONARCHDIR
+		elif 'PYTHONARCHDIR' in conf.environ:
+			# Check environment for PYTHONDIR
 			pyarchdir = conf.environ['PYTHONARCHDIR']
 		else:
-			(pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']])
+			# Finally, try to guess
+			pyarchdir = conf.get_sysconfig_path('platlib')
+
 			if not pyarchdir:
 				pyarchdir = pydir
 
@@ -434,8 +544,8 @@ def check_python_version(conf, minver=None):
 			conf.define('PYTHONDIR', pydir)
 			conf.define('PYTHONARCHDIR', pyarchdir)
 
-		conf.env['PYTHONDIR'] = pydir
-		conf.env['PYTHONARCHDIR'] = pyarchdir
+		conf.env.PYTHONDIR = pydir
+		conf.env.PYTHONARCHDIR = pyarchdir
 
 	# Feedback
 	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
@@ -443,7 +553,7 @@ def check_python_version(conf, minver=None):
 		conf.msg('Checking for python version', pyver_full)
 	else:
 		minver_str = '.'.join(map(str, minver))
-		conf.msg('Checking for python version', pyver_tuple, ">= %s" % (minver_str,) and 'GREEN' or 'YELLOW')
+		conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, color=result and 'GREEN' or 'YELLOW')
 
 	if not result:
 		conf.fatal('The python version is too old, expecting %r' % (minver,))
@@ -452,9 +562,9 @@ PYTHON_MODULE_TEMPLATE = '''
 import %s as current_module
 version = getattr(current_module, '__version__', None)
 if version is not None:
-    print(str(version))
+	print(str(version))
 else:
-    print('unknown version')
+	print('unknown version')
 '''
 
 @conf
@@ -469,13 +579,13 @@ def check_python_module(conf, module_name, condition=''):
 	:param module_name: module
 	:type module_name: string
 	"""
-	msg = 'Python module %s' % module_name
+	msg = "Checking for python module %r" % module_name
 	if condition:
 		msg = '%s (%s)' % (msg, condition)
 	conf.start_msg(msg)
 	try:
-		ret = conf.cmd_and_log(conf.env['PYTHON'] + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
-	except Exception:
+		ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
+	except Errors.WafError:
 		conf.end_msg(False)
 		conf.fatal('Could not find the python module %r' % module_name)
 
@@ -485,13 +595,12 @@ def check_python_module(conf, module_name, condition=''):
 		if ret == 'unknown version':
 			conf.fatal('Could not check the %s version' % module_name)
 
-		from distutils.version import LooseVersion
 		def num(*k):
 			if isinstance(k[0], int):
-				return LooseVersion('.'.join([str(x) for x in k]))
+				return Utils.loose_version('.'.join([str(x) for x in k]))
 			else:
-				return LooseVersion(k[0])
-		d = {'num': num, 'ver': LooseVersion(ret)}
+				return Utils.loose_version(k[0])
+		d = {'num': num, 'ver': Utils.loose_version(ret)}
 		ev = eval(condition, {}, d)
 		if not ev:
 			conf.fatal('The %s version does not satisfy the requirements' % module_name)
@@ -505,36 +614,45 @@ def configure(conf):
 	"""
 	Detect the python interpreter
 	"""
-	try:
-		conf.find_program('python', var='PYTHON')
-	except conf.errors.ConfigurationError:
-		Logs.warn("could not find a python executable, setting to sys.executable '%s'" % sys.executable)
-		conf.env.PYTHON = sys.executable
+	v = conf.env
+	if getattr(Options.options, 'pythondir', None):
+		v.PYTHONDIR = Options.options.pythondir
+	if getattr(Options.options, 'pythonarchdir', None):
+		v.PYTHONARCHDIR = Options.options.pythonarchdir
+	if getattr(Options.options, 'nopycache', None):
+		v.NOPYCACHE=Options.options.nopycache
 
-	if conf.env.PYTHON != sys.executable:
-		Logs.warn("python executable %r differs from system %r" % (conf.env.PYTHON, sys.executable))
-	conf.env.PYTHON = conf.cmd_to_list(conf.env.PYTHON)
+	if not v.PYTHON:
+		v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable]
+	v.PYTHON = Utils.to_list(v.PYTHON)
+	conf.find_program('python', var='PYTHON')
 
-	v = conf.env
-	v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
-	v['PYFLAGS'] = ''
-	v['PYFLAGS_OPT'] = '-O'
+	v.PYFLAGS = ''
+	v.PYFLAGS_OPT = '-O'
+
+	v.PYC = getattr(Options.options, 'pyc', 1)
+	v.PYO = getattr(Options.options, 'pyo', 1)
 
-	v['PYC'] = getattr(Options.options, 'pyc', 1)
-	v['PYO'] = getattr(Options.options, 'pyo', 1)
+	try:
+		v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import sys\ntry:\n print(sys.implementation.cache_tag)\nexcept AttributeError:\n import imp\n print(imp.get_tag())\n"]).strip()
+	except Errors.WafError:
+		pass
 
 def options(opt):
 	"""
-	Add the options ``--nopyc`` and ``--nopyo``
+	Add python-specific options
 	"""
-	opt.add_option('--nopyc',
-			action='store_false',
-			default=1,
-			help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]',
-			dest = 'pyc')
-	opt.add_option('--nopyo',
-			action='store_false',
-			default=1,
-			help='Do not install optimised compiled .pyo files (configuration) [Default:install]',
-			dest='pyo')
+	pyopt=opt.add_option_group("Python Options")
+	pyopt.add_option('--nopyc', dest = 'pyc', action='store_false', default=1,
+					 help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]')
+	pyopt.add_option('--nopyo', dest='pyo', action='store_false', default=1,
+					 help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
+	pyopt.add_option('--nopycache',dest='nopycache', action='store_true',
+					 help='Do not use __pycache__ directory to install objects [Default:auto]')
+	pyopt.add_option('--python', dest="python",
+					 help='python binary to be used [Default: %s]' % sys.executable)
+	pyopt.add_option('--pythondir', dest='pythondir',
+					 help='Installation path for python modules (py, platform-independent .py and .pyc files)')
+	pyopt.add_option('--pythonarchdir', dest='pythonarchdir',
+					 help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
 

+ 894 - 0
sdk/waf/waflib/Tools/qt5.py

@@ -0,0 +1,894 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2018 (ita)
+# Rafaël Kooi, 2023 (RA-Kooi)
+
+"""
+This tool helps with finding Qt5 and Qt6 tools and libraries,
+and also provides syntactic sugar for using Qt5 and Qt6 tools.
+
+The following snippet illustrates the tool usage::
+
+	def options(opt):
+		opt.load('compiler_cxx qt5')
+
+	def configure(conf):
+		conf.load('compiler_cxx qt5')
+
+	def build(bld):
+		bld(
+			features = 'qt5 cxx cxxprogram',
+			uselib   = 'QT5CORE QT5GUI QT5OPENGL QT5SVG',
+			source   = 'main.cpp textures.qrc aboutDialog.ui',
+			target   = 'window',
+		)
+
+Alternatively the following snippet illustrates Qt6 tool usage::
+
+    def options(opt):
+        opt.load('compiler_cxx qt5')
+
+    def configure(conf):
+        conf.want_qt6 = True
+        conf.load('compiler_cxx qt5')
+
+    def build(bld):
+        bld(
+            features = 'qt6 cxx cxxprogram',
+            uselib   = 'QT6CORE QT6GUI QT6OPENGL QT6SVG',
+            source   = 'main.cpp textures.qrc aboutDialog.ui',
+            target   = 'window',
+        )
+
+Here, the UI description and resource files will be processed
+to generate code.
+
+Usage
+=====
+
+Load the "qt5" tool.
+
+You also need to edit your sources accordingly:
+
+- the normal way of doing things is to have your C++ files
+  include the .moc file.
+  This is regarded as the best practice (and provides much faster
+  compilations).
+  It also implies that the include paths have beenset properly.
+
+- to have the include paths added automatically, use the following::
+
+     from waflib.TaskGen import feature, before_method, after_method
+     @feature('cxx')
+     @after_method('process_source')
+     @before_method('apply_incpaths')
+     def add_includes_paths(self):
+        incs = set(self.to_list(getattr(self, 'includes', '')))
+        for x in self.compiled_tasks:
+            incs.add(x.inputs[0].parent.path_from(self.path))
+        self.includes = sorted(incs)
+
+Note: another tool provides Qt processing that does not require
+.moc includes, see 'playground/slow_qt/'.
+
+A few options (--qt{dir,bin,...}) and environment variables
+(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
+tool path selection, etc; please read the source for more info.
+For Qt6 replace the QT5_ prefix with QT6_.
+
+The detection uses pkg-config on Linux by default. The list of
+libraries to be requested to pkg-config is formulated by scanning
+in the QTLIBS directory (that can be passed via --qtlibs or by
+setting the environment variable QT5_LIBDIR or QT6_LIBDIR otherwise is
+derived by querying qmake for QT_INSTALL_LIBS directory) for
+shared/static libraries present.
+Alternatively the list of libraries to be requested via pkg-config
+can be set using the qt5_vars attribute, ie:
+
+      conf.qt5_vars = ['Qt5Core', 'Qt5Gui', 'Qt5Widgets', 'Qt5Test'];
+
+For Qt6 use the qt6_vars attribute.
+
+This can speed up configuration phase if needed libraries are
+known beforehand, can improve detection on systems with a
+sparse QT5/Qt6 libraries installation (ie. NIX) and can improve
+detection of some header-only Qt modules (ie. Qt5UiPlugin).
+
+To force static library detection use:
+QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
+
+To use Qt6 set the want_qt6 attribute, ie:
+
+    conf.want_qt6 = True;
+"""
+
+try:
+	from xml.sax import make_parser
+	from xml.sax.handler import ContentHandler
+except ImportError:
+	has_xml = False
+	ContentHandler = object
+else:
+	has_xml = True
+
+import os, sys, re
+from waflib.Tools import cxx
+from waflib import Build, Task, Utils, Options, Errors, Context
+from waflib.TaskGen import feature, after_method, extension, before_method
+from waflib.Configure import conf
+from waflib import Logs
+
+MOC_H = ['.h', '.hpp', '.hxx', '.hh']
+"""
+File extensions associated to .moc files
+"""
+
+EXT_RCC = ['.qrc']
+"""
+File extension for the resource (.qrc) files
+"""
+
+EXT_UI  = ['.ui']
+"""
+File extension for the user interface (.ui) files
+"""
+
+EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
+"""
+File extensions of C++ files that may require a .moc processing
+"""
+
+class qxx(Task.classes['cxx']):
+	"""
+	Each C++ file can have zero or several .moc files to create.
+	They are known only when the files are scanned (preprocessor)
+	To avoid scanning the c++ files each time (parsing C/C++), the results
+	are retrieved from the task cache (bld.node_deps/bld.raw_deps).
+	The moc tasks are also created *dynamically* during the build.
+	"""
+
+	def __init__(self, *k, **kw):
+		Task.Task.__init__(self, *k, **kw)
+		self.moc_done = 0
+
+	def runnable_status(self):
+		"""
+		Compute the task signature to make sure the scanner was executed. Create the
+		moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary),
+		then postpone the task execution (there is no need to recompute the task signature).
+		"""
+		if self.moc_done:
+			return Task.Task.runnable_status(self)
+		else:
+			for t in self.run_after:
+				if not t.hasrun:
+					return Task.ASK_LATER
+			self.add_moc_tasks()
+			return Task.Task.runnable_status(self)
+
+	def create_moc_task(self, h_node, m_node):
+		"""
+		If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
+		It is not possible to change the file names, but we can assume that the moc transformation will be identical,
+		and the moc tasks can be shared in a global cache.
+		"""
+		try:
+			moc_cache = self.generator.bld.moc_cache
+		except AttributeError:
+			moc_cache = self.generator.bld.moc_cache = {}
+
+		try:
+			return moc_cache[h_node]
+		except KeyError:
+			tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
+			tsk.set_inputs(h_node)
+			tsk.set_outputs(m_node)
+			tsk.env.append_unique('MOC_FLAGS', '-i')
+
+			if self.generator:
+				self.generator.tasks.append(tsk)
+
+			# direct injection in the build phase (safe because called from the main thread)
+			gen = self.generator.bld.producer
+			gen.outstanding.append(tsk)
+			gen.total += 1
+
+			return tsk
+
+		else:
+			# remove the signature, it must be recomputed with the moc task
+			delattr(self, 'cache_sig')
+
+	def add_moc_tasks(self):
+		"""
+		Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]``
+		"""
+		node = self.inputs[0]
+		bld = self.generator.bld
+
+		# skip on uninstall due to generated files
+		if bld.is_install == Build.UNINSTALL:
+			return
+
+		try:
+			# compute the signature once to know if there is a moc file to create
+			self.signature()
+		except KeyError:
+			# the moc file may be referenced somewhere else
+			pass
+		else:
+			# remove the signature, it must be recomputed with the moc task
+			delattr(self, 'cache_sig')
+
+		include_nodes = [node.parent] + self.generator.includes_nodes
+
+		moctasks = []
+		mocfiles = set()
+		for d in bld.raw_deps.get(self.uid(), []):
+			if not d.endswith('.moc'):
+				continue
+
+			# process that base.moc only once
+			if d in mocfiles:
+				continue
+			mocfiles.add(d)
+
+			# find the source associated with the moc file
+			h_node = None
+			base2 = d[:-4]
+
+			# foo.moc from foo.cpp
+			prefix = node.name[:node.name.rfind('.')]
+			if base2 == prefix:
+				h_node = node
+			else:
+				# this deviates from the standard
+				# if bar.cpp includes foo.moc, then assume it is from foo.h
+				for x in include_nodes:
+					for e in MOC_H:
+						h_node = x.find_node(base2 + e)
+						if h_node:
+							break
+					else:
+						continue
+					break
+			if h_node:
+				m_node = h_node.change_ext('.moc')
+			else:
+				raise Errors.WafError('No source found for %r which is a moc file' % d)
+
+			# create the moc task
+			task = self.create_moc_task(h_node, m_node)
+			moctasks.append(task)
+
+		# simple scheduler dependency: run the moc task before others
+		self.run_after.update(set(moctasks))
+		self.moc_done = 1
+
+class trans_update(Task.Task):
+	"""Updates a .ts files from a list of C++ files"""
+	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
+	color   = 'BLUE'
+
+class XMLHandler(ContentHandler):
+	"""
+	Parses ``.qrc`` files
+	"""
+	def __init__(self):
+		ContentHandler.__init__(self)
+		self.buf = []
+		self.files = []
+	def startElement(self, name, attrs):
+		if name == 'file':
+			self.buf = []
+	def endElement(self, name):
+		if name == 'file':
+			self.files.append(str(''.join(self.buf)))
+	def characters(self, cars):
+		self.buf.append(cars)
+
+@extension(*EXT_RCC)
+def create_rcc_task(self, node):
+	"Creates rcc and cxx tasks for ``.qrc`` files"
+	rcnode = node.change_ext('_rc.%d.cpp' % self.idx)
+	self.create_task('rcc', node, rcnode)
+	cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
+	try:
+		self.compiled_tasks.append(cpptask)
+	except AttributeError:
+		self.compiled_tasks = [cpptask]
+	return cpptask
+
+@extension(*EXT_UI)
+def create_uic_task(self, node):
+	"Create uic tasks for user interface ``.ui`` definition files"
+
+	"""
+	If UIC file is used in more than one bld, we would have a conflict in parallel execution
+	It is not possible to change the file names (like .self.idx. as for objects) as they have
+	to be referenced by the source file, but we can assume that the transformation will be identical
+	and the tasks can be shared in a global cache.
+	"""
+	try:
+		uic_cache = self.bld.uic_cache
+	except AttributeError:
+		uic_cache = self.bld.uic_cache = {}
+
+	if node not in uic_cache:
+		uictask = uic_cache[node] = self.create_task('ui5', node)
+		uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
+
+@extension('.ts')
+def add_lang(self, node):
+	"""Adds all the .ts file into ``self.lang``"""
+	self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
+
+@feature('qt5', 'qt6')
+@before_method('process_source')
+def process_mocs(self):
+	"""
+	Processes MOC files included in headers::
+
+		def build(bld):
+			bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h')
+
+	The build will run moc on foo.h to create moc_foo.n.cpp. The number in the file name
+	is provided to avoid name clashes when the same headers are used by several targets.
+	"""
+	lst = self.to_nodes(getattr(self, 'moc', []))
+	self.source = self.to_list(getattr(self, 'source', []))
+	for x in lst:
+		prefix = x.name[:x.name.rfind('.')] # foo.h -> foo
+		moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx)
+		moc_node = x.parent.find_or_declare(moc_target)
+		self.source.append(moc_node)
+
+		self.create_task('moc', x, moc_node)
+
+@feature('qt5', 'qt6')
+@after_method('apply_link')
+def apply_qt5(self):
+	"""
+	Adds MOC_FLAGS which may be necessary for moc::
+
+		def build(bld):
+			bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE')
+
+	The additional parameters are:
+
+	:param lang: list of translation files (\\*.ts) to process
+	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
+	:param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**)
+	:type update: bool
+	:param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file
+	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
+	"""
+	if getattr(self, 'lang', None):
+		qmtasks = []
+		for x in self.to_list(self.lang):
+			if isinstance(x, str):
+				x = self.path.find_resource(x + '.ts')
+			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx)))
+
+		if getattr(self, 'update', None) and Options.options.trans_qt5:
+			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
+				a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
+			for x in qmtasks:
+				self.create_task('trans_update', cxxnodes, x.inputs)
+
+		if getattr(self, 'langname', None):
+			qmnodes = [x.outputs[0] for x in qmtasks]
+			rcnode = self.langname
+			if isinstance(rcnode, str):
+				rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx))
+			t = self.create_task('qm2rcc', qmnodes, rcnode)
+			k = create_rcc_task(self, t.outputs[0])
+			self.link_task.inputs.append(k.outputs[0])
+
+	lst = []
+	for flag in self.to_list(self.env.CXXFLAGS):
+		if len(flag) < 2:
+			continue
+		f = flag[0:2]
+		if f in ('-D', '-I', '/D', '/I'):
+			if (f[0] == '/'):
+				lst.append('-' + flag[1:])
+			else:
+				lst.append(flag)
+	self.env.append_value('MOC_FLAGS', lst)
+
+@extension(*EXT_QT5)
+def cxx_hook(self, node):
+	"""
+	Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task.
+	"""
+	return self.create_compiled_task('qxx', node)
+
+class rcc(Task.Task):
+	"""
+	Processes ``.qrc`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+	ext_out = ['.h']
+
+	def rcname(self):
+		return os.path.splitext(self.inputs[0].name)[0]
+
+	def scan(self):
+		"""Parse the *.qrc* files"""
+		if not has_xml:
+			Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+			return ([], [])
+
+		parser = make_parser()
+		curHandler = XMLHandler()
+		parser.setContentHandler(curHandler)
+		with open(self.inputs[0].abspath(), 'r') as f:
+			parser.parse(f)
+
+		nodes = []
+		names = []
+		root = self.inputs[0].parent
+		for x in curHandler.files:
+			nd = root.find_resource(x)
+			if nd:
+				nodes.append(nd)
+			else:
+				names.append(x)
+		return (nodes, names)
+
+	def quote_flag(self, x):
+		"""
+		Override Task.quote_flag. QT parses the argument files
+		differently than cl.exe and link.exe
+
+		:param x: flag
+		:type x: string
+		:return: quoted flag
+		:rtype: string
+		"""
+		return x
+
+
+class moc(Task.Task):
+	"""
+	Creates ``.moc`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+
+	def quote_flag(self, x):
+		"""
+		Override Task.quote_flag. QT parses the argument files
+		differently than cl.exe and link.exe
+
+		:param x: flag
+		:type x: string
+		:return: quoted flag
+		:rtype: string
+		"""
+		return x
+
+
+class ui5(Task.Task):
+	"""
+	Processes ``.ui`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
+	ext_out = ['.h']
+
+class ts2qm(Task.Task):
+	"""
+	Generates ``.qm`` files from ``.ts`` files
+	"""
+	color   = 'BLUE'
+	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+
+class qm2rcc(Task.Task):
+	"""
+	Generates ``.qrc`` files from ``.qm`` files
+	"""
+	color = 'BLUE'
+	after = 'ts2qm'
+	def run(self):
+		"""Create a qrc file including the inputs"""
+		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
+		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
+		self.outputs[0].write(code)
+
+def configure(self):
+	"""
+	Besides the configuration options, the environment variable QT5_ROOT may be used
+	to give the location of the qt5 libraries (absolute path).
+
+	The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
+	"""
+	if 'COMPILER_CXX' not in self.env:
+		self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
+
+	self.want_qt6 = getattr(self, 'want_qt6', False)
+
+	if self.want_qt6:
+		self.qt_vars = Utils.to_list(getattr(self, 'qt6_vars', []))
+	else:
+		self.qt_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
+
+	self.find_qt5_binaries()
+	self.set_qt5_libs_dir()
+	self.set_qt5_libs_to_check()
+	self.set_qt5_defines()
+	self.find_qt5_libraries()
+	self.add_qt5_rpath()
+	self.simplify_qt5_libs()
+
+	# warn about this during the configuration too
+	if not has_xml:
+		Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+
+	feature = 'qt6' if self.want_qt6 else 'qt5'
+
+	# Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
+	frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
+	uses = 'QT6CORE' if self.want_qt6 else 'QT5CORE'
+
+	# Qt6 requires C++17 (https://www.qt.io/blog/qt-6.0-released)
+	flag_list = []
+	if self.env.CXX_NAME == 'msvc':
+		stdflag = '/std:c++17' if self.want_qt6 else '/std:c++11'
+		flag_list = [[], ['/Zc:__cplusplus', '/permissive-', stdflag]]
+	else:
+		stdflag = '-std=c++17' if self.want_qt6 else '-std=c++11'
+		flag_list = [[], '-fPIE', '-fPIC', stdflag, [stdflag, '-fPIE'], [stdflag, '-fPIC']]
+	for flag in flag_list:
+		msg = 'See if Qt files compile '
+		if flag:
+			msg += 'with %s' % flag
+		try:
+			self.check(features=feature + ' cxx', use=uses, uselib_store=feature, cxxflags=flag, fragment=frag, msg=msg)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			break
+	else:
+		self.fatal('Could not build a simple Qt application')
+
+	# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
+	if Utils.unversioned_sys_platform() == 'freebsd':
+		frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
+		try:
+			self.check(features=feature + ' cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
+		except self.errors.ConfigurationError:
+			self.check(features=feature + ' cxx cxxprogram', use=uses, uselib_store=feature, libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?')
+
+@conf
+def find_qt5_binaries(self):
+	"""
+	Detects Qt programs such as qmake, moc, uic, lrelease
+	"""
+	env = self.env
+	opt = Options.options
+
+	qtdir = getattr(opt, 'qtdir', '')
+	qtbin = getattr(opt, 'qtbin', '')
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	paths = []
+
+	if qtdir:
+		qtbin = os.path.join(qtdir, 'bin')
+
+	# the qt directory has been given from QT5_ROOT - deduce the qt binary path
+	if not qtdir:
+		qtdir = self.environ.get('QT' + qt_ver + '_ROOT', '')
+		qtbin = self.environ.get('QT' + qt_ver + '_BIN') or os.path.join(qtdir, 'bin')
+
+	if qtbin:
+		paths = [qtbin]
+
+	# no qtdir, look in the path and in /usr/local/Trolltech
+	if not qtdir:
+		paths = self.environ.get('PATH', '').split(os.pathsep)
+		paths.extend([
+			'/usr/share/qt' + qt_ver + '/bin',
+			'/usr/local/lib/qt' + qt_ver + '/bin'])
+
+		try:
+			lst = Utils.listdir('/usr/local/Trolltech/')
+		except OSError:
+			pass
+		else:
+			if lst:
+				lst.sort()
+				lst.reverse()
+
+				# keep the highest version
+				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
+				qtbin = os.path.join(qtdir, 'bin')
+				paths.append(qtbin)
+
+	# at the end, try to find qmake in the paths given
+	# keep the one with the highest version
+	cand = None
+	prev_ver = ['0', '0', '0']
+	qmake_vars = ['qmake-qt' + qt_ver, 'qmake' + qt_ver, 'qmake']
+
+	for qmk in qmake_vars:
+		try:
+			qmake = self.find_program(qmk, path_list=paths)
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			try:
+				version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
+			except self.errors.WafError:
+				pass
+			else:
+				if version:
+					new_ver = version.split('.')
+					if new_ver[0] == qt_ver and new_ver > prev_ver:
+						cand = qmake
+						prev_ver = new_ver
+
+	# qmake could not be found easily, rely on qtchooser
+	if not cand:
+		try:
+			self.find_program('qtchooser')
+		except self.errors.ConfigurationError:
+			pass
+		else:
+			cmd = self.env.QTCHOOSER + ['-qt=' + qt_ver, '-run-tool=qmake']
+			try:
+				version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
+			except self.errors.WafError:
+				pass
+			else:
+				cand = cmd
+
+	if cand:
+		self.env.QMAKE = cand
+	else:
+		self.fatal('Could not find qmake for qt' + qt_ver)
+
+	# Once we have qmake, we want to query qmake for the paths where we want to look for tools instead
+	paths = []
+
+	self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip()
+	paths.append(qtbin)
+
+	if self.want_qt6:
+		self.env.QT_HOST_LIBEXECS = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_LIBEXECS']).strip()
+		paths.append(self.env.QT_HOST_LIBEXECS)
+
+	def find_bin(lst, var):
+		if var in env:
+			return
+		for f in lst:
+			try:
+				ret = self.find_program(f, path_list=paths)
+			except self.errors.ConfigurationError:
+				pass
+			else:
+				env[var]=ret
+				break
+
+	find_bin(['uic-qt' + qt_ver, 'uic'], 'QT_UIC')
+	if not env.QT_UIC:
+		self.fatal('cannot find the uic compiler for qt' + qt_ver)
+
+	self.start_msg('Checking for uic version')
+	uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
+	uicver = ''.join(uicver).strip()
+	uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
+	self.end_msg(uicver)
+	if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1 or (self.want_qt6 and uicver.find(' 5.') != -1):
+		if self.want_qt6:
+			self.fatal('this uic compiler is for qt3 or qt4 or qt5, add uic for qt6 to your path')
+		else:
+			self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
+
+	find_bin(['moc-qt' + qt_ver, 'moc'], 'QT_MOC')
+	find_bin(['rcc-qt' + qt_ver, 'rcc'], 'QT_RCC')
+	find_bin(['lrelease-qt' + qt_ver, 'lrelease'], 'QT_LRELEASE')
+	find_bin(['lupdate-qt' + qt_ver, 'lupdate'], 'QT_LUPDATE')
+
+	env.UIC_ST = '%s -o %s'
+	env.MOC_ST = '-o'
+	env.ui_PATTERN = 'ui_%s.h'
+	env.QT_LRELEASE_FLAGS = ['-silent']
+	env.MOCCPPPATH_ST = '-I%s'
+	env.MOCDEFINES_ST = '-D%s'
+
+@conf
+def set_qt5_libs_dir(self):
+	env = self.env
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT' + qt_ver + '_LIBDIR')
+
+	if not qtlibs:
+		try:
+			qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
+		except Errors.WafError:
+			qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
+			qtlibs = os.path.join(qtdir, 'lib')
+
+	self.msg('Found the Qt' + qt_ver + ' library path', qtlibs)
+
+	env.QTLIBS = qtlibs
+
+@conf
+def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static):
+	env = self.env
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	if force_static:
+		exts = ('.a', '.lib')
+		prefix = 'STLIB'
+	else:
+		exts = ('.so', '.lib')
+		prefix = 'LIB'
+
+	def lib_names():
+		for x in exts:
+			for k in ('', qt_ver) if Utils.is_win32 else ['']:
+				for p in ('lib', ''):
+					yield (p, name, k, x)
+
+	for tup in lib_names():
+		k = ''.join(tup)
+		path = os.path.join(qtlibs, k)
+		if os.path.exists(path):
+			if env.DEST_OS == 'win32':
+				libval = ''.join(tup[:-1])
+			else:
+				libval = name
+			env.append_unique(prefix + '_' + uselib, libval)
+			env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs)
+			env.append_unique('INCLUDES_' + uselib, qtincludes)
+			env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt' + qt_ver, 'Qt')))
+			return k
+	return False
+
+@conf
+def find_qt5_libraries(self):
+	env = self.env
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	qtincludes =  self.environ.get('QT' + qt_ver + '_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
+	force_static = self.environ.get('QT' + qt_ver + '_FORCE_STATIC')
+
+	try:
+		if self.environ.get('QT' + qt_ver + '_XCOMPILE'):
+			self.fatal('QT' + qt_ver + '_XCOMPILE Disables pkg-config detection')
+		self.check_cfg(atleast_pkgconfig_version='0.1')
+	except self.errors.ConfigurationError:
+		for i in self.qt_vars:
+			uselib = i.upper()
+			if Utils.unversioned_sys_platform() == 'darwin':
+				# Since at least qt 4.7.3 each library locates in separate directory
+				fwk = i.replace('Qt' + qt_ver, 'Qt')
+				frameworkName = fwk + '.framework'
+
+				qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk)
+				if os.path.exists(qtDynamicLib):
+					env.append_unique('FRAMEWORK_' + uselib, fwk)
+					env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS)
+					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
+				else:
+					self.msg('Checking for %s' % i, False, 'YELLOW')
+				env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
+			else:
+				ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
+				if not force_static and not ret:
+					ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
+				self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
+	else:
+		path = '%s:%s:%s/pkgconfig:/usr/lib/qt%s/lib/pkgconfig:/opt/qt%s/lib/pkgconfig:/usr/lib/qt%s/lib:/opt/qt%s/lib' % (
+			self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS, qt_ver, qt_ver, qt_ver, qt_ver)
+		for i in self.qt_vars:
+			self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
+
+@conf
+def simplify_qt5_libs(self):
+	"""
+	Since library paths make really long command-lines,
+	and since everything depends on qtcore, remove the qtcore ones from qtgui, etc
+	"""
+	env = self.env
+	def process_lib(vars_, coreval):
+		for d in vars_:
+			var = d.upper()
+			if var == 'QTCORE':
+				continue
+
+			value = env['LIBPATH_'+var]
+			if value:
+				core = env[coreval]
+				accu = []
+				for lib in value:
+					if lib in core:
+						continue
+					accu.append(lib)
+				env['LIBPATH_'+var] = accu
+	process_lib(self.qt_vars, 'LIBPATH_QTCORE')
+
+@conf
+def add_qt5_rpath(self):
+	"""
+	Defines rpath entries for Qt libraries
+	"""
+	env = self.env
+	if getattr(Options.options, 'want_rpath', False):
+		def process_rpath(vars_, coreval):
+			for d in vars_:
+				var = d.upper()
+				value = env['LIBPATH_' + var]
+				if value:
+					core = env[coreval]
+					accu = []
+					for lib in value:
+						if var != 'QTCORE':
+							if lib in core:
+								continue
+						accu.append('-Wl,--rpath='+lib)
+					env['RPATH_' + var] = accu
+		process_rpath(self.qt_vars, 'LIBPATH_QTCORE')
+
+@conf
+def set_qt5_libs_to_check(self):
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	if not self.qt_vars:
+		dirlst = Utils.listdir(self.env.QTLIBS)
+
+		pat = self.env.cxxshlib_PATTERN
+		if Utils.is_win32:
+			pat = pat.replace('.dll', '.lib')
+		if self.environ.get('QT' + qt_ver + '_FORCE_STATIC'):
+			pat = self.env.cxxstlib_PATTERN
+		if Utils.unversioned_sys_platform() == 'darwin':
+			pat = r"%s\.framework"
+
+		if self.want_qt6:
+			# match Qt6Name or QtName but not Qt5Name
+			mid_pattern = pat % 'Qt6?(?P<name>[^5]\\w+)'
+		else:
+			# match Qt5Name or QtName but not Qt6Name
+			mid_pattern = pat % 'Qt5?(?P<name>[^6]\\w+)'
+		re_qt = re.compile('^%s$' % mid_pattern)
+
+		for x in sorted(dirlst):
+			m = re_qt.match(x)
+			if m:
+				self.qt_vars.append("Qt%s%s" % (qt_ver, m.group('name')))
+		if not self.qt_vars:
+			self.fatal('cannot find any Qt%s library (%r)' % (qt_ver, self.env.QTLIBS))
+
+	qtextralibs = getattr(Options.options, 'qtextralibs', None)
+	if qtextralibs:
+		self.qt_vars.extend(qtextralibs.split(','))
+
+@conf
+def set_qt5_defines(self):
+	qt_ver = '6' if self.want_qt6 else '5'
+
+	if sys.platform != 'win32':
+		return
+
+	for x in self.qt_vars:
+		y=x.replace('Qt' + qt_ver, 'Qt')[2:].upper()
+		self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
+
+def options(opt):
+	"""
+	Command-line options
+	"""
+	opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
+	for i in 'qtdir qtbin qtlibs'.split():
+		opt.add_option('--'+i, type=str, default='', dest=i)
+
+	opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
+	opt.add_option('--qtextralibs', type=str, default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
+

+ 29 - 36
sdk/waf/waflib/Tools/ruby.py

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # daniel.svensson at purplescout.se 2008
-# Thomas Nagy 2010 (ita)
+# Thomas Nagy 2016-2018 (ita)
 
 """
 Support for Ruby extensions. A C/C++ compiler is required::
@@ -23,12 +23,12 @@ Support for Ruby extensions. A C/C++ compiler is required::
 """
 
 import os
-from waflib import Task, Options, Utils
-from waflib.TaskGen import before_method, feature, after_method, Task, extension
+from waflib import Errors, Options, Task, Utils
+from waflib.TaskGen import before_method, feature, extension
 from waflib.Configure import conf
 
 @feature('rubyext')
-@before_method('apply_incpaths', 'apply_lib_vars', 'apply_bundle', 'apply_link')
+@before_method('apply_incpaths', 'process_source', 'apply_bundle', 'apply_link')
 def init_rubyext(self):
 	"""
 	Add required variables for ruby extensions
@@ -41,12 +41,12 @@ def init_rubyext(self):
 		self.uselib.append('RUBYEXT')
 
 @feature('rubyext')
-@before_method('apply_link', 'propagate_uselib')
+@before_method('apply_link', 'propagate_uselib_vars')
 def apply_ruby_so_name(self):
 	"""
 	Strip the *lib* prefix from ruby extensions
 	"""
-	self.env['cshlib_PATTERN'] = self.env['cxxshlib_PATTERN'] = self.env['rubyext_PATTERN']
+	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.rubyext_PATTERN
 
 @conf
 def check_ruby_version(self, minver=()):
@@ -56,33 +56,26 @@ def check_ruby_version(self, minver=()):
 	The ruby binary can be overridden by ``--with-ruby-binary`` command-line option.
 	"""
 
-	if Options.options.rubybinary:
-		self.env.RUBY = Options.options.rubybinary
-	else:
-		self.find_program('ruby', var='RUBY')
-
-	ruby = self.env.RUBY
+	ruby = self.find_program('ruby', var='RUBY', value=Options.options.rubybinary)
 
 	try:
-		version = self.cmd_and_log([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
-	except Exception:
+		version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
+	except Errors.WafError:
 		self.fatal('could not determine ruby version')
 	self.env.RUBY_VERSION = version
 
 	try:
-		ver = tuple(map(int, version.split(".")))
-	except Exception:
+		ver = tuple(map(int, version.split('.')))
+	except Errors.WafError:
 		self.fatal('unsupported ruby version %r' % version)
 
 	cver = ''
 	if minver:
+		cver = '> ' + '.'.join(str(x) for x in minver)
 		if ver < minver:
 			self.fatal('ruby is too old %r' % ver)
-		cver = '.'.join([str(x) for x in minver])
-	else:
-		cver = ver
 
-	self.msg('Checking for ruby version %s' % str(minver or ''), cver)
+	self.msg('Checking for ruby version %s' % cver, version)
 
 @conf
 def check_ruby_ext_devel(self):
@@ -98,21 +91,21 @@ def check_ruby_ext_devel(self):
 	version = tuple(map(int, self.env.RUBY_VERSION.split(".")))
 
 	def read_out(cmd):
-		return Utils.to_list(self.cmd_and_log([self.env.RUBY, '-rrbconfig', '-e', cmd]))
+		return Utils.to_list(self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd]))
 
 	def read_config(key):
-		return read_out('puts Config::CONFIG[%r]' % key)
+		return read_out('puts RbConfig::CONFIG[%r]' % key)
 
-	ruby = self.env['RUBY']
-	archdir = read_config('archdir')
-	cpppath = archdir
+	cpppath = archdir = read_config('archdir')
 
 	if version >= (1, 9, 0):
 		ruby_hdrdir = read_config('rubyhdrdir')
 		cpppath += ruby_hdrdir
+		if version >= (2, 0, 0):
+			cpppath += read_config('rubyarchhdrdir')
 		cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
 
-	self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file')
+	self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file', link_header_test=False)
 
 	self.env.LIBPATH_RUBYEXT = read_config('libdir')
 	self.env.LIBPATH_RUBYEXT += archdir
@@ -157,28 +150,28 @@ def check_ruby_module(self, module_name):
 	"""
 	self.start_msg('Ruby module %s' % module_name)
 	try:
-		self.cmd_and_log([self.env['RUBY'], '-e', 'require \'%s\';puts 1' % module_name])
-	except Exception:
+		self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name])
+	except Errors.WafError:
 		self.end_msg(False)
 		self.fatal('Could not find the ruby module %r' % module_name)
 	self.end_msg(True)
 
 @extension('.rb')
 def process(self, node):
-	tsk = self.create_task('run_ruby', node)
+	return self.create_task('run_ruby', node)
 
 class run_ruby(Task.Task):
 	"""
 	Task to run ruby files detected by file extension .rb::
-	
+
 		def options(opt):
 			opt.load('ruby')
-		
+
 		def configure(ctx):
 			ctx.check_ruby_version()
-		
+
 		def build(bld):
-			bld.env['RBFLAGS'] = '-e puts "hello world"'
+			bld.env.RBFLAGS = '-e puts "hello world"'
 			bld(source='a_ruby_file.rb')
 	"""
 	run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
@@ -187,7 +180,7 @@ def options(opt):
 	"""
 	Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
 	"""
-	opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
-	opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
-	opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
+	opt.add_option('--with-ruby-archdir', type=str, dest='rubyarchdir', help='Specify directory where to install arch specific files')
+	opt.add_option('--with-ruby-libdir', type=str, dest='rubylibdir', help='Specify alternate ruby library path')
+	opt.add_option('--with-ruby-binary', type=str, dest='rubybinary', help='Specify alternate ruby binary')
 

+ 29 - 39
sdk/waf/waflib/Tools/suncc.py

@@ -1,33 +1,25 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 # Ralf Habacker, 2006 (rh)
 
-import os
-from waflib import Utils
+from waflib import Errors
 from waflib.Tools import ccroot, ar
 from waflib.Configure import conf
 
 @conf
 def find_scc(conf):
 	"""
-	Detect the Sun C compiler
+	Detects the Sun C compiler
 	"""
 	v = conf.env
-	cc = None
-	if v['CC']: cc = v['CC']
-	elif 'CC' in conf.environ: cc = conf.environ['CC']
-	if not cc: cc = conf.find_program('cc', var='CC')
-	if not cc: conf.fatal('Could not find a Sun C compiler')
-	cc = conf.cmd_to_list(cc)
-
+	cc = conf.find_program('cc', var='CC')
 	try:
 		conf.cmd_and_log(cc + ['-flags'])
-	except Exception:
+	except Errors.WafError:
 		conf.fatal('%r is not a Sun compiler' % cc)
-
-	v['CC']  = cc
-	v['CC_NAME'] = 'sun'
+	v.CC_NAME = 'sun'
+	conf.get_suncc_version(cc)
 
 @conf
 def scc_common_flags(conf):
@@ -36,36 +28,34 @@ def scc_common_flags(conf):
 	"""
 	v = conf.env
 
-	v['CC_SRC_F']            = []
-	v['CC_TGT_F']            = ['-c', '-o']
+	v.CC_SRC_F            = []
+	v.CC_TGT_F            = ['-c', '-o', '']
+
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
 
-	# linker
-	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
-	v['CCLNK_SRC_F']         = ''
-	v['CCLNK_TGT_F']         = ['-o']
-	v['CPPPATH_ST']          = '-I%s'
-	v['DEFINES_ST']          = '-D%s'
+	v.CCLNK_SRC_F         = ''
+	v.CCLNK_TGT_F         = ['-o', '']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
 
-	v['LIB_ST']              = '-l%s' # template for adding libs
-	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-	v['STLIB_ST']            = '-l%s'
-	v['STLIBPATH_ST']        = '-L%s'
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
 
-	v['SONAME_ST']           = '-Wl,-h,%s'
-	v['SHLIB_MARKER']        = '-Bdynamic'
-	v['STLIB_MARKER']        = '-Bstatic'
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Bdynamic'
+	v.STLIB_MARKER        = '-Bstatic'
 
-	# program
-	v['cprogram_PATTERN']    = '%s'
+	v.cprogram_PATTERN    = '%s'
 
-	# shared library
-	v['CFLAGS_cshlib']       = ['-Kpic', '-DPIC']
-	v['LINKFLAGS_cshlib']    = ['-G']
-	v['cshlib_PATTERN']      = 'lib%s.so'
+	v.CFLAGS_cshlib       = ['-xcode=pic32', '-DPIC']
+	v.LINKFLAGS_cshlib    = ['-G']
+	v.cshlib_PATTERN      = 'lib%s.so'
 
-	# static lib
-	v['LINKFLAGS_cstlib']    = ['-Bstatic']
-	v['cstlib_PATTERN']      = 'lib%s.a'
+	v.LINKFLAGS_cstlib    = ['-Bstatic']
+	v.cstlib_PATTERN      = 'lib%s.a'
 
 def configure(conf):
 	conf.find_scc()

+ 29 - 40
sdk/waf/waflib/Tools/suncxx.py

@@ -1,34 +1,25 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 # Ralf Habacker, 2006 (rh)
 
-import os
-from waflib import Utils
+from waflib import Errors
 from waflib.Tools import ccroot, ar
 from waflib.Configure import conf
 
 @conf
 def find_sxx(conf):
 	"""
-	Detect the sun C++ compiler
+	Detects the sun C++ compiler
 	"""
 	v = conf.env
-	cc = None
-	if v['CXX']: cc = v['CXX']
-	elif 'CXX' in conf.environ: cc = conf.environ['CXX']
-	if not cc: cc = conf.find_program('CC', var='CXX') #studio
-	if not cc: cc = conf.find_program('c++', var='CXX')
-	if not cc: conf.fatal('Could not find a Sun C++ compiler')
-	cc = conf.cmd_to_list(cc)
-
+	cc = conf.find_program(['CC', 'c++'], var='CXX')
 	try:
 		conf.cmd_and_log(cc + ['-flags'])
-	except Exception:
+	except Errors.WafError:
 		conf.fatal('%r is not a Sun compiler' % cc)
-
-	v['CXX']  = cc
-	v['CXX_NAME'] = 'sun'
+	v.CXX_NAME = 'sun'
+	conf.get_suncc_version(cc)
 
 @conf
 def sxx_common_flags(conf):
@@ -37,36 +28,34 @@ def sxx_common_flags(conf):
 	"""
 	v = conf.env
 
-	v['CXX_SRC_F']           = []
-	v['CXX_TGT_F']           = ['-c', '-o']
+	v.CXX_SRC_F           = []
+	v.CXX_TGT_F           = ['-c', '-o', '']
+
+	if not v.LINK_CXX:
+		v.LINK_CXX = v.CXX
 
-	# linker
-	if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
-	v['CXXLNK_SRC_F']        = []
-	v['CXXLNK_TGT_F']        = ['-o']
-	v['CPPPATH_ST']          = '-I%s'
-	v['DEFINES_ST']          = '-D%s'
+	v.CXXLNK_SRC_F        = []
+	v.CXXLNK_TGT_F        = ['-o', '']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
 
-	v['LIB_ST']              = '-l%s' # template for adding libs
-	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-	v['STLIB_ST']            = '-l%s'
-	v['STLIBPATH_ST']        = '-L%s'
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
 
-	v['SONAME_ST']           = '-Wl,-h,%s'
-	v['SHLIB_MARKER']        = '-Bdynamic'
-	v['STLIB_MARKER']        = '-Bstatic'
+	v.SONAME_ST           = '-Wl,-h,%s'
+	v.SHLIB_MARKER        = '-Bdynamic'
+	v.STLIB_MARKER        = '-Bstatic'
 
-	# program
-	v['cxxprogram_PATTERN']  = '%s'
+	v.cxxprogram_PATTERN  = '%s'
 
-	# shared library
-	v['CXXFLAGS_cxxshlib']   = ['-Kpic', '-DPIC']
-	v['LINKFLAGS_cxxshlib']  = ['-G']
-	v['cxxshlib_PATTERN']    = 'lib%s.so'
+	v.CXXFLAGS_cxxshlib   = ['-xcode=pic32', '-DPIC']
+	v.LINKFLAGS_cxxshlib  = ['-G']
+	v.cxxshlib_PATTERN    = 'lib%s.so'
 
-	# static lib
-	v['LINKFLAGS_cxxstlib']  = ['-Bstatic']
-	v['cxxstlib_PATTERN']    = 'lib%s.a'
+	v.LINKFLAGS_cxxstlib  = ['-Bstatic']
+	v.cxxstlib_PATTERN    = 'lib%s.a'
 
 def configure(conf):
 	conf.find_sxx()

+ 255 - 127
sdk/waf/waflib/Tools/tex.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 
 """
 TeX/LaTeX/PDFLaTeX/XeLaTeX support
@@ -20,21 +20,27 @@ Example::
 			outs     = 'ps', # 'pdf' or 'ps pdf'
 			deps     = 'crossreferencing.lst', # to give dependencies directly
 			prompt   = 1, # 0 for the batch mode
-			)
+		)
 
-To configure with a special program use::
+Notes:
 
-	$ PDFLATEX=luatex waf configure
+- To configure with a special program, use::
+
+     $ PDFLATEX=luatex waf configure
+
+- This tool does not use the target attribute of the task generator
+  (``bld(target=...)``); the target file name is built from the source
+  base name and the output type(s)
 """
 
 import os, re
-from waflib import Utils, Task, Errors, Logs
+from waflib import Utils, Task, Errors, Logs, Node
 from waflib.TaskGen import feature, before_method
 
 re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
 def bibunitscan(self):
 	"""
-	Parse the inputs and try to find the *bibunit* dependencies
+	Parses TeX inputs and try to find the *bibunit* file dependencies
 
 	:return: list of bibunit files
 	:rtype: list of :py:class:`waflib.Node.Node`
@@ -42,43 +48,55 @@ def bibunitscan(self):
 	node = self.inputs[0]
 
 	nodes = []
-	if not node: return nodes
+	if not node:
+		return nodes
 
 	code = node.read()
-
 	for match in re_bibunit.finditer(code):
 		path = match.group('file')
 		if path:
-			for k in ['', '.bib']:
+			found = None
+			for k in ('', '.bib'):
 				# add another loop for the tex include paths?
-				Logs.debug('tex: trying %s%s' % (path, k))
+				Logs.debug('tex: trying %s%s', path, k)
 				fi = node.parent.find_resource(path + k)
 				if fi:
+					found = True
 					nodes.append(fi)
-					# no break, people are crazy
-			else:
-				Logs.debug('tex: could not find %s' % path)
+					# no break
+			if not found:
+				Logs.debug('tex: could not find %s', path)
 
-	Logs.debug("tex: found the following bibunit files: %s" % nodes)
+	Logs.debug('tex: found the following bibunit files: %s', nodes)
 	return nodes
 
-exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps']
+known_tex_env_vars = ['TEXINPUTS', 'TEXFONTS', 'PKFONTS', 'TEXPKS', 'GFFONTS']
+"""Tex environment variables that are should cause rebuilds when the values change"""
+
+exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty']
 """List of typical file extensions included in latex files"""
 
 exts_tex = ['.ltx', '.tex']
 """List of typical file extensions that contain latex"""
 
-re_tex = re.compile(r'\\(?P<type>include|bibliography|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
+re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
 """Regexp for expressions that may include latex files"""
 
 g_bibtex_re = re.compile('bibdata', re.M)
 """Regexp for bibtex files"""
 
+g_glossaries_re = re.compile('\\@newglossary', re.M)
+"""Regexp for expressions that create glossaries"""
+
 class tex(Task.Task):
 	"""
-	Compile a tex/latex file.
+	Compiles a tex/latex file.
+
+	A series of applications need to be run by setting certain environmental variables;
+	these variables are repeatedly regenerated during processing (self.env.env).
 
 	.. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
+	   :top-classes: waflib.Tools.tex.tex
 	"""
 
 	bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
@@ -91,24 +109,32 @@ class tex(Task.Task):
 	Execute the program **makeindex**
 	"""
 
+	makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}', shell=False)
+	makeglossaries_fun.__doc__ = """
+	Execute the program **makeglossaries**
+	"""
+
+	def make_os_env_again(self):
+		if self.generator.env.env:
+			self.env.env = dict(self.generator.env.env)
+		else:
+			self.env.env = dict(os.environ)
+
 	def exec_command(self, cmd, **kw):
 		"""
-		Override :py:meth:`waflib.Task.Task.exec_command` to execute the command without buffering (latex may prompt for inputs)
+		Executes TeX commands without buffering (latex may prompt for inputs)
 
 		:return: the return code
 		:rtype: int
 		"""
-		bld = self.generator.bld
-		try:
-			if not kw.get('cwd', None):
-				kw['cwd'] = bld.cwd
-		except AttributeError:
-			bld.cwd = kw['cwd'] = bld.variant_dir
-		return Utils.subprocess.Popen(cmd, **kw).wait()
+		if self.env.PROMPT_LATEX:
+			# capture the outputs in configuration tests
+			kw['stdout'] = kw['stderr'] = None
+		return super(tex, self).exec_command(cmd, **kw)
 
 	def scan_aux(self, node):
 		"""
-		A recursive regex-based scanner that finds included auxiliary files.
+		Recursive regex-based scanner that finds included auxiliary files.
 		"""
 		nodes = [node]
 		re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)
@@ -119,16 +145,15 @@ class tex(Task.Task):
 				path = match.group('file')
 				found = node.parent.find_or_declare(path)
 				if found and found not in nodes:
-					Logs.debug('tex: found aux node ' + found.abspath())
+					Logs.debug('tex: found aux node %r', found)
 					nodes.append(found)
 					parse_node(found)
-
 		parse_node(node)
 		return nodes
 
 	def scan(self):
 		"""
-		A recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`
+		Recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`
 
 		Depending on your needs you might want:
 
@@ -147,22 +172,37 @@ class tex(Task.Task):
 		nodes = []
 		names = []
 		seen = []
-		if not node: return (nodes, names)
+		if not node:
+			return (nodes, names)
 
 		def parse_node(node):
 			if node in seen:
 				return
 			seen.append(node)
 			code = node.read()
-			global re_tex
 			for match in re_tex.finditer(code):
+
+				multibib = match.group('type')
+				if multibib and multibib.startswith('bibliography'):
+					multibib = multibib[len('bibliography'):]
+					if multibib.startswith('style'):
+						continue
+				else:
+					multibib = None
+
 				for path in match.group('file').split(','):
 					if path:
 						add_name = True
 						found = None
 						for k in exts_deps_tex:
-							Logs.debug('tex: trying %s%s' % (path, k))
-							found = node.parent.find_resource(path + k)
+
+							# issue 1067, scan in all texinputs folders
+							for up in self.texinputs_nodes:
+								Logs.debug('tex: trying %s%s', path, k)
+								found = up.find_resource(path + k)
+								if found:
+									break
+
 
 							for tsk in self.generator.tasks:
 								if not found or found in tsk.outputs:
@@ -174,6 +214,14 @@ class tex(Task.Task):
 									if found.name.endswith(ext):
 										parse_node(found)
 										break
+
+							# multibib stuff
+							if found and multibib and found.name.endswith('.bib'):
+								try:
+									self.multibibs.append(found)
+								except AttributeError:
+									self.multibibs = [found]
+
 							# no break, people are crazy
 						if add_name:
 							names.append(path)
@@ -182,12 +230,12 @@ class tex(Task.Task):
 		for x in nodes:
 			x.parent.get_bld().mkdir()
 
-		Logs.debug("tex: found the following : %s and names %s" % (nodes, names))
+		Logs.debug("tex: found the following : %s and names %s", nodes, names)
 		return (nodes, names)
 
 	def check_status(self, msg, retcode):
 		"""
-		Check an exit status and raise an error with a particular message
+		Checks an exit status and raise an error with a particular message
 
 		:param msg: message to display if the code is non-zero
 		:type msg: string
@@ -195,33 +243,45 @@ class tex(Task.Task):
 		:type retcode: boolean
 		"""
 		if retcode != 0:
-			raise Errors.WafError("%r command exit status %r" % (msg, retcode))
+			raise Errors.WafError('%r command exit status %r' % (msg, retcode))
+
+	def info(self, *k, **kw):
+		try:
+			info = self.generator.bld.conf.logger.info
+		except AttributeError:
+			info = Logs.info
+		info(*k, **kw)
 
 	def bibfile(self):
 		"""
-		Parse the *.aux* files to find bibfiles to process.
-		If yes, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
+		Parses *.aux* files to find bibfiles to process.
+		If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
 		"""
 		for aux_node in self.aux_nodes:
 			try:
 				ct = aux_node.read()
-			except (OSError, IOError):
-				Logs.error('Error reading %s: %r' % aux_node.abspath())
+			except EnvironmentError:
+				Logs.error('Error reading %s: %r', aux_node.abspath())
 				continue
 
 			if g_bibtex_re.findall(ct):
-				Logs.warn('calling bibtex')
+				self.info('calling bibtex')
 
-				self.env.env = {}
-				self.env.env.update(os.environ)
-				self.env.env.update({'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS})
+				self.make_os_env_again()
+				self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
 				self.env.SRCFILE = aux_node.name[:-4]
 				self.check_status('error when calling bibtex', self.bibtex_fun())
 
+		for node in getattr(self, 'multibibs', []):
+			self.make_os_env_again()
+			self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
+			self.env.SRCFILE = node.name[:-4]
+			self.check_status('error when calling bibtex', self.bibtex_fun())
+
 	def bibunits(self):
 		"""
-		Parse the *.aux* file to find bibunit files. If there are bibunit files,
-		execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
+		Parses *.aux* file to find bibunit files. If there are bibunit files,
+		runs :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
 		"""
 		try:
 			bibunits = bibunitscan(self)
@@ -229,126 +289,171 @@ class tex(Task.Task):
 			Logs.error('error bibunitscan')
 		else:
 			if bibunits:
-				fn  = ['bu' + str(i) for i in xrange(1, len(bibunits) + 1)]
+				fn  = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
 				if fn:
-					Logs.warn('calling bibtex on bibunits')
+					self.info('calling bibtex on bibunits')
 
 				for f in fn:
-					self.env.env = {'BIBINPUTS': self.TEXINPUTS, 'BSTINPUTS': self.TEXINPUTS}
+					self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}
 					self.env.SRCFILE = f
 					self.check_status('error when calling bibtex', self.bibtex_fun())
 
 	def makeindex(self):
 		"""
-		Look on the filesystem if there is a *.idx* file to process. If yes, execute
-		:py:meth:`waflib.Tools.tex.tex.makeindex_fun`
+		Searches the filesystem for *.idx* files to process. If present,
+		runs :py:meth:`waflib.Tools.tex.tex.makeindex_fun`
 		"""
+		self.idx_node = self.inputs[0].change_ext('.idx')
 		try:
 			idx_path = self.idx_node.abspath()
 			os.stat(idx_path)
 		except OSError:
-			Logs.warn('index file %s absent, not calling makeindex' % idx_path)
+			self.info('index file %s absent, not calling makeindex', idx_path)
 		else:
-			Logs.warn('calling makeindex')
+			self.info('calling makeindex')
 
 			self.env.SRCFILE = self.idx_node.name
-			self.env.env = {}
+			self.make_os_env_again()
 			self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())
 
 	def bibtopic(self):
 		"""
-		Additional .aux files from the bibtopic package
+		Lists additional .aux files from the bibtopic package
 		"""
 		p = self.inputs[0].parent.get_bld()
 		if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
 			self.aux_nodes += p.ant_glob('*[0-9].aux')
 
-	def run(self):
+	def makeglossaries(self):
+		"""
+		Lists additional glossaries from .aux files. If present, runs the makeglossaries program.
 		"""
-		Runs the TeX build process.
+		src_file = self.inputs[0].abspath()
+		base_file = os.path.basename(src_file)
+		base, _ = os.path.splitext(base_file)
+		for aux_node in self.aux_nodes:
+			try:
+				ct = aux_node.read()
+			except EnvironmentError:
+				Logs.error('Error reading %s: %r', aux_node.abspath())
+				continue
 
-		It may require multiple passes, depending on the usage of cross-references,
-		bibliographies, content susceptible of needing such passes.
-		The appropriate TeX compiler is called until the *.aux* files stop changing.
+			if g_glossaries_re.findall(ct):
+				if not self.env.MAKEGLOSSARIES:
+					raise Errors.WafError("The program 'makeglossaries' is missing!")
+				Logs.warn('calling makeglossaries')
+				self.env.SRCFILE = base
+				self.check_status('error when calling makeglossaries %s' % base, self.makeglossaries_fun())
+				return
 
-		Makeindex and bibtex are called if necessary.
+	def texinputs(self):
 		"""
-		env = self.env
+		Returns the list of texinput nodes as a string suitable for the TEXINPUTS environment variables
 
-		if not env['PROMPT_LATEX']:
-			env.append_value('LATEXFLAGS', '-interaction=batchmode')
-			env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
-			env.append_value('XELATEXFLAGS', '-interaction=batchmode')
+		:rtype: string
+		"""
+		return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep
 
-		fun = self.texfun
+	def run(self):
+		"""
+		Runs the whole TeX build process
 
-		node = self.inputs[0]
-		srcfile = node.abspath()
+		Multiple passes are required depending on the usage of cross-references,
+		bibliographies, glossaries, indexes and additional contents
+		The appropriate TeX compiler is called until the *.aux* files stop changing.
+		"""
+		env = self.env
 
-		texinputs = self.env.TEXINPUTS or ''
-		self.TEXINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep + texinputs + os.pathsep
+		if not env.PROMPT_LATEX:
+			env.append_value('LATEXFLAGS', '-interaction=nonstopmode')
+			env.append_value('PDFLATEXFLAGS', '-interaction=nonstopmode')
+			env.append_value('XELATEXFLAGS', '-interaction=nonstopmode')
 
 		# important, set the cwd for everybody
-		self.cwd = self.inputs[0].parent.get_bld().abspath()
+		self.cwd = self.inputs[0].parent.get_bld()
+
+		self.info('first pass on %s', self.__class__.__name__)
 
-		Logs.warn('first pass on %s' % self.__class__.__name__)
+		# Hash .aux files before even calling the LaTeX compiler
+		cur_hash = self.hash_aux_nodes()
 
-		self.env.env = {}
-		self.env.env.update(os.environ)
-		self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
-		self.env.SRCFILE = srcfile
-		self.check_status('error when calling latex', fun())
+		self.call_latex()
 
-		self.aux_nodes = self.scan_aux(node.change_ext('.aux'))
-		self.idx_node = node.change_ext('.idx')
+		# Find the .aux files again since bibtex processing can require it
+		self.hash_aux_nodes()
 
 		self.bibtopic()
 		self.bibfile()
 		self.bibunits()
 		self.makeindex()
+		self.makeglossaries()
 
-		hash = ''
 		for i in range(10):
-			# prevent against infinite loops - one never knows
-
-			# watch the contents of file.aux and stop if file.aux does not change anymore
-			prev_hash = hash
-			try:
-				hashes = [Utils.h_file(x.abspath()) for x in self.aux_nodes]
-				hash = Utils.h_list(hashes)
-			except (OSError, IOError):
-				Logs.error('could not read aux.h')
-				pass
-			if hash and hash == prev_hash:
+			# There is no need to call latex again if the .aux hash value has not changed
+			prev_hash = cur_hash
+			cur_hash = self.hash_aux_nodes()
+			if not cur_hash:
+				Logs.error('No aux.h to process')
+			if cur_hash and cur_hash == prev_hash:
 				break
 
 			# run the command
-			Logs.warn('calling %s' % self.__class__.__name__)
+			self.info('calling %s', self.__class__.__name__)
+			self.call_latex()
 
-			self.env.env = {}
-			self.env.env.update(os.environ)
-			self.env.env.update({'TEXINPUTS': self.TEXINPUTS})
-			self.env.SRCFILE = srcfile
-			self.check_status('error when calling %s' % self.__class__.__name__, fun())
+	def hash_aux_nodes(self):
+		"""
+		Returns a hash of the .aux file contents
+
+		:rtype: string or bytes
+		"""
+		try:
+			self.aux_nodes
+		except AttributeError:
+			try:
+				self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
+			except IOError:
+				return None
+		return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
+
+	def call_latex(self):
+		"""
+		Runs the TeX compiler once
+		"""
+		self.make_os_env_again()
+		self.env.env.update({'TEXINPUTS': self.texinputs()})
+		self.env.SRCFILE = self.inputs[0].abspath()
+		self.check_status('error when calling latex', self.texfun())
 
 class latex(tex):
+	"Compiles LaTeX files"
 	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
+	vars.append('TEXDEPS')
+
 class pdflatex(tex):
+	"Compiles PdfLaTeX files"
 	texfun, vars =  Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
+	vars.append('TEXDEPS')
+
 class xelatex(tex):
+	"XeLaTeX files"
 	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
+	vars.append('TEXDEPS')
 
 class dvips(Task.Task):
+	"Converts dvi files to postscript"
 	run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
 	color   = 'BLUE'
 	after   = ['latex', 'pdflatex', 'xelatex']
 
 class dvipdf(Task.Task):
+	"Converts dvi files to pdf"
 	run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
 	color   = 'BLUE'
 	after   = ['latex', 'pdflatex', 'xelatex']
 
 class pdf2ps(Task.Task):
+	"Converts pdf files to postscript"
 	run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
 	color   = 'BLUE'
 	after   = ['latex', 'pdflatex', 'xelatex']
@@ -357,30 +462,38 @@ class pdf2ps(Task.Task):
 @before_method('process_source')
 def apply_tex(self):
 	"""
-	Create :py:class:`waflib.Tools.tex.tex` objects, and dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
+	Creates :py:class:`waflib.Tools.tex.tex` objects, and
+	dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
 	"""
-	if not getattr(self, 'type', None) in ['latex', 'pdflatex', 'xelatex']:
+	if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'):
 		self.type = 'pdflatex'
 
-	tree = self.bld
 	outs = Utils.to_list(getattr(self, 'outs', []))
 
-	# prompt for incomplete files (else the batchmode is used)
-	self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)
+	# prompt for incomplete files (else the nonstopmode is used)
+	try:
+		self.generator.bld.conf
+	except AttributeError:
+		default_prompt = False
+	else:
+		default_prompt = True
+	self.env.PROMPT_LATEX = getattr(self, 'prompt', default_prompt)
 
 	deps_lst = []
 
 	if getattr(self, 'deps', None):
 		deps = self.to_list(self.deps)
-		for filename in deps:
-			n = self.path.find_resource(filename)
-			if not n:
-				self.bld.fatal('Could not find %r for %r' % (filename, self))
-			if not n in deps_lst:
-				deps_lst.append(n)
+		for dep in deps:
+			if isinstance(dep, str):
+				n = self.path.find_resource(dep)
+				if not n:
+					self.bld.fatal('Could not find %r for %r' % (dep, self))
+				if not n in deps_lst:
+					deps_lst.append(n)
+			elif isinstance(dep, Node.Node):
+				deps_lst.append(dep)
 
 	for node in self.to_nodes(self.source):
-
 		if self.type == 'latex':
 			task = self.create_task('latex', node, node.change_ext('.dvi'))
 		elif self.type == 'pdflatex':
@@ -388,44 +501,59 @@ def apply_tex(self):
 		elif self.type == 'xelatex':
 			task = self.create_task('xelatex', node, node.change_ext('.pdf'))
 
-		task.env = self.env
+		# rebuild when particular environment variables changes are detected
+		task.make_os_env_again()
+		task.env.TEXDEPS = Utils.h_list([task.env.env.get(x, '') for x in known_tex_env_vars])
 
 		# add the manual dependencies
 		if deps_lst:
-			try:
-				lst = tree.node_deps[task.uid()]
-				for n in deps_lst:
-					if not n in lst:
-						lst.append(n)
-			except KeyError:
-				tree.node_deps[task.uid()] = deps_lst
+			for n in deps_lst:
+				if not n in task.dep_nodes:
+					task.dep_nodes.append(n)
 
-		v = dict(os.environ)
-		p = node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.get_bld().abspath() + os.pathsep + v.get('TEXINPUTS', '') + os.pathsep
-		v['TEXINPUTS'] = p
+		# texinputs is a nasty beast
+		if hasattr(self, 'texinputs_nodes'):
+			task.texinputs_nodes = self.texinputs_nodes
+		else:
+			task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()]
+			lst = os.environ.get('TEXINPUTS', '')
+			if self.env.TEXINPUTS:
+				lst += os.pathsep + self.env.TEXINPUTS
+			if lst:
+				lst = lst.split(os.pathsep)
+			for x in lst:
+				if x:
+					if os.path.isabs(x):
+						p = self.bld.root.find_node(x)
+						if p:
+							task.texinputs_nodes.append(p)
+						else:
+							Logs.error('Invalid TEXINPUTS folder %s', x)
+					else:
+						Logs.error('Cannot resolve relative paths in TEXINPUTS %s', x)
 
 		if self.type == 'latex':
 			if 'ps' in outs:
 				tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
-				tsk.env.env = dict(v)
+				tsk.env.env = dict(os.environ)
 			if 'pdf' in outs:
 				tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
-				tsk.env.env = dict(v)
+				tsk.env.env = dict(os.environ)
 		elif self.type == 'pdflatex':
 			if 'ps' in outs:
 				self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
 	self.source = []
 
+
 def configure(self):
 	"""
-	Try to find the programs tex, latex and others. Do not raise any error if they
-	are not found.
+	Find the programs tex, latex and others without raising errors.
 	"""
 	v = self.env
-	for p in 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
+	for p in 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split():
 		try:
 			self.find_program(p, var=p.upper())
 		except self.errors.ConfigurationError:
 			pass
-	v['DVIPSFLAGS'] = '-Ppdf'
+	v.DVIPSFLAGS = '-Ppdf'
 

+ 104 - 81
sdk/waf/waflib/Tools/vala.py

@@ -8,14 +8,14 @@ At this point, vala is still unstable, so do not expect
 this tool to be too stable either (apis, etc)
 """
 
-import os.path, shutil, re
-from waflib import Context, Task, Utils, Logs, Options, Errors
+import re
+from waflib import Build, Context, Errors, Logs, Node, Options, Task, Utils
 from waflib.TaskGen import extension, taskgen_method
 from waflib.Configure import conf
 
 class valac(Task.Task):
 	"""
-	Task to compile vala files.
+	Compiles vala files
 	"""
 	#run_str = "${VALAC} ${VALAFLAGS}" # ideally
 	#vars = ['VALAC_VERSION']
@@ -23,32 +23,34 @@ class valac(Task.Task):
 	ext_out = ['.h']
 
 	def run(self):
-		cmd = [self.env['VALAC']] + self.env['VALAFLAGS']
-		cmd.extend([a.abspath() for a in self.inputs])
-		ret = self.exec_command(cmd, cwd=self.outputs[0].parent.abspath())
+		cmd = self.env.VALAC + self.env.VALAFLAGS
+		resources = getattr(self, 'vala_exclude', [])
+		cmd.extend([a.abspath() for a in self.inputs if a not in resources])
+		ret = self.exec_command(cmd, cwd=self.vala_dir_node.abspath())
 
 		if ret:
 			return ret
 
-		for x in self.outputs:
-			if id(x.parent) != id(self.outputs[0].parent):
-				shutil.move(self.outputs[0].parent.abspath() + os.sep + x.name, x.abspath())
-
 		if self.generator.dump_deps_node:
 			self.generator.dump_deps_node.write('\n'.join(self.generator.packages))
 
 		return ret
 
-valac = Task.update_outputs(valac) # no decorators for python2 classes
-
 @taskgen_method
 def init_vala_task(self):
+	"""
+	Initializes the vala task with the relevant data (acts as a constructor)
+	"""
 	self.profile = getattr(self, 'profile', 'gobject')
 
+	self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
+	self.use = Utils.to_list(getattr(self, 'use', []))
+	if packages and not self.use:
+		self.use = packages[:] # copy
+
 	if self.profile == 'gobject':
-		self.uselib = Utils.to_list(getattr(self, 'uselib', []))
-		if not 'GOBJECT' in self.uselib:
-			self.uselib.append('GOBJECT')
+		if not 'GOBJECT' in self.use:
+			self.use.append('GOBJECT')
 
 	def addflags(flags):
 		self.env.append_value('VALAFLAGS', flags)
@@ -56,32 +58,46 @@ def init_vala_task(self):
 	if self.profile:
 		addflags('--profile=%s' % self.profile)
 
-	if hasattr(self, 'threading'):
+	valatask = self.valatask
+
+	# output directory
+	if hasattr(self, 'vala_dir'):
+		if isinstance(self.vala_dir, str):
+			valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir)
+			try:
+				valatask.vala_dir_node.mkdir()
+			except OSError:
+				raise self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node)
+		else:
+			valatask.vala_dir_node = self.vala_dir
+	else:
+		valatask.vala_dir_node = self.path.get_bld()
+	addflags('--directory=%s' % valatask.vala_dir_node.abspath())
+
+	if hasattr(self, 'thread'):
 		if self.profile == 'gobject':
-			if not 'GTHREAD' in self.uselib:
-				self.uselib.append('GTHREAD')
+			if not 'GTHREAD' in self.use:
+				self.use.append('GTHREAD')
 		else:
 			#Vala doesn't have threading support for dova nor posix
-			Logs.warn("Profile %s means no threading support" % self.profile)
-			self.threading = False
+			Logs.warn('Profile %s means no threading support', self.profile)
+			self.thread = False
 
-		if self.threading:
-			addflags('--threading')
-
-	valatask = self.valatask
+		if self.thread:
+			addflags('--thread')
 
 	self.is_lib = 'cprogram' not in self.features
 	if self.is_lib:
 		addflags('--library=%s' % self.target)
 
-		h_node = self.path.find_or_declare('%s.h' % self.target)
+		h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target)
 		valatask.outputs.append(h_node)
 		addflags('--header=%s' % h_node.name)
 
-		valatask.outputs.append(self.path.find_or_declare('%s.vapi' % self.target))
+		valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target))
 
 		if getattr(self, 'gir', None):
-			gir_node = self.path.find_or_declare('%s.gir' % self.gir)
+			gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir)
 			addflags('--gir=%s' % gir_node.name)
 			valatask.outputs.append(gir_node)
 
@@ -89,13 +105,11 @@ def init_vala_task(self):
 	if self.vala_target_glib:
 		addflags('--target-glib=%s' % self.vala_target_glib)
 
-	addflags(['--define=%s' % x for x in getattr(self, 'vala_defines', [])])
-
+	addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))])
 
 	packages_private = Utils.to_list(getattr(self, 'packages_private', []))
 	addflags(['--pkg=%s' % x for x in packages_private])
 
-
 	def _get_api_version():
 		api_version = '1.0'
 		if hasattr(Context.g_module, 'API_VERSION'):
@@ -107,17 +121,15 @@ def init_vala_task(self):
 		return api_version
 
 	self.includes = Utils.to_list(getattr(self, 'includes', []))
-	self.uselib = self.to_list(getattr(self, 'uselib', []))
 	valatask.install_path = getattr(self, 'install_path', '')
 
 	valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
-	valatask.pkg_name = getattr(self, 'pkg_name', self.env['PACKAGE'])
+	valatask.pkg_name = getattr(self, 'pkg_name', self.env.PACKAGE)
 	valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
 	valatask.install_binding = getattr(self, 'install_binding', True)
 
-	self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
 	self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
-	includes =  []
+	#includes =  []
 
 	if hasattr(self, 'use'):
 		local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
@@ -133,20 +145,22 @@ def init_vala_task(self):
 				package_obj = self.bld.get_tgen_by_name(package)
 			except Errors.WafError:
 				continue
-			package_name = package_obj.target
-			package_node = package_obj.path
-			package_dir = package_node.path_from(self.path)
 
-			for task in package_obj.tasks:
+			# in practice the other task is already processed
+			# but this makes it explicit
+			package_obj.post()
+			package_name = package_obj.target
+			task = getattr(package_obj, 'valatask', None)
+			if task:
 				for output in task.outputs:
 					if output.name == package_name + ".vapi":
 						valatask.set_run_after(task)
 						if package_name not in packages:
 							packages.append(package_name)
-						if package_dir not in vapi_dirs:
-							vapi_dirs.append(package_dir)
-						if package_dir not in includes:
-							includes.append(package_dir)
+						if output.parent not in vapi_dirs:
+							vapi_dirs.append(output.parent)
+						if output.parent not in self.includes:
+							self.includes.append(output.parent)
 
 			if hasattr(package_obj, 'use'):
 				lst = self.to_list(package_obj.use)
@@ -156,46 +170,40 @@ def init_vala_task(self):
 	addflags(['--pkg=%s' % p for p in packages])
 
 	for vapi_dir in vapi_dirs:
-		v_node = self.path.find_dir(vapi_dir)
+		if isinstance(vapi_dir, Node.Node):
+			v_node = vapi_dir
+		else:
+			v_node = self.path.find_dir(vapi_dir)
 		if not v_node:
-			Logs.warn('Unable to locate Vala API directory: %r' % vapi_dir)
+			Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
 		else:
 			addflags('--vapidir=%s' % v_node.abspath())
-			addflags('--vapidir=%s' % v_node.get_bld().abspath())
 
 	self.dump_deps_node = None
 	if self.is_lib and self.packages:
-		self.dump_deps_node = self.path.find_or_declare('%s.deps' % self.target)
+		self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target)
 		valatask.outputs.append(self.dump_deps_node)
 
-	self.includes.append(self.bld.srcnode.abspath())
-	self.includes.append(self.bld.bldnode.abspath())
-	for include in includes:
-		try:
-			self.includes.append(self.path.find_dir(include).abspath())
-			self.includes.append(self.path.find_dir(include).get_bld().abspath())
-		except AttributeError:
-			Logs.warn("Unable to locate include directory: '%s'" % include)
-
-
 	if self.is_lib and valatask.install_binding:
 		headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
-		try:
-			self.install_vheader.source = headers_list
-		except AttributeError:
-			self.install_vheader = self.bld.install_files(valatask.header_path, headers_list, self.env)
+		if headers_list:
+			self.install_vheader = self.add_install_files(install_to=valatask.header_path, install_from=headers_list)
 
 		vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
-		try:
-			self.install_vapi.source = vapi_list
-		except AttributeError:
-			self.install_vapi = self.bld.install_files(valatask.vapi_path, vapi_list, self.env)
+		if vapi_list:
+			self.install_vapi = self.add_install_files(install_to=valatask.vapi_path, install_from=vapi_list)
 
 		gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
-		try:
-			self.install_gir.source = gir_list
-		except AttributeError:
-			self.install_gir = self.bld.install_files(getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), gir_list, self.env)
+		if gir_list:
+			self.install_gir = self.add_install_files(
+				install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), install_from=gir_list)
+
+	if hasattr(self, 'vala_resources'):
+		nodes = self.to_nodes(self.vala_resources)
+		valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes
+		valatask.inputs.extend(nodes)
+		for x in nodes:
+			addflags(['--gresources', x.abspath()])
 
 @extension('.vala', '.gs')
 def vala_file(self, node):
@@ -207,7 +215,7 @@ def vala_file(self, node):
 			bld.program(
 				packages      = 'gtk+-2.0',
 				target        = 'vala-gtk-example',
-				uselib        = 'GTK GLIB',
+				use           = 'GTK GLIB',
 				source        = 'vala-gtk-example.vala foo.vala',
 				vala_defines  = ['DEBUG'] # adds --define=<xyz> values to the command-line
 
@@ -220,7 +228,7 @@ def vala_file(self, node):
 				#install_binding = False
 
 				# profile     = 'xyz' # adds --profile=<xyz> to enable profiling
-				# threading   = True, # add --threading, except if profile is on or not on 'gobject'
+				# thread      = True, # adds --thread, except if profile is on or not on 'gobject'
 				# vala_target_glib = 'xyz' # adds --target-glib=<xyz>, can be given through the command-line option --vala-target-glib=<xyz>
 			)
 
@@ -236,10 +244,20 @@ def vala_file(self, node):
 		self.init_vala_task()
 
 	valatask.inputs.append(node)
-	c_node = node.change_ext('.c')
+	name = node.name[:node.name.rfind('.')] + '.c'
+	c_node = valatask.vala_dir_node.find_or_declare(name)
 	valatask.outputs.append(c_node)
 	self.source.append(c_node)
 
+@extension('.vapi')
+def vapi_file(self, node):
+	try:
+		valatask = self.valatask
+	except AttributeError:
+		valatask = self.valatask = self.create_task('valac')
+		self.init_vala_task()
+	valatask.inputs.append(node)
+
 @conf
 def find_valac(self, valac_name, min_version):
 	"""
@@ -253,11 +271,11 @@ def find_valac(self, valac_name, min_version):
 	"""
 	valac = self.find_program(valac_name, var='VALAC')
 	try:
-		output = self.cmd_and_log(valac + ' --version')
-	except Exception:
+		output = self.cmd_and_log(valac + ['--version'])
+	except Errors.WafError:
 		valac_version = None
 	else:
-		ver = re.search(r'\d+.\d+.\d+', output).group(0).split('.')
+		ver = re.search(r'\d+.\d+.\d+', output).group().split('.')
 		valac_version = tuple([int(x) for x in ver])
 
 	self.msg('Checking for %s version >= %r' % (valac_name, min_version),
@@ -265,7 +283,7 @@ def find_valac(self, valac_name, min_version):
 	if valac and valac_version < min_version:
 		self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version))
 
-	self.env['VALAC_VERSION'] = valac_version
+	self.env.VALAC_VERSION = valac_version
 	return valac
 
 @conf
@@ -279,6 +297,10 @@ def check_vala(self, min_version=(0,8,0), branch=None):
 	:param branch: first part of the version number, in case a snapshot is used (0, 8)
 	:type branch: tuple of int
 	"""
+	if self.env.VALA_MINVER:
+		min_version = self.env.VALA_MINVER
+	if self.env.VALA_MINVER_BRANCH:
+		branch = self.env.VALA_MINVER_BRANCH
 	if not branch:
 		branch = min_version[:2]
 	try:
@@ -291,7 +313,7 @@ def check_vala_deps(self):
 	"""
 	Load the gobject and gthread packages if they are missing.
 	"""
-	if not self.env['HAVE_GOBJECT']:
+	if not self.env.HAVE_GOBJECT:
 		pkg_args = {'package':      'gobject-2.0',
 		            'uselib_store': 'GOBJECT',
 		            'args':         '--cflags --libs'}
@@ -299,7 +321,7 @@ def check_vala_deps(self):
 			pkg_args['atleast_version'] = Options.options.vala_target_glib
 		self.check_cfg(**pkg_args)
 
-	if not self.env['HAVE_GTHREAD']:
+	if not self.env.HAVE_GTHREAD:
 		pkg_args = {'package':      'gthread-2.0',
 		            'uselib_store': 'GTHREAD',
 		            'args':         '--cflags --libs'}
@@ -312,13 +334,14 @@ def configure(self):
 	Use the following to enforce minimum vala version::
 
 		def configure(conf):
-			conf.load('vala', funs='')
-			conf.check_vala(min_version=(0,10,0))
+			conf.env.VALA_MINVER = (0, 10, 0)
+			conf.load('vala')
 	"""
 	self.load('gnu_dirs')
 	self.check_vala_deps()
 	self.check_vala()
-	self.env.VALAFLAGS = ['-C', '--quiet']
+	self.add_os_flags('VALAFLAGS')
+	self.env.append_unique('VALAFLAGS', ['-C'])
 
 def options(opt):
 	"""
@@ -326,7 +349,7 @@ def options(opt):
 	"""
 	opt.load('gnu_dirs')
 	valaopts = opt.add_option_group('Vala Compiler Options')
-	valaopts.add_option ('--vala-target-glib', default=None,
+	valaopts.add_option('--vala-target-glib', default=None,
 		dest='vala_target_glib', metavar='MAJOR.MINOR',
 		help='Target version of glib for Vala GObject code generation')
 

+ 220 - 90
sdk/waf/waflib/Tools/waf_unit_test.py

@@ -1,10 +1,10 @@
 #!/usr/bin/env python
 # encoding: utf-8
 # Carlos Rafael Giani, 2006
-# Thomas Nagy, 2010
+# Thomas Nagy, 2010-2018 (ita)
 
 """
-Unit testing system for C/C++/D providing test execution:
+Unit testing system for C/C++/D and interpreted languages providing test execution:
 
 * in parallel, by using ``waf -j``
 * partial (only the tests that have changed) or full (by using ``waf --alltests``)
@@ -31,34 +31,167 @@ the predefined callback::
 		bld(features='cxx cxxprogram test', source='main.c', target='app')
 		from waflib.Tools import waf_unit_test
 		bld.add_post_fun(waf_unit_test.summary)
+
+By passing --dump-test-scripts the build outputs corresponding python files
+(with extension _run.py) that are useful for debugging purposes.
 """
 
-import os, sys
-from waflib.TaskGen import feature, after_method
+import os, shlex, sys
+from waflib.TaskGen import feature, after_method, taskgen_method
 from waflib import Utils, Task, Logs, Options
+from waflib.Tools import ccroot
 testlock = Utils.threading.Lock()
 
+SCRIPT_TEMPLATE = """#! %(python)s
+import subprocess, sys
+cmd = %(cmd)r
+# if you want to debug with gdb:
+#cmd = ['gdb', '-args'] + cmd
+env = %(env)r
+status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str))
+sys.exit(status)
+"""
+
+@taskgen_method
+def handle_ut_cwd(self, key):
+	"""
+	Task generator method, used internally to limit code duplication.
+	This method may disappear anytime.
+	"""
+	cwd = getattr(self, key, None)
+	if cwd:
+		if isinstance(cwd, str):
+			# we want a Node instance
+			if os.path.isabs(cwd):
+				self.ut_cwd = self.bld.root.make_node(cwd)
+			else:
+				self.ut_cwd = self.path.make_node(cwd)
+
+@feature('test_scripts')
+def make_interpreted_test(self):
+	"""Create interpreted unit tests."""
+	for x in ['test_scripts_source', 'test_scripts_template']:
+		if not hasattr(self, x):
+			Logs.warn('a test_scripts taskgen i missing %s' % x)
+			return
+
+	self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False))
+
+	script_nodes = self.to_nodes(self.test_scripts_source)
+	for script_node in script_nodes:
+		tsk = self.create_task('utest', [script_node])
+		tsk.vars = lst + tsk.vars
+		tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd())
+
+	self.handle_ut_cwd('test_scripts_cwd')
+
+	env = getattr(self, 'test_scripts_env', None)
+	if env:
+		self.ut_env = env
+	else:
+		self.ut_env = dict(os.environ)
+
+	paths = getattr(self, 'test_scripts_paths', {})
+	for (k,v) in paths.items():
+		p = self.ut_env.get(k, '').split(os.pathsep)
+		if isinstance(v, str):
+			v = v.split(os.pathsep)
+		self.ut_env[k] = os.pathsep.join(p + v)
+	self.env.append_value('UT_DEPS', ['%r%r' % (key, self.ut_env[key]) for key in self.ut_env])
+
 @feature('test')
-@after_method('apply_link')
+@after_method('apply_link', 'process_use')
 def make_test(self):
 	"""Create the unit test task. There can be only one unit test task by task generator."""
-	if getattr(self, 'link_task', None):
-		self.create_task('utest', self.link_task.outputs)
+	if not getattr(self, 'link_task', None):
+		return
+
+	tsk = self.create_task('utest', self.link_task.outputs)
+	if getattr(self, 'ut_str', None):
+		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
+		tsk.vars = tsk.vars + lst
+		self.env.append_value('UT_DEPS', self.ut_str)
 
+	self.handle_ut_cwd('ut_cwd')
+
+	if not hasattr(self, 'ut_paths'):
+		paths = []
+		for x in self.tmp_use_sorted:
+			try:
+				y = self.bld.get_tgen_by_name(x).link_task
+			except AttributeError:
+				pass
+			else:
+				if not isinstance(y, ccroot.stlink_task):
+					paths.append(y.outputs[0].parent.abspath())
+		self.ut_paths = os.pathsep.join(paths) + os.pathsep
+
+	if not hasattr(self, 'ut_env'):
+		self.ut_env = dct = dict(os.environ)
+		def add_path(var):
+			dct[var] = self.ut_paths + dct.get(var,'')
+		if Utils.is_win32:
+			add_path('PATH')
+		elif Utils.unversioned_sys_platform() == 'darwin':
+			add_path('DYLD_LIBRARY_PATH')
+			add_path('LD_LIBRARY_PATH')
+		else:
+			add_path('LD_LIBRARY_PATH')
+
+	if not hasattr(self, 'ut_cmd'):
+		self.ut_cmd = getattr(Options.options, 'testcmd', False)
+
+	self.env.append_value('UT_DEPS', str(self.ut_cmd))
+	self.env.append_value('UT_DEPS', self.ut_paths)
+	self.env.append_value('UT_DEPS', ['%r%r' % (key, self.ut_env[key]) for key in self.ut_env])
+
+@taskgen_method
+def add_test_results(self, tup):
+	"""Override and return tup[1] to interrupt the build immediately if a test does not run"""
+	Logs.debug("ut: %r", tup)
+	try:
+		self.utest_results.append(tup)
+	except AttributeError:
+		self.utest_results = [tup]
+	try:
+		self.bld.utest_results.append(tup)
+	except AttributeError:
+		self.bld.utest_results = [tup]
+
+class test_result(object):
+	def __init__(self, test_path, exit_code, out, err, task):
+		self.task = task
+		self.generator = task.generator
+		self.out = out
+		self.err = err
+		self.exit_code = exit_code
+		self.test_path = test_path
+
+	def __iter__(self):
+		yield self.test_path
+		yield self.exit_code
+		yield self.out
+		yield self.err
+
+	def __getitem__(self, idx):
+		return list(self)[idx]
+
+@Task.deep_inputs
 class utest(Task.Task):
 	"""
 	Execute a unit test
 	"""
 	color = 'PINK'
 	after = ['vnum', 'inst']
-	vars = []
+	vars = ['UT_DEPS']
+
 	def runnable_status(self):
 		"""
 		Always execute the task if `waf --alltests` was used or no
-                tests if ``waf --notests`` was used
+		tests if ``waf --notests`` was used
 		"""
-                if getattr(Options.options, 'no_tests', False):
-                        return Task.SKIP_ME
+		if getattr(Options.options, 'no_tests', False):
+			return Task.SKIP_ME
 
 		ret = super(utest, self).runnable_status()
 		if ret == Task.SKIP_ME:
@@ -66,68 +199,63 @@ class utest(Task.Task):
 				return Task.RUN_ME
 		return ret
 
-	def run(self):
+	def get_test_env(self):
 		"""
-		Execute the test. The execution is always successful, but the results
-		are stored on ``self.generator.bld.utest_results`` for postprocessing.
+		In general, tests may require any library built anywhere in the project.
+		Override this method if fewer paths are needed
 		"""
+		return self.generator.ut_env
 
-		filename = self.inputs[0].abspath()
-		self.ut_exec = getattr(self.generator, 'ut_exec', [filename])
-		if getattr(self.generator, 'ut_fun', None):
-			# FIXME waf 1.8 - add a return statement here?
-			self.generator.ut_fun(self)
-
-		try:
-			fu = getattr(self.generator.bld, 'all_test_paths')
-		except AttributeError:
-			# this operation may be performed by at most #maxjobs
-			fu = os.environ.copy()
-
-			lst = []
-			for g in self.generator.bld.groups:
-				for tg in g:
-					if getattr(tg, 'link_task', None):
-						s = tg.link_task.outputs[0].parent.abspath()
-						if s not in lst:
-							lst.append(s)
-
-			def add_path(dct, path, var):
-				dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
-
-			if Utils.is_win32:
-				add_path(fu, lst, 'PATH')
-			elif Utils.unversioned_sys_platform() == 'darwin':
-				add_path(fu, lst, 'DYLD_LIBRARY_PATH')
-				add_path(fu, lst, 'LD_LIBRARY_PATH')
-			else:
-				add_path(fu, lst, 'LD_LIBRARY_PATH')
-			self.generator.bld.all_test_paths = fu
-
-
-		cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath()
+	def post_run(self):
+		super(utest, self).post_run()
+		if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]:
+			self.generator.bld.task_sigs[self.uid()] = None
 
-                testcmd = getattr(Options.options, 'testcmd', False)
-                if testcmd:
-                        self.ut_exec = (testcmd % self.ut_exec[0]).split(' ')
+	def run(self):
+		"""
+		Execute the test. The execution is always successful, and the results
+		are stored on ``self.generator.bld.utest_results`` for postprocessing.
 
-		proc = Utils.subprocess.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE)
+		Override ``add_test_results`` to interrupt the build
+		"""
+		if hasattr(self.generator, 'ut_run'):
+			return self.generator.ut_run(self)
+
+		self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()])
+		ut_cmd = getattr(self.generator, 'ut_cmd', False)
+		if ut_cmd:
+			self.ut_exec = shlex.split(ut_cmd % Utils.shell_escape(self.ut_exec))
+
+		return self.exec_command(self.ut_exec)
+
+	def exec_command(self, cmd, **kw):
+		self.generator.bld.log_command(cmd, kw)
+		if getattr(Options.options, 'dump_test_scripts', False):
+			script_code = SCRIPT_TEMPLATE % {
+				'python': sys.executable,
+				'env': self.get_test_env(),
+				'cwd': self.get_cwd().abspath(),
+				'cmd': cmd
+			}
+			script_file = self.inputs[0].abspath() + '_run.py'
+			Utils.writef(script_file, script_code, encoding='utf-8')
+			os.chmod(script_file, Utils.O755)
+			if Logs.verbose > 1:
+				Logs.info('Test debug file written as %r' % script_file)
+
+		proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(),
+			stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str))
 		(stdout, stderr) = proc.communicate()
-
-		tup = (filename, proc.returncode, stdout, stderr)
-		self.generator.utest_result = tup
-
+		self.waf_unit_test_results = tup = test_result(self.inputs[0].abspath(), proc.returncode, stdout, stderr, self)
 		testlock.acquire()
 		try:
-			bld = self.generator.bld
-			Logs.debug("ut: %r", tup)
-			try:
-				bld.utest_results.append(tup)
-			except AttributeError:
-				bld.utest_results = [tup]
+			return self.generator.add_test_results(tup)
 		finally:
 			testlock.release()
 
+	def get_cwd(self):
+		return getattr(self.generator, 'ut_cwd', self.inputs[0].parent)
+
 def summary(bld):
 	"""
 	Display an execution summary::
@@ -144,47 +272,49 @@ def summary(bld):
 		total = len(lst)
 		tfail = len([x for x in lst if x[1]])
 
-		Logs.pprint('CYAN', '  tests that pass %d/%d' % (total-tfail, total))
-		for (f, code, out, err) in lst:
-			if not code:
-				Logs.pprint('CYAN', '    %s' % f)
+		Logs.pprint('GREEN', '  tests that pass %d/%d' % (total-tfail, total))
+		for result in lst:
+			if not result.exit_code:
+				Logs.pprint('GREEN', '    %s' % result.test_path)
 
-		Logs.pprint('CYAN', '  tests that fail %d/%d' % (tfail, total))
-		for (f, code, out, err) in lst:
-			if code:
-				Logs.pprint('CYAN', '    %s' % f)
+		Logs.pprint('GREEN' if tfail == 0 else 'RED', '  tests that fail %d/%d' % (tfail, total))
+		for result in lst:
+			if result.exit_code:
+				Logs.pprint('RED', '    %s' % result.test_path)
 
 def set_exit_code(bld):
-        """
-        If any of the tests fail waf will exit with that exit code.
-        This is useful if you have an automated build system which need
-        to report on errors from the tests.
-        You may use it like this:
+	"""
+	If any of the tests fail waf will exit with that exit code.
+	This is useful if you have an automated build system which needs
+	to report on errors from the tests.
+	You may use it like this:
 
 		def build(bld):
 			bld(features='cxx cxxprogram test', source='main.c', target='app')
 			from waflib.Tools import waf_unit_test
 			bld.add_post_fun(waf_unit_test.set_exit_code)
-        """
-        lst = getattr(bld, 'utest_results', [])
-        for (f, code, out, err) in lst:
-                if code:
-                        msg = []
-                        if out:
-                                msg.append('stdout:%s%s' % (os.linesep, out.decode('utf-8')))
-                        if err:
-                                msg.append('stderr:%s%s' % (os.linesep, err.decode('utf-8')))
-                        bld.fatal(os.linesep.join(msg))
+	"""
+	lst = getattr(bld, 'utest_results', [])
+	for result in lst:
+		if result.exit_code:
+			msg = []
+			if result.out:
+				msg.append('stdout:%s%s' % (os.linesep, result.out.decode('utf-8')))
+			if result.err:
+				msg.append('stderr:%s%s' % (os.linesep, result.err.decode('utf-8')))
+			bld.fatal(os.linesep.join(msg))
 
 
 def options(opt):
 	"""
 	Provide the ``--alltests``, ``--notests`` and ``--testcmd`` command-line options.
 	"""
-        opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
+	opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
 	opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
-        opt.add_option('--testcmd', action='store', default=False,
-                       help = 'Run the unit tests using the test-cmd string'
-                              ' example "--test-cmd="valgrind --error-exitcode=1'
-                              ' %s" to run under valgrind', dest='testcmd')
+	opt.add_option('--clear-failed', action='store_true', default=False,
+		help='Force failed unit tests to run again next time', dest='clear_failed_tests')
+	opt.add_option('--testcmd', action='store', default=False, dest='testcmd',
+		help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind')
+	opt.add_option('--dump-test-scripts', action='store_true', default=False,
+		help='Create python scripts to help debug tests', dest='dump_test_scripts')
 

+ 688 - 0
sdk/waf/waflib/Tools/wafcache.py

@@ -0,0 +1,688 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2019 (ita)
+
+"""
+Filesystem-based cache system to share and re-use build artifacts
+
+Cache access operations (copy to and from) are delegated to
+independent pre-forked worker subprocesses.
+
+The following environment variables may be set:
+* WAFCACHE: several possibilities:
+  - File cache:
+    absolute path of the waf cache (~/.cache/wafcache_user,
+    where `user` represents the currently logged-in user)
+  - URL to a cache server, for example:
+    export WAFCACHE=http://localhost:8080/files/
+    in that case, GET/POST requests are made to urls of the form
+    http://localhost:8080/files/000000000/0 (cache management is delegated to the server)
+  - GCS, S3 or MINIO bucket
+    gs://my-bucket/    (uses gsutil command line tool or WAFCACHE_CMD)
+    s3://my-bucket/    (uses aws command line tool or WAFCACHE_CMD)
+    minio://my-bucket/ (uses mc command line tool or WAFCACHE_CMD)
+* WAFCACHE_CMD: bucket upload/download command, for example:
+    WAFCACHE_CMD="gsutil cp %{SRC} %{TGT}"
+  Note that the WAFCACHE bucket value is used for the source or destination
+  depending on the operation (upload or download). For example, with:
+    WAFCACHE="gs://mybucket/"
+  the following commands may be run:
+    gsutil cp build/myprogram  gs://mybucket/aa/aaaaa/1
+    gsutil cp gs://mybucket/bb/bbbbb/2 build/somefile
+* WAFCACHE_NO_PUSH: if set, disables pushing to the cache
+* WAFCACHE_VERBOSITY: if set, displays more detailed cache operations
+* WAFCACHE_STATS: if set, displays cache usage statistics on exit
+
+File cache specific options:
+  Files are copied using hard links by default; if the cache is located
+  onto another partition, the system switches to file copies instead.
+* WAFCACHE_TRIM_MAX_FOLDER: maximum amount of tasks to cache (1M)
+* WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB)
+* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try
+                                   and trim the cache (3 minutes)
+
+Upload specific options:
+* WAFCACHE_ASYNC_WORKERS: define a number of workers to upload results asynchronously
+                          this may improve build performance with many/long file uploads
+                          the default is unset (synchronous uploads)
+* WAFCACHE_ASYNC_NOWAIT: do not wait for uploads to complete (default: False)
+                         this requires asynchronous uploads to have an effect
+
+Usage::
+
+	def build(bld):
+		bld.load('wafcache')
+		...
+
+To troubleshoot::
+
+	waf clean build --zone=wafcache
+"""
+
+import atexit, base64, errno, getpass, os, re, shutil, sys, time, threading, traceback, shlex
+try:
+	import subprocess32 as subprocess
+except ImportError:
+	import subprocess
+
# Default cache location: ~/.cache/wafcache_<user>, falling back to /tmp
# when the user has no ~/.cache directory
base_cache = os.path.expanduser('~/.cache/')
if not os.path.isdir(base_cache):
	base_cache = '/tmp/'
default_wafcache_dir = os.path.join(base_cache, 'wafcache_' + getpass.getuser())

# Tunables, all overridable through environment variables (see module docstring)
CACHE_DIR = os.environ.get('WAFCACHE', default_wafcache_dir)  # cache directory, http URL or bucket
WAFCACHE_CMD = os.environ.get('WAFCACHE_CMD')  # custom bucket copy command with %{SRC}/%{TGT} placeholders
TRIM_MAX_FOLDERS = int(os.environ.get('WAFCACHE_TRIM_MAX_FOLDER', 1000000))  # maximum number of cached task entries
EVICT_INTERVAL_MINUTES = int(os.environ.get('WAFCACHE_EVICT_INTERVAL_MINUTES', 3))  # minimum delay between trim attempts
EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10))  # maximum total cache size in bytes
WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0  # fetch-only mode: never upload results
WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0  # verbose console output for cache operations
WAFCACHE_STATS = 1 if os.environ.get('WAFCACHE_STATS') else 0  # print hit/miss statistics after the build
WAFCACHE_ASYNC_WORKERS = os.environ.get('WAFCACHE_ASYNC_WORKERS')  # number of async upload threads; unset = synchronous
WAFCACHE_ASYNC_NOWAIT = os.environ.get('WAFCACHE_ASYNC_NOWAIT')  # do not wait for pending uploads at build end
# prefix marking a successful worker reply (see cache_command and loop)
OK = "ok"

# matches the %{SRC} and %{TGT} placeholders inside WAFCACHE_CMD
re_waf_cmd = re.compile('(?P<src>%{SRC})|(?P<tgt>%{TGT})')
+
+try:
+	import cPickle
+except ImportError:
+	import pickle as cPickle
+
+if __name__ != '__main__':
+	from waflib import Task, Logs, Utils, Build
+
def can_retrieve_cache(self):
	"""
	New method bound to waf Task classes: attempts to obtain this task's
	output files from the cache.

	Returns True and sets ``self.cached`` when every output was fetched,
	False otherwise.
	"""
	if not self.outputs:
		return False

	self.cached = False

	signature = self.signature()
	key = Utils.to_hex(self.uid() + signature)

	if WAFCACHE_STATS:
		self.generator.bld.cache_reqs += 1

	targets = [out.abspath() for out in self.outputs]
	worker = get_process()
	status = cache_command(worker, key, [], targets)
	process_pool.append(worker)

	if not status.startswith(OK):
		# cache miss (or transfer error): fall back to running the task
		if WAFCACHE_VERBOSITY:
			Logs.pprint('YELLOW', '  No cache entry %s' % targets)
		else:
			Logs.debug('wafcache: No cache entry %s: %s', targets, status)
		return False

	if WAFCACHE_VERBOSITY:
		Logs.pprint('CYAN', '  Fetched %r from cache' % targets)
	else:
		Logs.debug('wafcache: fetched %r from cache', targets)
	if WAFCACHE_STATS:
		self.generator.bld.cache_hits += 1

	self.cached = True
	return True
+
def put_files_cache(self):
	"""
	New method for waf Task classes: uploads the task outputs to the cache.

	Does nothing when pushing is disabled (WAFCACHE_NO_PUSH), when the
	outputs were themselves fetched from the cache, or when an output file
	is missing on disk. Uploads run inline, or on executor threads when
	WAFCACHE_ASYNC_WORKERS is set.
	"""
	if WAFCACHE_NO_PUSH or getattr(self, 'cached', None) or not self.outputs:
		return

	files_from = []
	for node in self.outputs:
		path = node.abspath()
		if not os.path.isfile(path):
			return
		files_from.append(path)

	bld = self.generator.bld
	old_sig = self.signature()

	# invalidate the cached hashes of the input files, then recompute the
	# task signature below to detect inputs modified during the build
	for node in self.inputs:
		try:
			del node.ctx.cache_sig[node]
		except KeyError:
			pass

	delattr(self, 'cache_sig')
	sig = self.signature()

	def _async_put_files_cache(bld, ssig, files_from):
		# runs either inline or on an executor thread (see below)
		proc = get_process()
		if WAFCACHE_ASYNC_WORKERS:
			with bld.wafcache_lock:
				if bld.wafcache_stop:
					# build tear-down in progress: abort this upload
					process_pool.append(proc)
					return
				bld.wafcache_procs.add(proc)

		err = cache_command(proc, ssig, files_from, [])
		process_pool.append(proc)
		if err.startswith(OK):
			if WAFCACHE_VERBOSITY:
				Logs.pprint('CYAN', '  Successfully uploaded %s to cache' % files_from)
			else:
				Logs.debug('wafcache: Successfully uploaded %r to cache', files_from)
			if WAFCACHE_STATS:
				# NOTE(review): not guarded by wafcache_lock — assumed tolerable
				# for a statistics counter; confirm if exact counts matter
				bld.cache_puts += 1
		else:
			if WAFCACHE_VERBOSITY:
				Logs.pprint('RED', '  Error caching step results %s: %s' % (files_from, err))
			else:
				Logs.debug('wafcache: Error caching results %s: %s', files_from, err)

	if old_sig == sig:
		ssig = Utils.to_hex(self.uid() + sig)
		if WAFCACHE_ASYNC_WORKERS:
			fut = bld.wafcache_executor.submit(_async_put_files_cache, bld, ssig, files_from)
			bld.wafcache_uploads.append(fut)
		else:
			_async_put_files_cache(bld, ssig, files_from)
	else:
		# an input changed while the task was running: the outputs no longer
		# correspond to the current signature, so do not publish them
		Logs.debug('wafcache: skipped %r upload due to late input modifications %r', self.outputs, self.inputs)

	# self.cache_sig was recreated by the signature() call above
	bld.task_sigs[self.uid()] = self.cache_sig
+
def hash_env_vars(self, env, vars_lst):
	"""
	Reimplement BuildContext.hash_env_vars so that the resulting hash does not depend on local paths
	"""
	if not env.table:
		env = env.parent
		if not env:
			return Utils.SIG_NIL

	key = str(id(env)) + str(vars_lst)
	cache = getattr(self, 'cache_env', None)
	if cache is None:
		cache = self.cache_env = {}
	elif key in cache:
		return cache[key]

	data = str([env[name] for name in vars_lst])
	# strip the absolute project path so the hash is location-independent
	data = data.replace(repr(self.srcnode.abspath())[:-1], '')

	digester = Utils.md5()
	digester.update(data.encode())
	result = digester.digest()

	Logs.debug('envhash: %r %r', result, data)

	cache[key] = result
	return result
+
def uid(self):
	"""
	Reimplement Task.uid() so that the signature does not depend on local paths
	"""
	try:
		return self.uid_
	except AttributeError:
		digester = Utils.md5()
		root = self.generator.bld.srcnode
		digester.update(self.__class__.__name__.encode())
		# hash project-relative paths instead of absolute ones so that
		# identical checkouts in different folders share cache entries
		for node in self.inputs + self.outputs:
			digester.update(node.path_from(root).encode())
		self.uid_ = digester.digest()
		return self.uid_
+
+
def make_cached(cls):
	"""
	Enable the waf cache for a given task class by wrapping its
	``run``/``post_run`` methods; idempotent, and a no-op for classes
	that opted out via ``nocache``
	"""
	if getattr(cls, 'nocache', None) or getattr(cls, 'has_cache', False):
		return

	# never cache version-symlink or installation tasks
	if '%s.%s' % (cls.__module__, cls.__name__) in ('waflib.Tools.ccroot.vnum', 'waflib.Build.inst'):
		return

	orig_run = getattr(cls, 'run', None)
	orig_post_run = getattr(cls, 'post_run', None)

	def cached_run(self):
		# a cache hit short-circuits the actual command
		if not getattr(self, 'nocache', False) and self.can_retrieve_cache():
			return 0
		return orig_run(self)

	def cached_post_run(self):
		if getattr(self, 'nocache', False):
			return orig_post_run(self)
		result = orig_post_run(self)
		self.put_files_cache()
		return result

	cls.run = cached_run
	cls.post_run = cached_post_run
	cls.has_cache = True
+
# Pool of idle pre-forked worker subprocesses; a worker is popped for the
# duration of one cache operation and appended back afterwards
process_pool = []
def get_process():
	"""
	Returns a worker process that can process waf cache commands
	The worker process is assumed to be returned to the process pool when unused
	"""
	try:
		# EAFP: reuse an idle worker when one is available
		return process_pool.pop()
	except IndexError:
		# pool exhausted: spawn a new worker executing this very file as a
		# standalone script (see the __main__ block at the end of the module)
		filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'wafcache.py'
		cmd = [sys.executable, '-c', Utils.readf(filepath)]
		return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0)
+
def atexit_pool():
	"""Kill all worker processes remaining in the pool at interpreter exit"""
	for worker in process_pool:
		worker.kill()
atexit.register(atexit_pool)
+
def build(bld):
	"""
	Called during the build process to enable file caching: sets up the
	optional async upload executor and statistics hooks, pre-forks one
	worker per build job, and instruments the Task classes
	"""

	if WAFCACHE_ASYNC_WORKERS:
		try:
			num_workers = int(WAFCACHE_ASYNC_WORKERS)
		except ValueError:
			Logs.warn('Invalid WAFCACHE_ASYNC_WORKERS specified: %r' % WAFCACHE_ASYNC_WORKERS)
		else:
			from concurrent.futures import ThreadPoolExecutor
			bld.wafcache_executor = ThreadPoolExecutor(max_workers=num_workers)
			bld.wafcache_uploads = []
			bld.wafcache_procs = set([])
			bld.wafcache_stop = False
			bld.wafcache_lock = threading.Lock()

			def finalize_upload_async(bld):
				# post-build hook: cancel pending uploads (NOWAIT) or wait for them
				if WAFCACHE_ASYNC_NOWAIT:
					with bld.wafcache_lock:
						bld.wafcache_stop = True

					for fut in reversed(bld.wafcache_uploads):
						fut.cancel()

					for proc in bld.wafcache_procs:
						proc.kill()

					bld.wafcache_procs.clear()
				else:
					Logs.pprint('CYAN', '... waiting for wafcache uploads to complete (%s uploads)' % len(bld.wafcache_uploads))
				bld.wafcache_executor.shutdown(wait=True)
			# BUGFIX: register the hook only when the executor was actually
			# created; previously an invalid WAFCACHE_ASYNC_WORKERS value still
			# registered it, raising AttributeError at the end of the build
			bld.add_post_fun(finalize_upload_async)

	if WAFCACHE_STATS:
		# Init counter for statistics and hook to print results at the end
		bld.cache_reqs = bld.cache_hits = bld.cache_puts = 0

		def printstats(bld):
			hit_ratio = 0
			if bld.cache_reqs > 0:
				hit_ratio = (bld.cache_hits / bld.cache_reqs) * 100
			Logs.pprint('CYAN', '  wafcache stats: %s requests, %s hits (ratio: %.2f%%), %s writes' %
					 (bld.cache_reqs, bld.cache_hits, hit_ratio, bld.cache_puts) )
		bld.add_post_fun(printstats)

	if process_pool:
		# already called once
		return

	# pre-allocation: one worker subprocess per build job
	processes = [get_process() for x in range(bld.jobs)]
	process_pool.extend(processes)

	# instrument the task classes and the build context
	Task.Task.can_retrieve_cache = can_retrieve_cache
	Task.Task.put_files_cache = put_files_cache
	Task.Task.uid = uid
	Build.BuildContext.hash_env_vars = hash_env_vars
	for x in reversed(list(Task.classes.values())):
		make_cached(x)
+
def cache_command(proc, sig, files_from, files_to):
	"""
	Runs one cache operation through the given worker process.

	The request is a base64-encoded pickled list ``[sig, files_from,
	files_to]`` written as a single line to the worker's stdin; one of the
	two file lists is expected to be empty (non-empty ``files_from`` means
	an upload, non-empty ``files_to`` means a download). The worker replies
	with one base64-encoded pickled line which is decoded and returned: a
	string starting with ``OK`` on success, or an error traceback otherwise
	(see :py:func:`loop`).
	"""
	obj = base64.b64encode(cPickle.dumps([sig, files_from, files_to]))
	proc.stdin.write(obj)
	proc.stdin.write('\n'.encode())
	proc.stdin.flush()
	# blocking read: the worker always answers with exactly one line
	obj = proc.stdout.readline()
	if not obj:
		raise OSError('Preforked sub-process %r died' % proc.pid)
	return cPickle.loads(base64.b64decode(obj))
+
# Prefer hard links (cheap, atomic) for cache transfers; fall back to real
# copies when os.link is not provided on this platform.
# BUGFIX: a missing os attribute raises AttributeError, not NameError, so
# the original `except NameError` could never select the fallback.
try:
	copyfun = os.link
except AttributeError:
	copyfun = shutil.copy2
+
def atomic_copy(orig, dest):
	"""
	Copy files to the cache, the operation is atomic for a given file:
	the data is first written to ``dest + '.tmp'`` and then renamed into
	place, so other processes never observe a partially-written entry.
	"""
	global copyfun
	tmp = dest + '.tmp'
	up = os.path.dirname(dest)
	try:
		os.makedirs(up)
	except OSError:
		# the folder may already exist; a real permission problem will
		# surface in the copy below anyway
		pass

	try:
		copyfun(orig, tmp)
	except OSError as e:
		if e.errno == errno.EXDEV:
			# hard links cannot cross filesystems: permanently switch the
			# module to plain copies for the rest of the session
			copyfun = shutil.copy2
			copyfun(orig, tmp)
		else:
			raise
	os.rename(tmp, dest)
+
def lru_trim():
	"""
	the cache folders take the form:
	`CACHE_DIR/0b/0b180f82246d726ece37c8ccd0fb1cde2650d7bfcf122ec1f169079a3bfc0ab9`
	they are listed in order of last access, and then removed
	until the amount of folders is within TRIM_MAX_FOLDERS and the total space
	taken by files is less than EVICT_MAX_BYTES
	"""
	# collect (mtime, size, path) for every cached task folder
	lst = []
	for up in os.listdir(CACHE_DIR):
		if len(up) == 2:
			sub = os.path.join(CACHE_DIR, up)
			for hval in os.listdir(sub):
				path = os.path.join(sub, hval)

				size = 0
				for fname in os.listdir(path):
					try:
						size += os.lstat(os.path.join(path, fname)).st_size
					except OSError:
						# the entry may be removed concurrently; ignore
						pass
				lst.append((os.stat(path).st_mtime, size, path))

	# newest first, so that pop() below evicts the least recently used entry
	lst.sort(key=lambda x: x[0])
	lst.reverse()

	tot = sum(x[1] for x in lst)
	while tot > EVICT_MAX_BYTES or len(lst) > TRIM_MAX_FOLDERS:
		_, tmp_size, path = lst.pop()
		tot -= tmp_size

		# rename before deleting so that the removal looks atomic to
		# other processes accessing the same folder
		tmp = path + '.remove'
		try:
			shutil.rmtree(tmp)
		except OSError:
			pass
		try:
			os.rename(path, tmp)
		except OSError:
			sys.stderr.write('Could not rename %r to %r\n' % (path, tmp))
		else:
			try:
				shutil.rmtree(tmp)
			except OSError:
				sys.stderr.write('Could not remove %r\n' % tmp)
	sys.stderr.write("Cache trimmed: %r bytes in %r folders left\n" % (tot, len(lst)))
+
+
def lru_evict():
	"""
	Reduce the cache size: triggers a trim (see :py:func:`lru_trim`) at most
	once per EVICT_INTERVAL_MINUTES, using the mtime of ``CACHE_DIR/all.lock``
	as the timestamp of the last attempt and an exclusive lock on that file
	to ensure a single cleaning process runs at a time.
	"""
	lockfile = os.path.join(CACHE_DIR, 'all.lock')
	try:
		st = os.stat(lockfile)
	except EnvironmentError as e:
		if e.errno == errno.ENOENT:
			# BUGFIX: create the lock file and return; the original code fell
			# through and read the unbound variable `st` (NameError)
			with open(lockfile, 'w') as f:
				f.write('')
			return
		else:
			# any other errors such as permissions
			raise

	if st.st_mtime < time.time() - EVICT_INTERVAL_MINUTES * 60:
		# check every EVICT_INTERVAL_MINUTES minutes if the cache is too big
		# O_CLOEXEC is unnecessary because no cleaning processes are spawned
		fd = os.open(lockfile, os.O_RDWR | os.O_CREAT, 0o755)
		try:
			try:
				import fcntl
			except ImportError:
				# Windows: emulate flock() with LockFileEx on the file handle
				import msvcrt, ctypes, ctypes.wintypes
				handle = msvcrt.get_osfhandle(fd)

				# BUGFIX: ctypes.windll is not callable; instantiate WinDLL
				# directly to pass use_last_error
				kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
				DWORD = ctypes.wintypes.DWORD
				HANDLE = ctypes.wintypes.HANDLE
				BOOL = ctypes.wintypes.BOOL
				class DUMMYSTRUCTNAME(ctypes.Structure):
					# BUGFIX: the ctypes attribute is `_fields_`; `_fields` was silently ignored
					_fields_ = [('Offset', DWORD), ('OffsetHigh', DWORD)]
				class DUMMYUNIONNAME(ctypes.Union):
					_fields_ = [('_dummystructname', DUMMYSTRUCTNAME), ('Pointer', ctypes.c_void_p)]
				class OVERLAPPED(ctypes.Structure):
					_fields_ = [('Internal', ctypes.c_void_p), ('InternalHigh', ctypes.c_void_p), ('_dummyunionname', DUMMYUNIONNAME), ('hEvent', HANDLE)]

				LockFileEx = kernel32.LockFileEx
				# BUGFIX: POINTER and BOOL were referenced unqualified but never imported
				LockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, DWORD, ctypes.POINTER(OVERLAPPED)]
				LockFileEx.restype = BOOL

				UnlockFileEx = kernel32.UnlockFileEx
				UnlockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, ctypes.POINTER(OVERLAPPED)]
				UnlockFileEx.restype = BOOL

				# 3 == LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY
				if LockFileEx(handle, 3, 0, 1, 0, ctypes.pointer(OVERLAPPED())):
					try:
						lru_trim()
						os.utime(lockfile, None)
					finally:
						# BUGFIX: `win32file` was never imported; use the
						# UnlockFileEx function prepared above
						UnlockFileEx(handle, 0, 1, 0, ctypes.pointer(OVERLAPPED()))
				else:
					# BUGFIX: with use_last_error=True the saved code must be
					# read through ctypes.get_last_error()
					last_error = ctypes.get_last_error()
					if last_error == 33: # ERROR_LOCK_VIOLATION: already locked
						if WAFCACHE_VERBOSITY:
							sys.stderr.write('wafcache: another cleaning process is running\n')
					else:
						raise OSError(last_error)

			else:
				try:
					fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
				except EnvironmentError:
					if WAFCACHE_VERBOSITY:
						sys.stderr.write('wafcache: another cleaning process is running\n')
				else:
					# now do the actual cleanup
					lru_trim()
					os.utime(lockfile, None)
		finally:
			os.close(fd)
+
class netcache(object):
	"""
	Cache accessor for a remote HTTP cache server (``WAFCACHE=http://...``):
	files are uploaded with POST requests and fetched with GET requests
	"""
	def __init__(self):
		import urllib3
		self.http = urllib3.PoolManager()

	def url_of(self, sig, i):
		"""Builds the server URL for output file *i* of task signature *sig*"""
		return "%s/%s/%s" % (CACHE_DIR, sig, i)

	def upload(self, file_path, sig, i):
		"""Sends one file to the server; raises OSError on HTTP errors"""
		url = self.url_of(sig, i)
		with open(file_path, 'rb') as handle:
			payload = handle.read()
		response = self.http.request('POST', url, timeout=60,
			fields={'file': ('%s/%s' % (sig, i), payload)})
		if response.status >= 400:
			raise OSError("Invalid status %r %r" % (url, response.status))

	def download(self, file_path, sig, i):
		"""Fetches one file from the server; raises OSError on HTTP errors"""
		url = self.url_of(sig, i)
		with self.http.request('GET', url, preload_content=False, timeout=60) as remote:
			if remote.status >= 400:
				raise OSError("Invalid status %r %r" % (url, remote.status))
			with open(file_path, 'wb') as local:
				shutil.copyfileobj(remote, local)

	def copy_to_cache(self, sig, files_from, files_to):
		"""Uploads all task outputs; returns OK or an error traceback string"""
		try:
			for idx, path in enumerate(files_from):
				# symbolic links are not uploaded
				if not os.path.islink(path):
					self.upload(path, sig, idx)
		except Exception:
			return traceback.format_exc()
		return OK

	def copy_from_cache(self, sig, files_from, files_to):
		"""Downloads all task outputs; returns OK or an error traceback string"""
		try:
			for idx, path in enumerate(files_to):
				self.download(path, sig, idx)
		except Exception:
			return traceback.format_exc()
		return OK
+
class fcache(object):
	"""
	Cache accessor for a local (possibly shared) cache directory
	"""
	def __init__(self):
		if not os.path.exists(CACHE_DIR):
			try:
				os.makedirs(CACHE_DIR)
			except OSError:
				# another process may have created it in the meantime
				pass
		if not os.path.exists(CACHE_DIR):
			raise ValueError('Could not initialize the cache directory')

	def copy_to_cache(self, sig, files_from, files_to):
		"""
		Copy files to the cache, existing files are overwritten,
		and the copy is atomic only for a given file, not for all files
		that belong to a given task object
		"""
		try:
			for idx, src in enumerate(files_from):
				atomic_copy(src, os.path.join(CACHE_DIR, sig[:2], sig, str(idx)))
		except Exception:
			return traceback.format_exc()
		# caching was successful, so there may now be entries to trim
		try:
			lru_evict()
		except Exception:
			return traceback.format_exc()
		return OK

	def copy_from_cache(self, sig, files_from, files_to):
		"""
		Copy files from the cache
		"""
		entry = os.path.join(CACHE_DIR, sig[:2], sig)
		try:
			for idx, dst in enumerate(files_to):
				atomic_copy(os.path.join(entry, str(idx)), dst)

			# success! mark the entry as recently used for LRU eviction
			os.utime(entry, None)
		except Exception:
			return traceback.format_exc()
		return OK
+
class bucket_cache(object):
	"""
	Cache accessor for remote storage buckets (gs://, s3:// or minio://),
	delegating transfers to the matching command-line tool or WAFCACHE_CMD
	"""
	def bucket_copy(self, source, target):
		"""
		Copies one file between *source* and *target* through an external
		command; raises OSError when the command fails
		"""
		if WAFCACHE_CMD:
			# custom command: substitute the %{SRC} and %{TGT} placeholders
			def replacer(match):
				if match.group('src'):
					return source
				elif match.group('tgt'):
					return target
			cmd = [re_waf_cmd.sub(replacer, x) for x in shlex.split(WAFCACHE_CMD)]
		elif CACHE_DIR.startswith('s3://'):
			cmd = ['aws', 's3', 'cp', source, target]
		elif CACHE_DIR.startswith('gs://'):
			cmd = ['gsutil', 'cp', source, target]
		else:
			cmd = ['mc', 'cp', source, target]

		proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
		out, err = proc.communicate()
		if proc.returncode:
			raise OSError('Error copy %r to %r using: %r (exit %r):\n  out:%s\n  err:%s' % (
				source, target, cmd, proc.returncode, out.decode(errors='replace'), err.decode(errors='replace')))

	def copy_to_cache(self, sig, files_from, files_to):
		"""Uploads all task outputs; returns OK or an error traceback string"""
		try:
			for i, x in enumerate(files_from):
				dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
				self.bucket_copy(x, dest)
		except Exception:
			return traceback.format_exc()
		return OK

	def copy_from_cache(self, sig, files_from, files_to):
		"""Downloads all task outputs; returns OK or an error traceback string"""
		try:
			for i, x in enumerate(files_to):
				orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
				self.bucket_copy(orig, x)
		# BUGFIX: catch Exception (not just EnvironmentError) so that errors
		# such as shlex parsing failures are reported back to the build
		# instead of killing the worker, consistently with the other accessors
		except Exception:
			return traceback.format_exc()
		return OK
+
def loop(service):
	"""
	This function is run when this file is run as a standalone python script,
	it assumes a parent process that will communicate the commands to it
	as pickled-encoded tuples (one line per command)

	The commands are to copy files to the cache or copy files from the
	cache to a target destination

	:param service: cache accessor providing ``copy_to_cache`` and
		``copy_from_cache`` (one of fcache, netcache or bucket_cache)
	"""
	# one operation is performed at a single time by a single process
	# therefore stdin never has more than one line
	txt = sys.stdin.readline().strip()
	if not txt:
		# parent process probably ended
		sys.exit(1)
	ret = OK

	# protocol mirror of cache_command(): one base64-encoded pickled line
	[sig, files_from, files_to] = cPickle.loads(base64.b64decode(txt))
	if files_from:
		# TODO return early when pushing files upstream
		ret = service.copy_to_cache(sig, files_from, files_to)
	elif files_to:
		# the build process waits for workers to (possibly) obtain files from the cache
		ret = service.copy_from_cache(sig, files_from, files_to)
	else:
		ret = "Invalid command"

	# reply with a single line: OK-prefixed string or an error traceback
	obj = base64.b64encode(cPickle.dumps(ret))
	sys.stdout.write(obj.decode())
	sys.stdout.write('\n')
	sys.stdout.flush()
+
if __name__ == '__main__':
	# Worker mode: this module was launched as a standalone script by
	# get_process(); select the cache accessor matching the WAFCACHE value
	if CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://') or CACHE_DIR.startswith('minio://'):
		if CACHE_DIR.startswith('minio://'):
			CACHE_DIR = CACHE_DIR[8:]   # minio doesn't need the protocol part, uses config aliases
		service = bucket_cache()
	elif CACHE_DIR.startswith('http'):
		service = netcache()
	else:
		service = fcache()
	# serve one command per iteration until the parent process goes away
	while 1:
		try:
			loop(service)
		except KeyboardInterrupt:
			break
+

+ 74 - 61
sdk/waf/waflib/Tools/winres.py

@@ -4,18 +4,20 @@
 
 "Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}"
 
-import re, traceback
-from waflib import Task, Logs, Utils
+import os
+import re
+from waflib import Task, Utils
 from waflib.TaskGen import extension
 from waflib.Tools import c_preproc
+from waflib import Utils
 
 @extension('.rc')
 def rc_file(self, node):
 	"""
-	Bind the .rc extension to a winrc task
+	Binds the .rc extension to a winrc task
 	"""
 	obj_ext = '.rc.o'
-	if self.env['WINRC_TGT_F'] == '/fo':
+	if self.env.WINRC_TGT_F == '/fo':
 		obj_ext = '.res'
 	rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
 	try:
@@ -24,15 +26,22 @@ def rc_file(self, node):
 		self.compiled_tasks = [rctask]
 
 re_lines = re.compile(
-	'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\
-	'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
+	r'(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\
+	r'(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
 	re.IGNORECASE | re.MULTILINE)
 
 class rc_parser(c_preproc.c_parser):
-	def filter_comments(self, filepath):
-		code = Utils.readf(filepath)
+	"""
+	Calculates dependencies in .rc files
+	"""
+	def filter_comments(self, node):
+		"""
+		Overrides :py:meth:`waflib.Tools.c_preproc.c_parser.filter_comments`
+		"""
+		code = node.read()
 		if c_preproc.use_trigraphs:
-			for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
+			for (a, b) in c_preproc.trig_def:
+				code = code.split(a).join(b)
 		code = c_preproc.re_nl.sub('', code)
 		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
 		ret = []
@@ -43,72 +52,76 @@ class rc_parser(c_preproc.c_parser):
 				ret.append(('include', m.group(5)))
 		return ret
 
-	def addlines(self, node):
-		self.currentnode_stack.append(node.parent)
-		filepath = node.abspath()
-
-		self.count_files += 1
-		if self.count_files > c_preproc.recursion_limit:
-			raise c_preproc.PreprocError("recursion limit exceeded")
-		pc = self.parse_cache
-		Logs.debug('preproc: reading file %r', filepath)
-		try:
-			lns = pc[filepath]
-		except KeyError:
-			pass
-		else:
-			self.lines.extend(lns)
-			return
-
-		try:
-			lines = self.filter_comments(filepath)
-			lines.append((c_preproc.POPFILE, ''))
-			lines.reverse()
-			pc[filepath] = lines
-			self.lines.extend(lines)
-		except IOError:
-			raise c_preproc.PreprocError("could not read the file %s" % filepath)
-		except Exception:
-			if Logs.verbose > 0:
-				Logs.error("parsing %s failed" % filepath)
-				traceback.print_exc()
-
 class winrc(Task.Task):
 	"""
-	Task for compiling resource files
+	Compiles resource files
 	"""
 	run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
 	color   = 'BLUE'
-
 	def scan(self):
 		tmp = rc_parser(self.generator.includes_nodes)
 		tmp.start(self.inputs[0], self.env)
-		nodes = tmp.nodes
-		names = tmp.names
-
-		if Logs.verbose:
-			Logs.debug('deps: deps for %s: %r; unresolved %r' % (str(self), nodes, names))
+		return (tmp.nodes, tmp.names)
+
+	def exec_command(self, cmd, **kw):
+		if self.env.WINRC_TGT_F == '/fo':
+			# Since winres include paths may contain spaces, they do not fit in
+			# response files and are best passed as environment variables
+			replace_cmd = []
+			incpaths = []
+			while cmd:
+				# filter include path flags
+				flag = cmd.pop(0)
+				if flag.upper().startswith('/I'):
+					if len(flag) == 2:
+						incpaths.append(cmd.pop(0))
+					else:
+						incpaths.append(flag[2:])
+				else:
+					replace_cmd.append(flag)
+			cmd = replace_cmd
+			if incpaths:
+				# append to existing environment variables in INCLUDE
+				env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
+				pre_includes = env.get('INCLUDE', '')
+				env['INCLUDE'] = pre_includes + os.pathsep + os.pathsep.join(incpaths)
+
+		return super(winrc, self).exec_command(cmd, **kw)
+
+	def quote_flag(self, flag):
+		if self.env.WINRC_TGT_F == '/fo':
+			# winres does not support quotes around flags in response files
+			return flag
+
+		return super(winrc, self).quote_flag(flag)
 
-		return (nodes, names)
 
 def configure(conf):
 	"""
-	Detect the programs RC or windres, depending on the C/C++ compiler in use
+	Detects the programs RC or windres, depending on the C/C++ compiler in use
 	"""
 	v = conf.env
-	v['WINRC_TGT_F'] = '-o'
-	v['WINRC_SRC_F'] = '-i'
-
-	# find rc.exe
-	if not conf.env.WINRC:
+	if not v.WINRC:
 		if v.CC_NAME == 'msvc':
-			conf.find_program('RC', var='WINRC', path_list = v['PATH'])
-			v['WINRC_TGT_F'] = '/fo'
-			v['WINRC_SRC_F'] = ''
+			if Utils.is_win32:
+				conf.find_program('RC', var='WINRC', path_list=v.PATH)
+			else:
+				llvm_env_path = conf.environ.get('LLVM_PATH')
+				llvm_path = None
+				if llvm_env_path:
+					llvm_path = llvm_env_path
+				elif 'LLVM_PATH' in v:
+					llvm_path = v['LLVM_PATH']
+
+				paths = v.PATH
+				if llvm_path:
+					paths = [llvm_path] + v.PATH
+				conf.find_program('llvm-rc', var='WINRC', path_list=paths)
+
+			v.WINRC_TGT_F = '/fo'
+			v.WINRC_SRC_F = ''
 		else:
-			conf.find_program('windres', var='WINRC', path_list = v['PATH'])
-	if not conf.env.WINRC:
-		conf.fatal('winrc was not found!')
-
-	v['WINRCFLAGS'] = []
+			conf.find_program('windres', var='WINRC', path_list=v.PATH)
+			v.WINRC_TGT_F = '-o'
+			v.WINRC_SRC_F = '-i'
 

+ 26 - 30
sdk/waf/waflib/Tools/xlc.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 # Ralf Habacker, 2006 (rh)
 # Yinon Ehrlich, 2009
 # Michael Kuhn, 2009
@@ -11,13 +11,11 @@ from waflib.Configure import conf
 @conf
 def find_xlc(conf):
 	"""
-	Detect the Aix C compiler
+	Detects the Aix C compiler
 	"""
 	cc = conf.find_program(['xlc_r', 'xlc'], var='CC')
-	cc = conf.cmd_to_list(cc)
 	conf.get_xlc_version(cc)
 	conf.env.CC_NAME = 'xlc'
-	conf.env.CC      = cc
 
 @conf
 def xlc_common_flags(conf):
@@ -26,38 +24,36 @@ def xlc_common_flags(conf):
 	"""
 	v = conf.env
 
-	v['CC_SRC_F']            = []
-	v['CC_TGT_F']            = ['-c', '-o']
+	v.CC_SRC_F            = []
+	v.CC_TGT_F            = ['-c', '-o']
 
-	# linker
-	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
-	v['CCLNK_SRC_F']         = []
-	v['CCLNK_TGT_F']         = ['-o']
-	v['CPPPATH_ST']          = '-I%s'
-	v['DEFINES_ST']          = '-D%s'
+	if not v.LINK_CC:
+		v.LINK_CC = v.CC
 
-	v['LIB_ST']              = '-l%s' # template for adding libs
-	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-	v['STLIB_ST']            = '-l%s'
-	v['STLIBPATH_ST']        = '-L%s'
-	v['RPATH_ST']            = '-Wl,-rpath,%s'
+	v.CCLNK_SRC_F         = []
+	v.CCLNK_TGT_F         = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
 
-	v['SONAME_ST']           = []
-	v['SHLIB_MARKER']        = []
-	v['STLIB_MARKER']        = []
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
 
-	# program
-	v['LINKFLAGS_cprogram']  = ['-Wl,-brtl']
-	v['cprogram_PATTERN']    = '%s'
+	v.SONAME_ST           = []
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
 
-	# shared library
-	v['CFLAGS_cshlib']       = ['-fPIC']
-	v['LINKFLAGS_cshlib']    = ['-G', '-Wl,-brtl,-bexpfull']
-	v['cshlib_PATTERN']      = 'lib%s.so'
+	v.LINKFLAGS_cprogram  = ['-Wl,-brtl']
+	v.cprogram_PATTERN    = '%s'
 
-	# static lib
-	v['LINKFLAGS_cstlib']    = []
-	v['cstlib_PATTERN']      = 'lib%s.a'
+	v.CFLAGS_cshlib       = ['-fPIC']
+	v.LINKFLAGS_cshlib    = ['-G', '-Wl,-brtl,-bexpfull']
+	v.cshlib_PATTERN      = 'lib%s.so'
+
+	v.LINKFLAGS_cstlib    = []
+	v.cstlib_PATTERN      = 'lib%s.a'
 
 def configure(conf):
 	conf.find_xlc()

+ 26 - 30
sdk/waf/waflib/Tools/xlcxx.py

@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
+# Thomas Nagy, 2006-2018 (ita)
 # Ralf Habacker, 2006 (rh)
 # Yinon Ehrlich, 2009
 # Michael Kuhn, 2009
@@ -11,13 +11,11 @@ from waflib.Configure import conf
 @conf
 def find_xlcxx(conf):
 	"""
-	Detect the Aix C++ compiler
+	Detects the Aix C++ compiler
 	"""
 	cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX')
-	cxx = conf.cmd_to_list(cxx)
 	conf.get_xlc_version(cxx)
 	conf.env.CXX_NAME = 'xlc++'
-	conf.env.CXX      = cxx
 
 @conf
 def xlcxx_common_flags(conf):
@@ -26,38 +24,36 @@ def xlcxx_common_flags(conf):
 	"""
 	v = conf.env
 
-	v['CXX_SRC_F']           = []
-	v['CXX_TGT_F']           = ['-c', '-o']
+	v.CXX_SRC_F           = []
+	v.CXX_TGT_F           = ['-c', '-o']
 
-	# linker
-	if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
-	v['CXXLNK_SRC_F']        = []
-	v['CXXLNK_TGT_F']        = ['-o']
-	v['CPPPATH_ST']          = '-I%s'
-	v['DEFINES_ST']          = '-D%s'
+	if not v.LINK_CXX:
+		v.LINK_CXX = v.CXX
 
-	v['LIB_ST']              = '-l%s' # template for adding libs
-	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-	v['STLIB_ST']            = '-l%s'
-	v['STLIBPATH_ST']        = '-L%s'
-	v['RPATH_ST']            = '-Wl,-rpath,%s'
+	v.CXXLNK_SRC_F        = []
+	v.CXXLNK_TGT_F        = ['-o']
+	v.CPPPATH_ST          = '-I%s'
+	v.DEFINES_ST          = '-D%s'
 
-	v['SONAME_ST']           = []
-	v['SHLIB_MARKER']        = []
-	v['STLIB_MARKER']        = []
+	v.LIB_ST              = '-l%s' # template for adding libs
+	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
+	v.STLIB_ST            = '-l%s'
+	v.STLIBPATH_ST        = '-L%s'
+	v.RPATH_ST            = '-Wl,-rpath,%s'
 
-	# program
-	v['LINKFLAGS_cxxprogram']= ['-Wl,-brtl']
-	v['cxxprogram_PATTERN']  = '%s'
+	v.SONAME_ST           = []
+	v.SHLIB_MARKER        = []
+	v.STLIB_MARKER        = []
 
-	# shared library
-	v['CXXFLAGS_cxxshlib']   = ['-fPIC']
-	v['LINKFLAGS_cxxshlib']  = ['-G', '-Wl,-brtl,-bexpfull']
-	v['cxxshlib_PATTERN']    = 'lib%s.so'
+	v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
+	v.cxxprogram_PATTERN  = '%s'
 
-	# static lib
-	v['LINKFLAGS_cxxstlib']  = []
-	v['cxxstlib_PATTERN']    = 'lib%s.a'
+	v.CXXFLAGS_cxxshlib   = ['-fPIC']
+	v.LINKFLAGS_cxxshlib  = ['-G', '-Wl,-brtl,-bexpfull']
+	v.cxxshlib_PATTERN    = 'lib%s.so'
+
+	v.LINKFLAGS_cxxstlib  = []
+	v.cxxstlib_PATTERN    = 'lib%s.a'
 
 def configure(conf):
 	conf.find_xlcxx()

File diff suppressed because it is too large
+ 436 - 298
sdk/waf/waflib/Utils.py


+ 1 - 1
sdk/waf/waflib/__init__.py

@@ -1,3 +1,3 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
+# Thomas Nagy, 2005-2018 (ita)

+ 177 - 80
sdk/waf/waflib/ansiterm.py

@@ -1,9 +1,51 @@
-import sys, os
+#!/usr/bin/env python
+# encoding: utf-8
+
+"""
+Emulate a vt100 terminal in cmd.exe
+
+By wrapping sys.stdout / sys.stderr with Ansiterm,
+the vt100 escape characters will be interpreted and
+the equivalent actions will be performed with Win32
+console commands.
+
+"""
+
+import os, re, sys
+from waflib import Utils
+
+wlock = Utils.threading.Lock()
+
 try:
-	if not (sys.stderr.isatty() and sys.stdout.isatty()):
-		raise ValueError('not a tty')
+	from ctypes import Structure, WinDLL, c_short, c_ushort, c_ulong, c_int, byref, c_wchar, POINTER, c_long
+except ImportError:
 
-	from ctypes import *
+	class AnsiTerm(object):
+		def __init__(self, stream):
+			self.stream = stream
+			try:
+				self.errors = self.stream.errors
+			except AttributeError:
+				pass # python 2.5
+			self.encoding = self.stream.encoding
+
+		def write(self, txt):
+			try:
+				wlock.acquire()
+				self.stream.write(txt)
+				self.stream.flush()
+			finally:
+				wlock.release()
+
+		def fileno(self):
+			return self.stream.fileno()
+
+		def flush(self):
+			self.stream.flush()
+
+		def isatty(self):
+			return self.stream.isatty()
+else:
 
 	class COORD(Structure):
 		_fields_ = [("X", c_short), ("Y", c_short)]
@@ -12,23 +54,10 @@ try:
 		_fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
 
 	class CONSOLE_SCREEN_BUFFER_INFO(Structure):
-		_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
+		_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_ushort), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
 
 	class CONSOLE_CURSOR_INFO(Structure):
-		_fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]
-
-	sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
-	csinfo = CONSOLE_CURSOR_INFO()
-	hconsole = windll.kernel32.GetStdHandle(-11)
-	windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
-	if sbinfo.Size.X < 9 or sbinfo.Size.Y < 9: raise ValueError('small console')
-	windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
-except Exception:
-	pass
-else:
-	import re, threading
-
-	is_vista = getattr(sys, "getwindowsversion", None) and sys.getwindowsversion()[0] >= 6
+		_fields_ = [('dwSize', c_ulong), ('bVisible', c_int)]
 
 	try:
 		_type = unicode
@@ -36,33 +65,63 @@ else:
 		_type = str
 
 	to_int = lambda number, default: number and int(number) or default
-	wlock = threading.Lock()
 
 	STD_OUTPUT_HANDLE = -11
 	STD_ERROR_HANDLE = -12
 
+	kernel32 = WinDLL('kernel32')
+	kernel32.GetStdHandle.argtypes = [c_ulong]
+	kernel32.GetStdHandle.restype = c_ulong
+	kernel32.GetConsoleScreenBufferInfo.argtypes = [c_ulong, POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+	kernel32.GetConsoleScreenBufferInfo.restype = c_long
+	kernel32.SetConsoleTextAttribute.argtypes = [c_ulong, c_ushort]
+	kernel32.SetConsoleTextAttribute.restype = c_long
+	kernel32.FillConsoleOutputCharacterW.argtypes = [c_ulong, c_wchar, c_ulong, POINTER(COORD), POINTER(c_ulong)]
+	kernel32.FillConsoleOutputCharacterW.restype = c_long
+	kernel32.FillConsoleOutputAttribute.argtypes = [c_ulong, c_ushort, c_ulong, POINTER(COORD), POINTER(c_ulong) ]
+	kernel32.FillConsoleOutputAttribute.restype = c_long
+	kernel32.SetConsoleCursorPosition.argtypes = [c_ulong, POINTER(COORD) ]
+	kernel32.SetConsoleCursorPosition.restype = c_long
+	kernel32.SetConsoleCursorInfo.argtypes = [c_ulong, POINTER(CONSOLE_CURSOR_INFO)]
+	kernel32.SetConsoleCursorInfo.restype = c_long
+
 	class AnsiTerm(object):
 		"""
 		emulate a vt100 terminal in cmd.exe
 		"""
-		def __init__(self):
-			self.encoding = sys.stdout.encoding
-			self.hconsole = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
+		def __init__(self, s):
+			self.stream = s
+			try:
+				self.errors = s.errors
+			except AttributeError:
+				pass # python2.5
+			self.encoding = s.encoding
 			self.cursor_history = []
-			self.orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
-			self.orig_csinfo = CONSOLE_CURSOR_INFO()
-			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self.orig_sbinfo))
-			windll.kernel32.GetConsoleCursorInfo(hconsole, byref(self.orig_csinfo))
+
+			handle = (s.fileno() == 2) and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE
+			self.hconsole = kernel32.GetStdHandle(handle)
+
+			self._sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+
+			self._csinfo = CONSOLE_CURSOR_INFO()
+			kernel32.GetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
+
+			# just to double check that the console is usable
+			self._orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+			r = kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._orig_sbinfo))
+			self._isatty = r == 1
 
 		def screen_buffer_info(self):
-			sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
-			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
-			return sbinfo
+			"""
+			Updates self._sbinfo and returns it
+			"""
+			kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._sbinfo))
+			return self._sbinfo
 
 		def clear_line(self, param):
 			mode = param and int(param) or 0
 			sbinfo = self.screen_buffer_info()
-			if mode == 1: # Clear from begining of line to cursor position
+			if mode == 1: # Clear from beginning of line to cursor position
 				line_start = COORD(0, sbinfo.CursorPosition.Y)
 				line_length = sbinfo.Size.X
 			elif mode == 2: # Clear entire line
@@ -71,26 +130,26 @@ else:
 			else: # Clear from cursor position to end of line
 				line_start = sbinfo.CursorPosition
 				line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
-			chars_written = c_int()
-			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written))
-			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
+			chars_written = c_ulong()
+			kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written))
+			kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
 
 		def clear_screen(self, param):
 			mode = to_int(param, 0)
 			sbinfo = self.screen_buffer_info()
-			if mode == 1: # Clear from begining of screen to cursor position
+			if mode == 1: # Clear from beginning of screen to cursor position
 				clear_start = COORD(0, 0)
 				clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
 			elif mode == 2: # Clear entire screen and return cursor to home
 				clear_start = COORD(0, 0)
 				clear_length = sbinfo.Size.X * sbinfo.Size.Y
-				windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
+				kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
 			else: # Clear from cursor position to end of screen
 				clear_start = sbinfo.CursorPosition
 				clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
-			chars_written = c_int()
-			windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written))
-			windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
+			chars_written = c_ulong()
+			kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written))
+			kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
 
 		def push_cursor(self, param):
 			sbinfo = self.screen_buffer_info()
@@ -99,7 +158,7 @@ else:
 		def pop_cursor(self, param):
 			if self.cursor_history:
 				old_pos = self.cursor_history.pop()
-				windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
+				kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
 
 		def set_cursor(self, param):
 			y, sep, x = param.partition(';')
@@ -110,7 +169,7 @@ else:
 				min(max(0, x), sbinfo.Size.X),
 				min(max(0, y), sbinfo.Size.Y)
 			)
-			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+			kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
 
 		def set_column(self, param):
 			x = to_int(param, 1) - 1
@@ -119,7 +178,7 @@ else:
 				min(max(0, x), sbinfo.Size.X),
 				sbinfo.CursorPosition.Y
 			)
-			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+			kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
 
 		def move_cursor(self, x_offset=0, y_offset=0):
 			sbinfo = self.screen_buffer_info()
@@ -127,7 +186,7 @@ else:
 				min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
 				min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
 			)
-			windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
+			kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
 
 		def move_up(self, param):
 			self.move_cursor(y_offset = -to_int(param, 1))
@@ -160,35 +219,32 @@ else:
 
 		def set_color(self, param):
 			cols = param.split(';')
-			sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
-			windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
+			sbinfo = self.screen_buffer_info()
 			attr = sbinfo.Attributes
 			for c in cols:
-				if is_vista:
-					c = int(c)
-				else:
-					c = to_int(c, 0)
-				if c in range(30,38): # fgcolor
-					attr = (attr & 0xfff0) | self.rgb2bgr(c-30)
-				elif c in range(40,48): # bgcolor
-					attr = (attr & 0xff0f) | (self.rgb2bgr(c-40) << 4)
+				c = to_int(c, 0)
+				if 29 < c < 38: # fgcolor
+					attr = (attr & 0xfff0) | self.rgb2bgr(c - 30)
+				elif 39 < c < 48: # bgcolor
+					attr = (attr & 0xff0f) | (self.rgb2bgr(c - 40) << 4)
 				elif c == 0: # reset
-					attr = self.orig_sbinfo.Attributes
+					attr = self._orig_sbinfo.Attributes
 				elif c == 1: # strong
 					attr |= 0x08
 				elif c == 4: # blink not available -> bg intensity
 					attr |= 0x80
 				elif c == 7: # negative
 					attr = (attr & 0xff88) | ((attr & 0x70) >> 4) | ((attr & 0x07) << 4)
-			windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
+
+			kernel32.SetConsoleTextAttribute(self.hconsole, attr)
 
 		def show_cursor(self,param):
-			csinfo.bVisible = 1
-			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
+			self._csinfo.bVisible = 1
+			kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
 
 		def hide_cursor(self,param):
-			csinfo.bVisible = 0
-			windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
+			self._csinfo.bVisible = 0
+			kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
 
 		ansi_command_table = {
 			'A': move_up,
@@ -209,38 +265,79 @@ else:
 			'u': pop_cursor,
 		}
 		# Match either the escape sequence or text not containing escape sequence
-		ansi_tokens = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
+		ansi_tokens = re.compile(r'(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
 		def write(self, text):
 			try:
 				wlock.acquire()
-				for param, cmd, txt in self.ansi_tokens.findall(text):
-					if cmd:
-						cmd_func = self.ansi_command_table.get(cmd)
-						if cmd_func:
-							cmd_func(self, param)
-					else:
-						self.writeconsole(txt)
+				if self._isatty:
+					for param, cmd, txt in self.ansi_tokens.findall(text):
+						if cmd:
+							cmd_func = self.ansi_command_table.get(cmd)
+							if cmd_func:
+								cmd_func(self, param)
+						else:
+							self.writeconsole(txt)
+				else:
+					# no support for colors in the console, just output the text:
+					# eclipse or msys may be able to interpret the escape sequences
+					self.stream.write(text)
 			finally:
 				wlock.release()
 
 		def writeconsole(self, txt):
-			chars_written = c_int()
-			writeconsole = windll.kernel32.WriteConsoleA
+			chars_written = c_ulong()
+			writeconsole = kernel32.WriteConsoleA
 			if isinstance(txt, _type):
-				writeconsole = windll.kernel32.WriteConsoleW
-
-			TINY_STEP = 3000
-			for x in range(0, len(txt), TINY_STEP):
-			    # According MSDN, size should NOT exceed 64 kb (issue #746)
-			    tiny = txt[x : x + TINY_STEP]
-			    writeconsole(self.hconsole, tiny, len(tiny), byref(chars_written), None)
+				writeconsole = kernel32.WriteConsoleW
+
+			# MSDN says that there is a shared buffer of 64 KB for the console
+			# writes. Attempt to not get ERROR_NOT_ENOUGH_MEMORY, see waf issue #746
+			done = 0
+			todo = len(txt)
+			chunk = 32<<10
+			while todo != 0:
+				doing = min(chunk, todo)
+				buf = txt[done:done+doing]
+				r = writeconsole(self.hconsole, buf, doing, byref(chars_written), None)
+				if r == 0:
+					chunk >>= 1
+					continue
+				done += doing
+				todo -= doing
+
+
+		def fileno(self):
+			return self.stream.fileno()
 
 		def flush(self):
 			pass
 
 		def isatty(self):
-			return True
-
-	sys.stderr = sys.stdout = AnsiTerm()
-	os.environ['TERM'] = 'vt100'
+			return self._isatty
+
+	if sys.stdout.isatty() or sys.stderr.isatty():
+		handle = sys.stdout.isatty() and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE
+		console = kernel32.GetStdHandle(handle)
+		sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
+		def get_term_cols():
+			kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo))
+			# Issue 1401 - the progress bar cannot reach the last character
+			return sbinfo.Size.X - 1
+
+# just try and see
+try:
+	import struct, fcntl, termios
+except ImportError:
+	pass
+else:
+	if (sys.stdout.isatty() or sys.stderr.isatty()) and os.environ.get('TERM', '') not in ('dumb', 'emacs'):
+		FD = sys.stdout.isatty() and sys.stdout.fileno() or sys.stderr.fileno()
+		def fun():
+			return struct.unpack("HHHH", fcntl.ioctl(FD, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)))[1]
+		try:
+			fun()
+		except Exception as e:
+			pass
+		else:
+			get_term_cols = fun
 

+ 3 - 0
sdk/waf/waflib/extras/__init__.py

@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2005-2010 (ita)

+ 173 - 0
sdk/waf/waflib/extras/batched_cc.py

@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2015 (ita)
+
+"""
+Instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
+cc -c ../file1.c ../file2.c ../file3.c
+
+Files are output on the directory where the compiler is called, and dependencies are more difficult
+to track (do not run the command on all source files if only one file changes)
+As such, we do as if the files were compiled one by one, but no command is actually run:
+replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the
+signatures from each slave and finds out the command-line to run.
+
+Just import this module to start using it:
+def build(bld):
+	bld.load('batched_cc')
+
+Note that this is provided as an example, unity builds are recommended
+for best performance results (fewer tasks and fewer jobs to execute).
+See waflib/extras/unity.py.
+"""
+
+from waflib import Task, Utils
+from waflib.TaskGen import extension, feature, after_method
+from waflib.Tools import c, cxx
+
+MAX_BATCH = 50
+
+c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
+c_fun, _ = Task.compile_fun_noshell(c_str)
+
+cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
+cxx_fun, _ = Task.compile_fun_noshell(cxx_str)
+
+count = 70000
+class batch(Task.Task):
+	color = 'PINK'
+
+	after = ['c', 'cxx']
+	before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']
+
+	def uid(self):
+		return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target])
+
+	def __str__(self):
+		return 'Batch compilation for %d slaves' % len(self.slaves)
+
+	def __init__(self, *k, **kw):
+		Task.Task.__init__(self, *k, **kw)
+		self.slaves = []
+		self.inputs = []
+		self.hasrun = 0
+
+		global count
+		count += 1
+		self.idx = count
+
+	def add_slave(self, slave):
+		self.slaves.append(slave)
+		self.set_run_after(slave)
+
+	def runnable_status(self):
+		for t in self.run_after:
+			if not t.hasrun:
+				return Task.ASK_LATER
+
+		for t in self.slaves:
+			#if t.executed:
+			if t.hasrun != Task.SKIPPED:
+				return Task.RUN_ME
+
+		return Task.SKIP_ME
+
+	def get_cwd(self):
+		return self.slaves[0].outputs[0].parent
+
+	def batch_incpaths(self):
+		st = self.env.CPPPATH_ST
+		return [st % node.abspath() for node in self.generator.includes_nodes]
+
+	def run(self):
+		self.outputs = []
+
+		srclst = []
+		slaves = []
+		for t in self.slaves:
+			if t.hasrun != Task.SKIPPED:
+				slaves.append(t)
+				srclst.append(t.inputs[0].abspath())
+
+		self.env.SRCLST = srclst
+
+		if self.slaves[0].__class__.__name__ == 'c':
+			ret = c_fun(self)
+		else:
+			ret = cxx_fun(self)
+
+		if ret:
+			return ret
+
+		for t in slaves:
+			t.old_post_run()
+
+def hook(cls_type):
+	def n_hook(self, node):
+
+		ext = '.obj' if self.env.CC_NAME == 'msvc' else '.o'
+		name = node.name
+		k = name.rfind('.')
+		if k >= 0:
+			basename = name[:k] + ext
+		else:
+			basename = name + ext
+
+		outdir = node.parent.get_bld().make_node('%d' % self.idx)
+		outdir.mkdir()
+		out = outdir.find_or_declare(basename)
+
+		task = self.create_task(cls_type, node, out)
+
+		try:
+			self.compiled_tasks.append(task)
+		except AttributeError:
+			self.compiled_tasks = [task]
+
+		if not getattr(self, 'masters', None):
+			self.masters = {}
+			self.allmasters = []
+
+		def fix_path(tsk):
+			if self.env.CC_NAME == 'msvc':
+				tsk.env.append_unique('CXX_TGT_F_BATCHED', '/Fo%s\\' % outdir.abspath())
+
+		if not node.parent in self.masters:
+			m = self.masters[node.parent] = self.master = self.create_task('batch')
+			fix_path(m)
+			self.allmasters.append(m)
+		else:
+			m = self.masters[node.parent]
+			if len(m.slaves) > MAX_BATCH:
+				m = self.masters[node.parent] = self.master = self.create_task('batch')
+				fix_path(m)
+				self.allmasters.append(m)
+		m.add_slave(task)
+		return task
+	return n_hook
+
+extension('.c')(hook('c'))
+extension('.cpp','.cc','.cxx','.C','.c++')(hook('cxx'))
+
+@feature('cprogram', 'cshlib', 'cstaticlib', 'cxxprogram', 'cxxshlib', 'cxxstlib')
+@after_method('apply_link')
+def link_after_masters(self):
+	if getattr(self, 'allmasters', None):
+		for m in self.allmasters:
+			self.link_task.set_run_after(m)
+
+# Modify the c and cxx task classes - in theory it would be best to
+# create subclasses and to re-map the c/c++ extensions
+for x in ('c', 'cxx'):
+	t = Task.classes[x]
+	def run(self):
+		pass
+
+	def post_run(self):
+		pass
+
+	setattr(t, 'oldrun', getattr(t, 'run', None))
+	setattr(t, 'run', run)
+	setattr(t, 'old_post_run', t.post_run)
+	setattr(t, 'post_run', post_run)
+

+ 58 - 0
sdk/waf/waflib/extras/biber.py

@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2011 (ita)
+
+"""
+Latex processing using "biber"
+"""
+
+import os
+from waflib import Task, Logs
+
+from waflib.Tools import tex as texmodule
+
+class tex(texmodule.tex):
+	biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
+	biber_fun.__doc__ = """
+	Execute the program **biber**
+	"""
+
+	def bibfile(self):
+		return None
+
+	def bibunits(self):
+		self.env.env = {}
+		self.env.env.update(os.environ)
+		self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
+		self.env.SRCFILE = self.aux_nodes[0].name[:-4]
+
+		if not self.env['PROMPT_LATEX']:
+			self.env.append_unique('BIBERFLAGS', '--quiet')
+
+		path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
+		if os.path.isfile(path):
+			Logs.warn('calling biber')
+			self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
+		else:
+			super(tex, self).bibfile()
+			super(tex, self).bibunits()
+
+class latex(tex):
+	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
+class pdflatex(tex):
+	texfun, vars =  Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
+class xelatex(tex):
+	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
+
+def configure(self):
+	"""
+	Almost the same as in tex.py, but try to detect 'biber'
+	"""
+	v = self.env
+	for p in ' biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
+		try:
+			self.find_program(p, var=p.upper())
+		except self.errors.ConfigurationError:
+			pass
+	v['DVIPSFLAGS'] = '-Ppdf'
+

+ 128 - 0
sdk/waf/waflib/extras/bjam.py

@@ -0,0 +1,128 @@
+#! /usr/bin/env python
+# per rosengren 2011
+
+from os import sep, readlink
+from waflib import Logs
+from waflib.TaskGen import feature, after_method
+from waflib.Task import Task, always_run
+
+def options(opt):
+	grp = opt.add_option_group('Bjam Options')
+	grp.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
+	grp.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
+	grp.add_option('--bjam_config', default=None)
+	grp.add_option('--bjam_toolset', default=None)
+
+def configure(cnf):
+	if not cnf.env.BJAM_SRC:
+		cnf.env.BJAM_SRC = cnf.options.bjam_src
+	if not cnf.env.BJAM_UNAME:
+		cnf.env.BJAM_UNAME = cnf.options.bjam_uname
+	try:
+		cnf.find_program('bjam', path_list=[
+			cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME
+		])
+	except Exception:
+		cnf.env.BJAM = None
+	if not cnf.env.BJAM_CONFIG:
+		cnf.env.BJAM_CONFIG = cnf.options.bjam_config
+	if not cnf.env.BJAM_TOOLSET:
+		cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset
+
+@feature('bjam')
+@after_method('process_rule')
+def process_bjam(self):
+	if not self.bld.env.BJAM:
+		self.create_task('bjam_creator')
+	self.create_task('bjam_build')
+	self.create_task('bjam_installer')
+	if getattr(self, 'always', False):
+		always_run(bjam_creator)
+		always_run(bjam_build)
+	always_run(bjam_installer)
+
+class bjam_creator(Task):
+	ext_out = 'bjam_exe'
+	vars=['BJAM_SRC', 'BJAM_UNAME']
+	def run(self):
+		env = self.env
+		gen = self.generator
+		bjam = gen.bld.root.find_dir(env.BJAM_SRC)
+		if not bjam:
+			Logs.error('Can not find bjam source')
+			return -1
+		bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
+		bjam_exe = bjam.find_resource(bjam_exe_relpath)
+		if bjam_exe:
+			env.BJAM = bjam_exe.srcpath()
+			return 0
+		bjam_cmd = ['./build.sh']
+		Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
+		result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
+		if not result == 0:
+			Logs.error('bjam failed')
+			return -1
+		bjam_exe = bjam.find_resource(bjam_exe_relpath)
+		if bjam_exe:
+			env.BJAM = bjam_exe.srcpath()
+			return 0
+		Logs.error('bjam failed')
+		return -1
+
+class bjam_build(Task):
+	ext_in = 'bjam_exe'
+	ext_out = 'install'
+	vars = ['BJAM_TOOLSET']
+	def run(self):
+		env = self.env
+		gen = self.generator
+		path = gen.path
+		bld = gen.bld
+		if hasattr(gen, 'root'):
+			build_root = path.find_node(gen.root)
+		else:
+			build_root = path
+		jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
+		if jam:
+			Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
+			jam_rel = jam.relpath_gen(build_root)
+		else:
+			Logs.warn('No build configuration in build_config/user-config.jam. Using default')
+			jam_rel = None
+		bjam_exe = bld.srcnode.find_node(env.BJAM)
+		if not bjam_exe:
+			Logs.error('env.BJAM is not set')
+			return -1
+		bjam_exe_rel = bjam_exe.relpath_gen(build_root)
+		cmd = ([bjam_exe_rel] +
+			(['--user-config=' + jam_rel] if jam_rel else []) +
+			['--stagedir=' + path.get_bld().path_from(build_root)] +
+			['--debug-configuration'] +
+			['--with-' + lib for lib in self.generator.target] +
+			(['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
+			['link=' + 'shared'] +
+			['variant=' + 'release']
+		)
+		Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
+		ret = self.exec_command(cmd, cwd=build_root.srcpath())
+		if ret != 0:
+			return ret
+		self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
+		return 0
+
+class bjam_installer(Task):
+	ext_in = 'install'
+	def run(self):
+		gen = self.generator
+		path = gen.path
+		for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
+			files = []
+			for n in path.get_bld().ant_glob(pat):
+				try:
+					t = readlink(n.srcpath())
+					gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False)
+				except OSError:
+					files.append(n)
+			gen.bld.install_files(idir, files, postpone=False)
+		return 0
+

+ 108 - 0
sdk/waf/waflib/extras/blender.py

@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Michal Proszek, 2014 (poxip)
+
+"""
+Detect the version of Blender, path
+and install the extension:
+
+	def options(opt):
+		opt.load('blender')
+	def configure(cnf):
+		cnf.load('blender')
+	def build(bld):
+		bld(name='io_mesh_raw',
+			feature='blender',
+			files=['file1.py', 'file2.py']
+		)
+If the name variable is empty, files are installed in scripts/addons, otherwise scripts/addons/name
+Use ./waf configure --system to set the installation directory to system path
+"""
+import os
+import re
+from getpass import getuser
+
+from waflib import Utils
+from waflib.TaskGen import feature
+from waflib.Configure import conf
+
+def options(opt):
+	opt.add_option(
+		'-s', '--system',
+		dest='directory_system',
+		default=False,
+		action='store_true',
+		help='determines installation directory (default: user)'
+	)
+
+@conf
+def find_blender(ctx):
+	'''Return version number of blender, if not exist return None'''
+	blender = ctx.find_program('blender')
+	output = ctx.cmd_and_log(blender + ['--version'])
+	m = re.search(r'Blender\s*((\d+(\.|))*)', output)
+	if not m:
+		ctx.fatal('Could not retrieve blender version')
+
+	try:
+		blender_version = m.group(1)
+	except IndexError:
+		ctx.fatal('Could not retrieve blender version')
+
+	ctx.env['BLENDER_VERSION'] = blender_version
+	return blender
+
+@conf
+def configure_paths(ctx):
+	"""Setup blender paths"""
+	# Get the username
+	user = getuser()
+	_platform = Utils.unversioned_sys_platform()
+	config_path = {'user': '', 'system': ''}
+	if _platform.startswith('linux'):
+		config_path['user'] = '/home/%s/.config/blender/' % user
+		config_path['system'] = '/usr/share/blender/'
+	elif _platform == 'darwin':
+		# MAC OS X
+		config_path['user'] = \
+			'/Users/%s/Library/Application Support/Blender/' % user
+		config_path['system'] = '/Library/Application Support/Blender/'
+	elif Utils.is_win32:
+		# Windows
+		appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
+		homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')
+
+		config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
+		config_path['system'] = \
+			'%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
+	else:
+		ctx.fatal(
+			'Unsupported platform. '
+			'Available platforms: Linux, OSX, MS-Windows.'
+		)
+
+	blender_version = ctx.env['BLENDER_VERSION']
+
+	config_path['user'] += blender_version + '/'
+	config_path['system'] += blender_version + '/'
+
+	ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
+	if ctx.options.directory_system:
+		ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']
+
+	ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
+		ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
+	)
+	Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
+
+def configure(ctx):
+	ctx.find_blender()
+	ctx.configure_paths()
+
+@feature('blender_list')
+def blender(self):
+	# Two ways to install a blender extension: as a module or just .py files
+	dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
+	Utils.check_dir(dest_dir)
+	self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.'))
+

+ 526 - 0
sdk/waf/waflib/extras/boost.py

@@ -0,0 +1,526 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,boost
+	or, if you have waf >= 1.6.2
+$ ./waf update --files=boost
+
+When using this tool, the wscript will look like:
+
+	def options(opt):
+		opt.load('compiler_cxx boost')
+
+	def configure(conf):
+		conf.load('compiler_cxx boost')
+		conf.check_boost(lib='system filesystem')
+
+	def build(bld):
+		bld(source='main.cpp', target='app', use='BOOST')
+
+Options are generated, in order to specify the location of boost includes/libraries.
+The `check_boost` configuration function allows to specify the used boost libraries.
+It can also provide default arguments to the --boost-mt command-line arguments.
+Everything will be packaged together in a BOOST component that you can use.
+
+When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
+ - you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
+   Errors: C4530
+ - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC
+   So before calling `conf.check_boost` you might want to disable it by adding
+		conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+   Errors:
+ - boost might also be compiled with /MT, which links the runtime statically.
+   If you have problems with redefined symbols,
+		self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+		self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
+Passing `--boost-linkage_autodetect` might help ensuring having a correct linkage in some basic cases.
+
+'''
+
+import sys
+import re
+from waflib import Utils, Logs, Errors
+from waflib.Configure import conf
+from waflib.TaskGen import feature, after_method
+
+BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
+BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
+BOOST_VERSION_FILE = 'boost/version.hpp'
+BOOST_VERSION_CODE = '''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
+'''
+
+BOOST_ERROR_CODE = '''
+#include <boost/system/error_code.hpp>
+int main() { boost::system::error_code c; }
+'''
+
+PTHREAD_CODE = '''
+#include <pthread.h>
+static void* f(void*) { return 0; }
+int main() {
+	pthread_t th;
+	pthread_attr_t attr;
+	pthread_attr_init(&attr);
+	pthread_create(&th, &attr, &f, 0);
+	pthread_join(th, 0);
+	pthread_cleanup_push(0, 0);
+	pthread_cleanup_pop(0);
+	pthread_attr_destroy(&attr);
+}
+'''
+
+BOOST_THREAD_CODE = '''
+#include <boost/thread.hpp>
+int main() { boost::thread t; }
+'''
+
+BOOST_LOG_CODE = '''
+#include <boost/log/trivial.hpp>
+#include <boost/log/utility/setup/console.hpp>
+#include <boost/log/utility/setup/common_attributes.hpp>
+int main() {
+	using namespace boost::log;
+	add_common_attributes();
+	add_console_log(std::clog, keywords::format = "%Message%");
+	BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl;
+}
+'''
+
+# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
+PLATFORM = Utils.unversioned_sys_platform()
+detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
+detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
+detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
+BOOST_TOOLSETS = {
+	'borland':  'bcb',
+	'clang':	detect_clang,
+	'como':	 'como',
+	'cw':	   'cw',
+	'darwin':   'xgcc',
+	'edg':	  'edg',
+	'g++':	  detect_mingw,
+	'gcc':	  detect_mingw,
+	'icpc':	 detect_intel,
+	'intel':	detect_intel,
+	'kcc':	  'kcc',
+	'kylix':	'bck',
+	'mipspro':  'mp',
+	'mingw':	'mgw',
+	'msvc':	 'vc',
+	'qcc':	  'qcc',
+	'sun':	  'sw',
+	'sunc++':   'sw',
+	'tru64cxx': 'tru',
+	'vacpp':	'xlc'
+}
+
+
+def options(opt):
+	"""Register the --boost-* command-line options in their own option group."""
+	opt = opt.add_option_group('Boost Options')
+	opt.add_option('--boost-includes', type=str,
+				   default='', dest='boost_includes',
+				   help='''path to the directory where the boost includes are,
+				   e.g., /path/to/boost_1_55_0/stage/include''')
+	opt.add_option('--boost-libs', type=str,
+				   default='', dest='boost_libs',
+				   help='''path to the directory where the boost libs are,
+				   e.g., path/to/boost_1_55_0/stage/lib''')
+	opt.add_option('--boost-mt', action='store_true',
+				   default=False, dest='boost_mt',
+				   help='select multi-threaded libraries')
+	opt.add_option('--boost-abi', type=str, default='', dest='boost_abi',
+				   help='''select libraries with tags (gd for debug, static is automatically added),
+				   see doc Boost, Getting Started, chapter 6.1''')
+	opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
+				   help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
+	opt.add_option('--boost-toolset', type=str,
+				   default='', dest='boost_toolset',
+				   help='force a toolset e.g. msvc, vc90, \
+						gcc, mingw, mgw45 (default: auto)')
+	# default the python tag to the version of the running interpreter, e.g. '39'
+	py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
+	opt.add_option('--boost-python', type=str,
+				   default=py_version, dest='boost_python',
+				   help='select the lib python with this version \
+						(default: %s)' % py_version)
+
+
+@conf
+def __boost_get_version_file(self, d):
+	if not d:
+		return None
+	dnode = self.root.find_dir(d)
+	if dnode:
+		return dnode.find_node(BOOST_VERSION_FILE)
+	return None
+
+@conf
+def boost_get_version(self, d):
+	"""silently retrieve the boost version number
+
+	Returns (BOOST_LIB_VERSION, BOOST_VERSION) as strings, parsed from
+	boost/version.hpp when readable, otherwise obtained by compiling and
+	running a small test program against the headers in *d*.
+	"""
+	node = self.__boost_get_version_file(d)
+	if node:
+		try:
+			txt = node.read()
+		except EnvironmentError:
+			Logs.error("Could not read the file %r", node.abspath())
+		else:
+			re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
+			m1 = re_but1.search(txt)
+			re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
+			m2 = re_but2.search(txt)
+			if m1 and m2:
+				return (m1.group(1), m2.group(1))
+	# fallback: run BOOST_VERSION_CODE (requires a working, executable toolchain)
+	return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")
+
+@conf
+def boost_get_includes(self, *k, **kw):
+	"""Return the first directory containing boost/version.hpp.
+
+	Candidates are tried in order: the 'includes' parameter, the INCLUDE
+	environment variable entries, then the well-known BOOST_INCLUDES
+	locations. Fails the configuration when nothing matches.
+	"""
+	includes = k and k[0] or kw.get('includes')
+	if includes and self.__boost_get_version_file(includes):
+		return includes
+	for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
+		if self.__boost_get_version_file(d):
+			return d
+	if includes:
+		self.end_msg('headers not found in %s' % includes)
+		self.fatal('The configuration failed')
+	else:
+		self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
+		self.fatal('The configuration failed')
+
+
+@conf
+def boost_get_toolset(self, cc):
+	toolset = cc
+	if not cc:
+		build_platform = Utils.unversioned_sys_platform()
+		if build_platform in BOOST_TOOLSETS:
+			cc = build_platform
+		else:
+			cc = self.env.CXX_NAME
+	if cc in BOOST_TOOLSETS:
+		toolset = BOOST_TOOLSETS[cc]
+	return isinstance(toolset, str) and toolset or toolset(self.env)
+
+
+@conf
+def __boost_get_libs_path(self, *k, **kw):
+	''' return the lib path and all the files in it '''
+	if 'files' in kw:
+		# explicit file list provided by the caller
+		return self.root.find_dir('.'), Utils.to_list(kw['files'])
+	libs = k and k[0] or kw.get('libs')
+	if libs:
+		# NOTE(review): assumes the user-supplied path exists; find_dir()
+		# returning None would break ant_glob below -- TODO confirm
+		path = self.root.find_dir(libs)
+		files = path.ant_glob('*boost_*')
+	if not libs or not files:
+		# scan the LIB environment variable entries and the well-known
+		# locations, also trying the '64' variant of each directory
+		for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
+			if not d:
+				continue
+			path = self.root.find_dir(d)
+			if path:
+				files = path.ant_glob('*boost_*')
+				if files:
+					break
+			path = self.root.find_dir(d + '64')
+			if path:
+				files = path.ant_glob('*boost_*')
+				if files:
+					break
+	if not path:
+		if libs:
+			self.end_msg('libs not found in %s' % libs)
+			self.fatal('The configuration failed')
+		else:
+			self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
+			self.fatal('The configuration failed')
+
+	self.to_log('Found the boost path in %r with the libraries:' % path)
+	for x in files:
+		self.to_log('    %r' % x)
+	return path, files
+
+@conf
+def boost_get_libs(self, *k, **kw):
+	'''
+	return the lib path and the required libs
+	according to the parameters
+
+	Returns (path, shared_libs, static_libs) where path is the absolute
+	library directory and the lists contain library names stripped of the
+	'lib' prefix and platform extension.
+	'''
+	path, files = self.__boost_get_libs_path(**kw)
+	# longest names first so the most specific variant is matched first
+	files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
+	toolset = self.boost_get_toolset(kw.get('toolset', ''))
+	toolset_pat = '(-%s[0-9]{0,3})' % toolset
+	version = '-%s' % self.env.BOOST_VERSION
+
+	def find_lib(re_lib, files):
+		# first file whose name matches the compiled pattern, else None
+		for file in files:
+			if re_lib.search(file.name):
+				self.to_log('Found boost lib %s' % file)
+				return file
+		return None
+
+	# extensions from Tools.ccroot.lib_patterns
+	wo_ext = re.compile(r"\.(a|so|lib|dll|dylib)(\.[0-9\.]+)?$")
+	def format_lib_name(name):
+		# strip the 'lib' prefix (except for MSVC) and the extension
+		if name.startswith('lib') and self.env.CC_NAME != 'msvc':
+			name = name[3:]
+		return wo_ext.sub("", name)
+
+	def match_libs(lib_names, is_static):
+		# resolve each requested boost library to an on-disk file name,
+		# trying decorated names (toolset/tags/version) before plain ones
+		libs = []
+		lib_names = Utils.to_list(lib_names)
+		if not lib_names:
+			return libs
+		t = []
+		if kw.get('mt', False):
+			t.append('-mt')
+		if kw.get('abi'):
+			t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
+		elif is_static:
+			t.append('-s')
+		tags_pat = t and ''.join(t) or ''
+		ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
+		ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN
+
+		for lib in lib_names:
+			if lib == 'python':
+				# for instance, with python='27',
+				# accepts '-py27', '-py2', '27', '-2.7' and '2'
+				# but will reject '-py3', '-py26', '26' and '3'
+				tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1])
+			else:
+				tags = tags_pat
+			# Trying libraries, from most strict match to least one
+			for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
+							'boost_%s%s%s%s$' % (lib, tags, version, ext),
+							# Give up trying to find the right version
+							'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
+							'boost_%s%s%s$' % (lib, tags, ext),
+							'boost_%s%s$' % (lib, ext),
+							'boost_%s' % lib]:
+				self.to_log('Trying pattern %s' % pattern)
+				file = find_lib(re.compile(pattern), files)
+				if file:
+					libs.append(format_lib_name(file.name))
+					break
+			else:
+				self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
+				self.fatal('The configuration failed')
+		return libs
+
+	return  path.abspath(), match_libs(kw.get('lib'), False), match_libs(kw.get('stlib'), True)
+
+@conf
+def _check_pthread_flag(self, *k, **kw):
+	'''
+	Computes which flags should be added to CXXFLAGS and LINKFLAGS to compile in multi-threading mode
+
+	Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3,
+	boost/thread.hpp will trigger a #error if -pthread isn't used:
+	  boost/config/requires_threads.hpp:47:5: #error "Compiler threading support
+	  is not turned on. Please set the correct command line options for
+	  threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)"
+
+	Based on _BOOST_PTHREAD_FLAG(): https://github.com/tsuna/boost.m4/blob/master/build-aux/boost.m4
+    '''
+
+	var = kw.get('uselib_store', 'BOOST')
+
+	self.start_msg('Checking the flags needed to use pthreads')
+
+	# The ordering *is* (sometimes) important.  Some notes on the
+	# individual items follow:
+	# (none): in case threads are in libc; should be tried before -Kthread and
+	#       other compiler flags to prevent continual compiler warnings
+	# -lpthreads: AIX (must check this before -lpthread)
+	# -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
+	# -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
+	# -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
+	# -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads)
+	# -pthreads: Solaris/GCC
+	# -mthreads: MinGW32/GCC, Lynx/GCC
+	# -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
+	#      doesn't hurt to check since this sometimes defines pthreads too;
+	#      also defines -D_REENTRANT)
+	#      ... -mt is also the pthreads flag for HP/aCC
+	# -lpthread: GNU Linux, etc.
+	# --thread-safe: KAI C++
+	if Utils.unversioned_sys_platform() == "sunos":
+		# On Solaris (at least, for some versions), libc contains stubbed
+		# (non-functional) versions of the pthreads routines, so link-based
+		# tests will erroneously succeed.  (We need to link with -pthreads/-mt/
+		# -lpthread.)  (The stubs are missing pthread_cleanup_push, or rather
+		# a function called by this macro, so we could check for that, but
+		# who knows whether they'll stub that too in a future libc.)  So,
+		# we'll just look for -pthreads and -lpthread first:
+		boost_pthread_flags = ["-pthreads", "-lpthread", "-mt", "-pthread"]
+	else:
+		boost_pthread_flags = ["", "-lpthreads", "-Kthread", "-kthread", "-llthread", "-pthread",
+							   "-pthreads", "-mthreads", "-lpthread", "--thread-safe", "-mt"]
+
+	# try each candidate; the first one that compiles and links wins.
+	# NOTE(review): the stash() of the winning iteration is never committed
+	# or reverted, which appears to be how the flags are kept -- confirm
+	# against ConfigSet.stash()/revert() semantics
+	for boost_pthread_flag in boost_pthread_flags:
+		try:
+			self.env.stash()
+			self.env.append_value('CXXFLAGS_%s' % var, boost_pthread_flag)
+			self.env.append_value('LINKFLAGS_%s' % var, boost_pthread_flag)
+			self.check_cxx(code=PTHREAD_CODE, msg=None, use=var, execute=False)
+
+			self.end_msg(boost_pthread_flag)
+			return
+		except self.errors.ConfigurationError:
+			self.env.revert()
+	self.end_msg('None')
+
+@conf
+def check_boost(self, *k, **kw):
+	"""
+	Initialize boost libraries to be used.
+
+	Keywords: you can pass the same parameters as with the command line (without "--boost-").
+	Note that the command line has the priority, and should preferably be used.
+	"""
+	if not self.env['CXX']:
+		self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
+
+	params = {
+		'lib': k and k[0] or kw.get('lib'),
+		'stlib': kw.get('stlib')
+	}
+	for key, value in self.options.__dict__.items():
+		if not key.startswith('boost_'):
+			continue
+		key = key[len('boost_'):]
+		params[key] = value and value or kw.get(key, '')
+
+	var = kw.get('uselib_store', 'BOOST')
+
+	self.find_program('dpkg-architecture', var='DPKG_ARCHITECTURE', mandatory=False)
+	if self.env.DPKG_ARCHITECTURE:
+		deb_host_multiarch = self.cmd_and_log([self.env.DPKG_ARCHITECTURE[0], '-qDEB_HOST_MULTIARCH'])
+		BOOST_LIBS.insert(0, '/usr/lib/%s' % deb_host_multiarch.strip())
+
+	self.start_msg('Checking boost includes')
+	self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
+	versions = self.boost_get_version(inc)
+	self.env.BOOST_VERSION = versions[0]
+	self.env.BOOST_VERSION_NUMBER = int(versions[1])
+	self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
+							   int(versions[1]) / 100 % 1000,
+							   int(versions[1]) % 100))
+	if Logs.verbose:
+		Logs.pprint('CYAN', '	path : %s' % self.env['INCLUDES_%s' % var])
+
+	if not params['lib'] and not params['stlib']:
+		return
+	if 'static' in kw or 'static' in params:
+		Logs.warn('boost: static parameter is deprecated, use stlib instead.')
+	self.start_msg('Checking boost libs')
+	path, libs, stlibs = self.boost_get_libs(**params)
+	self.env['LIBPATH_%s' % var] = [path]
+	self.env['STLIBPATH_%s' % var] = [path]
+	self.env['LIB_%s' % var] = libs
+	self.env['STLIB_%s' % var] = stlibs
+	self.end_msg('ok')
+	if Logs.verbose:
+		Logs.pprint('CYAN', '	path : %s' % path)
+		Logs.pprint('CYAN', '	shared libs : %s' % libs)
+		Logs.pprint('CYAN', '	static libs : %s' % stlibs)
+
+	def has_shlib(lib):
+		return params['lib'] and lib in params['lib']
+	def has_stlib(lib):
+		return params['stlib'] and lib in params['stlib']
+	def has_lib(lib):
+		return has_shlib(lib) or has_stlib(lib)
+	if has_lib('thread'):
+		# not inside try_link to make check visible in the output
+		self._check_pthread_flag(k, kw)
+
+	def try_link():
+		if has_lib('system'):
+			self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
+		if has_lib('thread'):
+			self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
+		if has_lib('log'):
+			if not has_lib('thread'):
+				self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
+			if has_shlib('log'):
+				self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK']
+			self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
+
+	if params.get('linkage_autodetect', False):
+		self.start_msg("Attempting to detect boost linkage flags")
+		toolset = self.boost_get_toolset(kw.get('toolset', ''))
+		if toolset in ('vc',):
+			# disable auto-linking feature, causing error LNK1181
+			# because the code wants to be linked against
+			self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+
+			# if no dlls are present, we guess the .lib files are not stubs
+			has_dlls = False
+			for x in Utils.listdir(path):
+				if x.endswith(self.env.cxxshlib_PATTERN % ''):
+					has_dlls = True
+					break
+			if not has_dlls:
+				self.env['STLIBPATH_%s' % var] = [path]
+				self.env['STLIB_%s' % var] = libs
+				del self.env['LIB_%s' % var]
+				del self.env['LIBPATH_%s' % var]
+
+			# we attempt to play with some known-to-work CXXFLAGS combinations
+			for cxxflags in (['/MD', '/EHsc'], []):
+				self.env.stash()
+				self.env["CXXFLAGS_%s" % var] += cxxflags
+				try:
+					try_link()
+				except Errors.ConfigurationError as e:
+					self.env.revert()
+					exc = e
+				else:
+					self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
+					exc = None
+					self.env.commit()
+					break
+
+			if exc is not None:
+				self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
+				self.fatal('The configuration failed')
+		else:
+			self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
+			self.fatal('The configuration failed')
+	else:
+		self.start_msg('Checking for boost linkage')
+		try:
+			try_link()
+		except Errors.ConfigurationError as e:
+			self.end_msg("Could not link against boost libraries using supplied options")
+			self.fatal('The configuration failed')
+		self.end_msg('ok')
+
+
+@feature('cxx')
+@after_method('apply_link')
+def install_boost(self):
+	if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
+		return
+	install_boost.done = True
+	inst_to = getattr(self, 'install_path', '${BINDIR}')
+	for lib in self.env.LIB_BOOST:
+		try:
+			file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
+			self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file))
+		except:
+			continue
+install_boost.done = False

+ 24 - 0
sdk/waf/waflib/extras/build_file_timestamp.py

@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2015
+
+"""
+Force files to also depend on the timestamps of those located in the build directory. You may
+want to use this to force partial rebuilds, see playground/track_output_files/ for a working example.
+
+Note that there is a variety of ways to implement this, one may want to use timestamps on source files too for example,
+or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
+"""
+
+import os
+from waflib import Node, Utils
+
+def get_bld_sig(self):
+	if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
+		return Utils.h_file(self.abspath())
+
+	val = Utils.h_file(self.abspath()) + str(os.stat(self.abspath()).st_mtime).encode('latin-1')
+	return val
+
+Node.Node.get_bld_sig = get_bld_sig
+

+ 110 - 0
sdk/waf/waflib/extras/build_logs.py

@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2013 (ita)
+
+"""
+A system for recording all outputs to a log file. Just add the following to your wscript file::
+
+  def init(ctx):
+    ctx.load('build_logs')
+"""
+
+import atexit, datetime, sys, os, shutil, threading
+from waflib import ansiterm, Logs, Context
+
+# adding the logs under the build/ directory will clash with the clean/ command
+try:
+	up = os.path.dirname(Context.g_module.__file__)
+except AttributeError:
+	up = '.'
+LOGFILE = os.path.join(up, 'logs', datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S_%f.log'))
+
+wlock = threading.Lock()
+class log_to_file(object):
+	"""File-like proxy that forwards writes to the real stream and mirrors
+	them (with ANSI color codes stripped) into a log file."""
+	def __init__(self, stream, fileobj, filename):
+		self.stream = stream
+		self.encoding = self.stream.encoding
+		self.fileobj = fileobj
+		self.filename = filename
+		# cleared at exit once the underlying file object is closed
+		self.is_valid = True
+	def replace_colors(self, data):
+		# strip terminal color escape sequences before logging
+		for x in Logs.colors_lst.values():
+			if isinstance(x, str):
+				data = data.replace(x, '')
+		return data
+	def write(self, data):
+		try:
+			# serialize concurrent writers
+			wlock.acquire()
+			self.stream.write(data)
+			self.stream.flush()
+			if self.is_valid:
+				self.fileobj.write(self.replace_colors(data))
+		finally:
+			wlock.release()
+	def fileno(self):
+		return self.stream.fileno()
+	def flush(self):
+		self.stream.flush()
+		if self.is_valid:
+			self.fileobj.flush()
+	def isatty(self):
+		return self.stream.isatty()
+
+def init(ctx):
+	"""Redirect stdout/stderr (and the waf logger) through log_to_file
+	proxies so that all build output is also recorded in LOGFILE."""
+	global LOGFILE
+	filename = os.path.abspath(LOGFILE)
+	try:
+		os.makedirs(os.path.dirname(os.path.abspath(filename)))
+	except OSError:
+		pass
+
+	# O_NOINHERIT prevents spawned processes from keeping the log file open
+	if hasattr(os, 'O_NOINHERIT'):
+		fd = os.open(LOGFILE, os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT)
+		fileobj = os.fdopen(fd, 'w')
+	else:
+		fileobj = open(LOGFILE, 'w')
+	old_stderr = sys.stderr
+
+	# sys.stdout has already been replaced, so __stdout__ will be faster
+	#sys.stdout = log_to_file(sys.stdout, fileobj, filename)
+	#sys.stderr = log_to_file(sys.stderr, fileobj, filename)
+	def wrap(stream):
+		# keep color handling on real terminals
+		if stream.isatty():
+			return ansiterm.AnsiTerm(stream)
+		return stream
+	sys.stdout = log_to_file(wrap(sys.__stdout__), fileobj, filename)
+	sys.stderr = log_to_file(wrap(sys.__stderr__), fileobj, filename)
+
+	# now mess with the logging module...
+	for x in Logs.log.handlers:
+		try:
+			stream = x.stream
+		except AttributeError:
+			pass
+		else:
+			if id(stream) == id(old_stderr):
+				x.stream = sys.stderr
+
+def exit_cleanup():
+	"""atexit hook: close the log file, report its location and copy it to
+	'latest.log'. Does nothing if stdout was never wrapped by init()."""
+	try:
+		fileobj = sys.stdout.fileobj
+	except AttributeError:
+		pass
+	else:
+		# stop mirroring before closing the file object
+		sys.stdout.is_valid = False
+		sys.stderr.is_valid = False
+		fileobj.close()
+		filename = sys.stdout.filename
+
+		Logs.info('Output logged to %r', filename)
+
+		# then copy the log file to "latest.log" if possible
+		up = os.path.dirname(os.path.abspath(filename))
+		try:
+			shutil.copy(filename, os.path.join(up, 'latest.log'))
+		except OSError:
+			# this may fail on windows due to processes spawned
+			pass
+
+atexit.register(exit_cleanup)
+

+ 85 - 0
sdk/waf/waflib/extras/buildcopy.py

@@ -0,0 +1,85 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Calle Rosenquist, 2017 (xbreak)
+"""
+Create task that copies source files to the associated build node.
+This is useful to e.g. construct a complete Python package so it can be unit tested
+without installation.
+
+Source files to be copied can be specified either in `buildcopy_source` attribute, or
+`source` attribute. If both are specified `buildcopy_source` has priority.
+
+Examples::
+
+	def build(bld):
+		bld(name             = 'bar',
+			features         = 'py buildcopy',
+			source           = bld.path.ant_glob('src/bar/*.py'))
+
+		bld(name             = 'py baz',
+			features         = 'buildcopy',
+			buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt'])
+
+"""
+import os, shutil
+from waflib import Errors, Task, TaskGen, Utils, Node, Logs
+
+@TaskGen.before_method('process_source')
+@TaskGen.feature('buildcopy')
+def make_buildcopy(self):
+	"""
+	Creates the buildcopy task.
+
+	Resolves `buildcopy_source` (or `source`) entries to nodes in the
+	source directory and schedules one task copying each to its build
+	counterpart. Directories are rejected.
+	"""
+	def to_src_nodes(lst):
+		"""Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives
+		preference to nodes in build.
+		"""
+		if isinstance(lst, Node.Node):
+			if not lst.is_src():
+				raise Errors.WafError('buildcopy: node %s is not in src'%lst)
+			if not os.path.isfile(lst.abspath()):
+				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst)
+			return lst
+
+		if isinstance(lst, str):
+			lst = [x for x in Utils.split_path(lst) if x and x != '.']
+
+		# search_node: already-known nodes; find_node: probe the filesystem
+		node = self.bld.path.get_src().search_node(lst)
+		if node:
+			if not os.path.isfile(node.abspath()):
+				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
+			return node
+
+		node = self.bld.path.get_src().find_node(lst)
+		if node:
+			if not os.path.isfile(node.abspath()):
+				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
+			return node
+		raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst))
+
+	nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ]
+	if not nodes:
+		Logs.warn('buildcopy: No source files provided to buildcopy in %s (set `buildcopy_source` or `source`)',
+			self)
+		return
+	node_pairs = [(n, n.get_bld()) for n in nodes]
+	self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs)
+
+class buildcopy(Task.Task):
+	"""
+	Copy for each pair `n` in `node_pairs`: n[0] -> n[1].
+
+	Attribute `node_pairs` should contain a list of tuples describing source and target:
+
+		node_pairs = [(in, out), ...]
+
+	"""
+	color = 'PINK'
+
+	def keyword(self):
+		return 'Copying'
+
+	def run(self):
+		for f,t in self.node_pairs:
+			t.parent.mkdir()
+			shutil.copy2(f.abspath(), t.abspath())

+ 32 - 0
sdk/waf/waflib/extras/c_bgxlc.py

@@ -0,0 +1,32 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+"""
+IBM XL Compiler for Blue Gene
+"""
+
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+
+from waflib.Tools import xlc # method xlc_common_flags
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('c_bgxlc')
+
+@conf
+def find_bgxlc(conf):
+	"""Locate the IBM Blue Gene XL C compiler and record it as CC."""
+	cc = conf.find_program(['bgxlc_r','bgxlc'], var='CC')
+	conf.get_xlc_version(cc)
+	conf.env.CC = cc
+	conf.env.CC_NAME = 'bgxlc'
+
+def configure(conf):
+	"""Configure the Blue Gene XL C toolchain (flags shared with xlc)."""
+	conf.find_bgxlc()
+	conf.find_ar()
+	conf.xlc_common_flags()
+	# Blue Gene specific link flags for shared libraries/programs
+	conf.env.LINKFLAGS_cshlib = ['-G','-Wl,-bexpfull']
+	conf.env.LINKFLAGS_cprogram = []
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.link_add_flags()
+

+ 72 - 0
sdk/waf/waflib/extras/c_dumbpreproc.py

@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2006-2010 (ita)
+
+"""
+Dumb C/C++ preprocessor for finding dependencies
+
+It will look at all include files it can find after removing the comments, so the following
+will always add the dependency on both "a.h" and "b.h"::
+
+	#include "a.h"
+	#ifdef B
+		#include "b.h"
+	#endif
+	int main() {
+		return 0;
+	}
+
+To use::
+
+	def configure(conf):
+		conf.load('compiler_c')
+		conf.load('c_dumbpreproc')
+"""
+
+import re
+from waflib.Tools import c_preproc
+
+re_inc = re.compile(
+	'^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
+	re.IGNORECASE | re.MULTILINE)
+
+def lines_includes(node):
+	"""Return (keyword, path) pairs for every #include directive in *node*,
+	after trigraph substitution and comment/line-continuation removal."""
+	code = node.read()
+	if c_preproc.use_trigraphs:
+		for (a, b) in c_preproc.trig_def:
+			code = code.split(a).join(b)
+	# strip escaped newlines, then comments
+	code = c_preproc.re_nl.sub('', code)
+	code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+	return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
+
+parser = c_preproc.c_parser
+class dumb_parser(parser):
+	"""Simplified C preprocessor: records every include found, without
+	evaluating conditional compilation directives."""
+	def addlines(self, node):
+		# ignore nodes already queued (the last entry is the current one)
+		if node in self.nodes[:-1]:
+			return
+		self.currentnode_stack.append(node.parent)
+
+		# Avoid reading the same files again
+		try:
+			lines = self.parse_cache[node]
+		except KeyError:
+			lines = self.parse_cache[node] = lines_includes(node)
+
+		self.lines = lines + [(c_preproc.POPFILE, '')] +  self.lines
+
+	def start(self, node, env):
+		# the include cache is shared through the build context
+		try:
+			self.parse_cache = node.ctx.parse_cache
+		except AttributeError:
+			self.parse_cache = node.ctx.parse_cache = {}
+
+		self.addlines(node)
+		while self.lines:
+			(x, y) = self.lines.pop(0)
+			if x == c_preproc.POPFILE:
+				self.currentnode_stack.pop()
+				continue
+			self.tryfind(y, env=env)
+
+# install the dumb parser in place of the full preprocessor
+c_preproc.c_parser = dumb_parser
+

+ 82 - 0
sdk/waf/waflib/extras/c_emscripten.py

@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 vi:ts=4:noexpandtab
+
+import shlex
+
+from waflib import Errors
+from waflib.Tools import ccroot, gcc, gxx
+from waflib.Configure import conf
+
+from waflib.Tools.compiler_c import c_compiler
+from waflib.Tools.compiler_cxx import cxx_compiler
+
+for supported_os in ('linux', 'darwin', 'gnu', 'aix'):
+	c_compiler[supported_os].append('c_emscripten')
+	cxx_compiler[supported_os].append('c_emscripten')
+
+
+@conf
+def get_emscripten_version(conf, cc):
+	"""
+	Emscripten doesn't support processing '-' like clang/gcc
+
+	Preprocesses an empty file with -dM to collect the predefined macros,
+	verifies the compiler really is emscripten, and sets DEST_OS/DEST_BINFMT/
+	DEST_CPU/CC_VERSION accordingly. Returns the macro dictionary.
+	"""
+	dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
+	dummy.write("")
+	cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
+	env = conf.env.env or None
+	try:
+		out, err = conf.cmd_and_log(cmd, output=0, env=env)
+	except Errors.WafError as e:
+		conf.fatal('Could not determine the emscripten version %r: %s' % (cmd, e))
+
+	# each line looks like: #define KEY VALUE
+	k = {}
+	out = out.splitlines()
+	for line in out:
+		lst = shlex.split(line)
+		if len(lst)>2:
+			key = lst[1]
+			val = lst[2]
+			k[key] = val
+
+	if not ('__clang__' in k and 'EMSCRIPTEN' in k):
+		conf.fatal('Could not determine the emscripten compiler version.')
+
+	conf.env.DEST_OS = 'generic'
+	conf.env.DEST_BINFMT = 'elf'
+	conf.env.DEST_CPU = 'asm-js'
+	conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
+	return k
+
+@conf
+def find_emscripten(conf):
+	"""Locate emcc/em++/emar and record them as CC, CXX and AR."""
+	cc = conf.find_program(['emcc'], var='CC')
+	conf.get_emscripten_version(cc)
+	conf.env.CC = cc
+	conf.env.CC_NAME = 'emscripten'
+	cxx = conf.find_program(['em++'], var='CXX')
+	conf.env.CXX = cxx
+	conf.env.CXX_NAME = 'emscripten'
+	conf.find_program(['emar'], var='AR')
+
+def configure(conf):
+	"""Configure the emscripten toolchain on top of the gcc/g++ defaults."""
+	conf.find_emscripten()
+	conf.find_ar()
+	conf.gcc_common_flags()
+	conf.gxx_common_flags()
+	conf.cc_load_tools()
+	conf.cc_add_flags()
+	conf.cxx_load_tools()
+	conf.cxx_add_flags()
+	conf.link_add_flags()
+	conf.env.ARFLAGS = ['rcs']
+	# emscripten emits JavaScript/HTML artifacts instead of native binaries
+	conf.env.cshlib_PATTERN = '%s.js'
+	conf.env.cxxshlib_PATTERN = '%s.js'
+	conf.env.cstlib_PATTERN = '%s.a'
+	conf.env.cxxstlib_PATTERN = '%s.a'
+	conf.env.cprogram_PATTERN = '%s.html'
+	conf.env.cxxprogram_PATTERN = '%s.html'
+	conf.env.CXX_TGT_F           = ['-c', '-o', '']
+	conf.env.CC_TGT_F            = ['-c', '-o', '']
+	conf.env.CXXLNK_TGT_F        = ['-o', '']
+	conf.env.CCLNK_TGT_F         = ['-o', '']
+	conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])

+ 77 - 0
sdk/waf/waflib/extras/c_nec.py

@@ -0,0 +1,77 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# harald at klimachs.de
+
+"""
+NEC SX Compiler for SX vector systems
+"""
+
+import re
+from waflib import Errors, Utils
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+
+from waflib.Tools import xlc # method xlc_common_flags
+from waflib.Tools.compiler_c import c_compiler
+c_compiler['linux'].append('c_nec')
+
@conf
def find_sxc(conf):
	# locate the NEC SX C compiler and record its identity
	compiler = conf.find_program(['sxcc'], var='CC')
	conf.get_sxc_version(compiler)
	conf.env.CC = compiler
	conf.env.CC_NAME = 'sxcc'
+
@conf
def get_sxc_version(conf, fc):
	"""
	Parse the version reported by ``sxcc -V`` into conf.env.C_VERSION.

	The version banner may appear on stdout or stderr, so both streams
	are inspected.
	"""
	search = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
	cmd = fc + ['-V']
	try:
		out, err = conf.cmd_and_log(cmd, output=0)
	except Errors.WafError:
		conf.fatal('Could not determine an sxcc version %r' % cmd)

	match = search(out) if out else search(err)
	if not match:
		conf.fatal('Could not determine the NEC C compiler version.')
	groups = match.groupdict()
	conf.env['C_VERSION'] = (groups['major'], groups['minor'])
+
@conf
def sxc_common_flags(conf):
	"""Define the flag patterns shared by all sxcc-based builds."""
	v = conf.env

	# compilation
	v['CC_SRC_F'] = []
	v['CC_TGT_F'] = ['-c', '-o']

	# linking
	if not v['LINK_CC']:
		v['LINK_CC'] = v['CC']
	v['CCLNK_SRC_F'] = []
	v['CCLNK_TGT_F'] = ['-o']

	# command-line fragments
	v['CPPPATH_ST'] = '-I%s'
	v['DEFINES_ST'] = '-D%s'
	v['LIB_ST'] = '-l%s'
	v['LIBPATH_ST'] = '-L%s'
	v['STLIB_ST'] = '-l%s'
	v['STLIBPATH_ST'] = '-L%s'
	v['RPATH_ST'] = ''
	v['SONAME_ST'] = []
	v['SHLIB_MARKER'] = []
	v['STLIB_MARKER'] = []

	# per-target-kind settings
	v['LINKFLAGS_cprogram'] = ['']
	v['cprogram_PATTERN'] = '%s'
	v['CFLAGS_cshlib'] = ['-fPIC']
	v['LINKFLAGS_cshlib'] = ['']
	v['cshlib_PATTERN'] = 'lib%s.so'
	v['LINKFLAGS_cstlib'] = []
	v['cstlib_PATTERN'] = 'lib%s.a'
+
def configure(conf):
	"""
	Configuration entry point for the NEC SX C compiler.
	"""
	conf.find_sxc()
	# fix: the keyword is 'var' (lowercase); the previous 'VAR' keyword was
	# silently ignored by find_program, leaving conf.env.AR unset
	conf.find_program('sxar', var='AR')
	conf.sxc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()

+ 110 - 0
sdk/waf/waflib/extras/cfg_altoptions.py

@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Tool to extend c_config.check_cfg()
+
+__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
+__copyright__ = "Jérôme Carretero, 2014"
+
+"""
+
+This tool allows to work around the absence of ``*-config`` programs
+on systems, by keeping the same clean configuration syntax but inferring
+values or permitting their modification via the options interface.
+
+Note that pkg-config can also support setting ``PKG_CONFIG_PATH``,
+so you can put custom files in a folder containing new .pc files.
+This tool could also be implemented by taking advantage of this fact.
+
+Usage::
+
   def options(opt):
     opt.load('cfg_altoptions')
     opt.add_package_option('package')

   def configure(conf):
     conf.load('cfg_altoptions')
     conf.check_cfg(...)
+
+Known issues:
+
+- Behavior with different build contexts...
+
+"""
+
+import os
+import functools
+from waflib import Configure, Options, Errors
+
def name_to_dest(x):
	# option-destination form of a package name: lowercase, '-' becomes '_'
	lowered = x.lower()
	return lowered.replace('-', '_')
+
+
def options(opt):
	"""
	Attach an ``add_package_option`` helper to the option context; each
	registered package gains a ``--<name>-root`` command-line option.
	"""
	def add_package_option(opt, param):
		dest = name_to_dest(param)
		group = opt.get_option_group("Configuration options")
		group.add_option(
			'--%s-root' % dest,
			help="path containing include and lib subfolders for %s" % param,
		)

	opt.add_package_option = functools.partial(add_package_option, opt)
+
+
# keep a reference to the stock implementation so the wrapper below can
# delegate to it when no --<pkg>-root override is given
check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')
+
@Configure.conf
def check_cfg(conf, *k, **kw):
	"""
	Wrapper over the stock ``check_cfg``.

	If the user passed ``--<package>-root=<dir>`` on the command line, the
	usual ``*-config``/pkg-config lookup is skipped and the INCLUDES,
	LIBPATH and LIB variables are derived from the given root directory
	instead; otherwise the original implementation is called unchanged.
	"""
	# positional form: first word is the package, the rest are arguments
	if k:
		lst = k[0].split()
		kw['package'] = lst[0]
		kw['args'] = ' '.join(lst[1:])

	if not 'package' in kw:
		return check_cfg_old(conf, **kw)

	package = kw['package']

	# <pkg>_root option name (lowercase) and uselib store name (uppercase)
	package_lo = name_to_dest(package)
	package_hi = package.upper().replace('-', '_') # TODO FIXME
	package_hi = kw.get('uselib_store', package_hi)

	def check_folder(path, name):
		# ensure the derived directory actually exists before recording it
		try:
			assert os.path.isdir(path)
		except AssertionError:
			raise Errors.ConfigurationError(
				"%s_%s (%s) is not a folder!" \
				% (package_lo, name, path))
		return path

	root = getattr(Options.options, '%s_root' % package_lo, None)

	if root is None:
		# no override given: defer to the stock check_cfg
		return check_cfg_old(conf, **kw)
	else:
		def add_manual_var(k, v):
			# record a configuration variable while logging what was set
			conf.start_msg('Adding for %s a manual var' % (package))
			conf.env["%s_%s" % (k, package_hi)] = v
			conf.end_msg("%s = %s" % (k, v))


		check_folder(root, 'root')

		# conventional layout: <root>/include and <root>/lib
		pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
		add_manual_var('INCLUDES', [pkg_inc])
		pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
		add_manual_var('LIBPATH', [pkg_lib])
		add_manual_var('LIB', [package])

		# propagate the variables of manually-specified dependencies
		for x in kw.get('manual_deps', []):
			for k, v in sorted(conf.env.get_merged_dict().items()):
				if k.endswith('_%s' % x):
					k = k.replace('_%s' % x, '')
					conf.start_msg('Adding for %s a manual dep' \
					 %(package))
					conf.env["%s_%s" % (k, package_hi)] += v
					conf.end_msg('%s += %s' % (k, v))

		return True

+ 124 - 0
sdk/waf/waflib/extras/clang_cl.py

@@ -0,0 +1,124 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# vim: sw=4 ts=4 noexpandtab
+
+"""
+LLVM Clang-CL support.
+
+Clang-CL is supposed to be a drop-in replacement for MSVC CL, but also serves
+well as a cross compiler for Windows from Linux (provided you have set up the
+environment). Requires Visual Studio 2015+ to be installed.
+
+On Windows it uses (most) MSVC tools.
+
+Usage:
+	$ waf configure
+Or:
+	$ LLVM_PATH=C:\\Program Files\\LLVM\\bin waf configure
+Or:
+	def configure(self):
+		self.env.LLVM_PATH = 'C:\\Program Files\\LLVM\\bin'
+		self.load('clang_cl')
+"""
+
+import os
+
+from waflib import Utils, Errors, Logs
+from waflib.Configure import conf
+from waflib.Tools import msvc
+
def options(opt):
	# expose the standard MSVC command-line options (--msvc_version, ...)
	msvc.options(opt)
+
@conf
def get_llvm_paths(self):
	"""
	Return the candidate directories for the LLVM binaries: the Windows
	registry install location (if any), then LLVM_PATH (process environment
	or conf.env), then the regular PATH.
	"""
	llvm_path = []
	if Utils.is_win32:
		llvm_key = None
		# try the 32-on-64 registry view first, then the native one
		for key_name in ('SOFTWARE\\Wow6432Node\\LLVM\\LLVM', 'SOFTWARE\\LLVM\\LLVM'):
			try:
				llvm_key = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, key_name)
			except OSError:
				continue
			else:
				break

		if llvm_key:
			llvm_dir, _ = Utils.winreg.QueryValueEx(llvm_key, '')
			if llvm_dir:
				llvm_path.append(os.path.join(llvm_dir, 'bin'))

	tmp = self.environ.get('LLVM_PATH') or self.env.LLVM_PATH
	if tmp:
		llvm_path.append(tmp)
	llvm_path += self.env.PATH
	return llvm_path
+
@conf
def find_clang_cl(self):
	"""
	Locate clang-cl and use it for both C and C++; on non-Windows hosts
	also force an msvc-compatible setup with lld-link as the linker.
	"""
	del(self.env.CC)
	del(self.env.CXX)

	search_paths = self.get_llvm_paths()
	compiler = self.find_program('clang-cl', var='CC', path_list=search_paths)
	self.env.CC = self.env.CXX = compiler
	self.env.CC_NAME_SECONDARY = self.env.CXX_NAME_SECONDARY = 'clang'

	if Utils.is_win32:
		return

	# cross build from a non-Windows host: pretend to be a recent msvc
	self.env.MSVC_COMPILER = 'msvc'
	self.env.MSVC_VERSION = 19

	if not self.env.LINK_CXX:
		self.find_program('lld-link', path_list=search_paths, errmsg='lld-link was not found (linker)', var='LINK_CXX')

	if not self.env.LINK_CC:
		self.env.LINK_CC = self.env.LINK_CXX
+
@conf
def find_llvm_tools(self):
	"""
	Find the librarian, manifest tool, and resource compiler.
	"""
	self.env.CC_NAME = self.env.CXX_NAME = 'msvc'

	# fix: a previous 'paths = self.get_llvm_paths()' here was dead code,
	# unconditionally overwritten by the computation below; it is removed.
	# NOTE(review): as a consequence the registry install location is not
	# searched by this function, only LLVM_PATH and PATH -- confirm intended.
	llvm_path = self.environ.get('LLVM_PATH') or self.env.LLVM_PATH
	if llvm_path:
		paths = [llvm_path] + self.env.PATH
	else:
		paths = self.env.PATH

	if not self.env.AR:
		stliblink = self.find_program('llvm-lib', path_list=paths, var='AR')
		if not stliblink:
			self.fatal('Unable to find required program "llvm-lib"')

		self.env.ARFLAGS = ['/nologo']

	# We assume clang_cl to only be used with relatively new MSVC installations.
	self.env.MSVC_MANIFEST = True
	self.find_program('llvm-mt', path_list=paths, var='MT')
	self.env.MTFLAGS = ['/nologo']

	try:
		self.load('winres')
	except Errors.ConfigurationError:
		Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+
def configure(self):
	"""
	Configuration for clang-cl: on Windows reuse the MSVC autodetection,
	elsewhere set up the llvm replacement tools, then find the compiler
	and install the common MSVC-style flags.
	"""
	if Utils.is_win32:
		self.autodetect(True)
		self.find_msvc()
	else:
		self.find_llvm_tools()

	self.find_clang_cl()
	self.msvc_common_flags()
	for step in (self.cc_load_tools, self.cxx_load_tools, self.cc_add_flags,
			self.cxx_add_flags, self.link_add_flags, self.visual_studio_add_flags):
		step()

+ 137 - 0
sdk/waf/waflib/extras/clang_compilation_database.py

@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Christoph Koke, 2013
+# Alibek Omarov, 2019
+
+"""
+Writes the c and cpp compile commands into build/compile_commands.json
+see http://clang.llvm.org/docs/JSONCompilationDatabase.html
+
+Usage:
+
+	Load this tool in `options` to be able to generate database
+	by request in command-line and before build:
+
+	$ waf clangdb
+
+	def options(opt):
+		opt.load('clang_compilation_database')
+
+	Otherwise, load only in `configure` to generate it always before build.
+
+	def configure(conf):
+		conf.load('compiler_cxx')
+		...
+		conf.load('clang_compilation_database')
+"""
+
+from waflib import Logs, TaskGen, Task, Build, Scripting
+
# keep the last executed command line on every task so it can be exported
Task.Task.keep_last_cmd = True
+
class ClangDbContext(Build.BuildContext):
	'''generates compile_commands.json by request'''
	cmd = 'clangdb'

	def write_compilation_database(self):
		"""
		Write the clang compilation database as JSON
		"""
		database_file = self.bldnode.make_node('compile_commands.json')
		Logs.info('Build commands will be stored in %s', database_file.path_from(self.path))
		# merge into any pre-existing database so unrelated entries survive
		try:
			root = database_file.read_json()
		except IOError:
			root = []
		clang_db = dict((x['file'], x) for x in root)
		for task in self.clang_compilation_database_tasks:
			# tasks that were never 'run' have no recorded command line
			try:
				cmd = task.last_cmd
			except AttributeError:
				continue
			f_node = task.inputs[0]
			filename = f_node.path_from(task.get_cwd())
			entry = {
				"directory": task.get_cwd().abspath(),
				"arguments": cmd,
				"file": filename,
			}
			clang_db[filename] = entry
		root = list(clang_db.values())
		database_file.write_json(root)

	def execute(self):
		"""
		Build dry run: post the task generators and 'run' each c/cxx task
		with a no-op exec_command so that every task records its command
		line (last_cmd) without actually compiling anything.
		"""
		self.restore()
		self.cur_tasks = []
		self.clang_compilation_database_tasks = []

		if not self.all_envs:
			self.load_envs()

		self.recurse([self.run_dir])
		self.pre_build()

		# we need only to generate last_cmd, so override
		# exec_command temporarily
		def exec_command(self, *k, **kw):
			return 0

		for g in self.groups:
			for tg in g:
				try:
					f = tg.post
				except AttributeError:
					pass
				else:
					f()

				if isinstance(tg, Task.Task):
					lst = [tg]
				else: lst = tg.tasks
				for tsk in lst:
					# swig tasks may spawn more tasks when their status is checked
					if tsk.__class__.__name__ == "swig":
						tsk.runnable_status()
						if hasattr(tsk, 'more_tasks'):
							lst.extend(tsk.more_tasks)
					# Not all dynamic tasks can be processed, in some cases
					# one may have to call the method "run()" like this:
					#elif tsk.__class__.__name__ == 'src2c':
					#	tsk.run()
					#	if hasattr(tsk, 'more_tasks'):
					#		lst.extend(tsk.more_tasks)

					tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
					if isinstance(tsk, tup):
						self.clang_compilation_database_tasks.append(tsk)
						tsk.nocache = True
						old_exec = tsk.exec_command
						tsk.exec_command = exec_command
						tsk.run()
						tsk.exec_command = old_exec

		self.write_compilation_database()
+
# guard so the build hook is installed exactly once even if this tool is
# loaded several times
EXECUTE_PATCHED = False
def patch_execute():
	global EXECUTE_PATCHED

	if EXECUTE_PATCHED:
		return

	def new_execute_build(self):
		"""
		Invoke clangdb command before build
		"""
		# run 'clangdb' (or 'clangdb_<variant>') before the real build
		if self.cmd.startswith('build'):
			Scripting.run_command(self.cmd.replace('build','clangdb'))

		old_execute_build(self)

	old_execute_build = getattr(Build.BuildContext, 'execute_build', None)
	setattr(Build.BuildContext, 'execute_build', new_execute_build)
	EXECUTE_PATCHED = True

patch_execute()

+ 92 - 0
sdk/waf/waflib/extras/clang_cross.py

@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Krzysztof Kosiński 2014
+# DragoonX6 2018
+
+"""
+Detect the Clang C compiler
+This version is an attempt at supporting the -target and -sysroot flag of Clang.
+"""
+
+from waflib.Tools import ccroot, ar, gcc
+from waflib.Configure import conf
+import waflib.Context
+import waflib.extras.clang_cross_common
+
def options(opt):
	"""
	Command-line options for cross-clang::
			$ waf configure --clang-target-triple=x86_64-pc-linux-gnu
	"""
	group = opt.add_option_group('Configuration options')
	group.add_option('--clang-target-triple', default=None,
		help='Target triple for clang',
		dest='clang_target_triple')
	group.add_option('--clang-sysroot', default=None,
		help='Sysroot for clang',
		dest='clang_sysroot')
+
@conf
def find_clang(conf):
	"""
	Find the program clang, executing it to ensure it really is clang.

	Honours --clang-target-triple and --clang-sysroot; a relative sysroot
	is resolved against the current working directory.
	"""
	import os

	cc = conf.find_program('clang', var='CC')

	# idiom fix: compare against None with 'is not', not '!='
	if conf.options.clang_target_triple is not None:
		conf.env.append_value('CC', ['-target', conf.options.clang_target_triple])

	if conf.options.clang_sysroot is not None:
		if os.path.isabs(conf.options.clang_sysroot):
			sysroot = conf.options.clang_sysroot
		else:
			sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clang_sysroot))

		conf.env.append_value('CC', ['--sysroot', sysroot])

	conf.get_cc_version(cc, clang=True)
	conf.env.CC_NAME = 'clang'
+
@conf
def clang_modifier_x86_64_w64_mingw32(conf):
	# 64-bit MinGW target: reuse the gcc win32 platform settings
	conf.gcc_modifier_win32()
+
@conf
def clang_modifier_i386_w64_mingw32(conf):
	# 32-bit MinGW target: reuse the gcc win32 platform settings
	conf.gcc_modifier_win32()
+
@conf
def clang_modifier_x86_64_windows_msvc(conf):
	conf.clang_modifier_msvc()

	# A project may define clang_modifier_x86_64_windows_msvc_user on the
	# configuration context to override any of the flags set above.
	user_hook = getattr(conf, 'clang_modifier_x86_64_windows_msvc_user', None)
	if user_hook:
		user_hook()
+
@conf
def clang_modifier_i386_windows_msvc(conf):
	conf.clang_modifier_msvc()

	# A project may define clang_modifier_i386_windows_msvc_user on the
	# configuration context to override any of the flags set above.
	user_hook = getattr(conf, 'clang_modifier_i386_windows_msvc_user', None)
	if user_hook:
		user_hook()
+
def configure(conf):
	# detect clang plus an archiver, then set up the gcc-style C flags
	conf.find_clang()
	conf.find_program(['llvm-ar', 'ar'], var='AR')
	conf.find_ar()
	conf.gcc_common_flags()
	# Allow the user to provide flags for the target platform.
	conf.gcc_modifier_platform()
	# And allow more fine grained control based on the compiler's triplet.
	conf.clang_modifier_target_triple()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()

+ 113 - 0
sdk/waf/waflib/extras/clang_cross_common.py

@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# DragoonX6 2018
+
+"""
+Common routines for cross_clang.py and cross_clangxx.py
+"""
+
+from waflib.Configure import conf
+import waflib.Context
+
def normalize_target_triple(target_triple):
	"""
	Turn a clang ``-dumpmachine`` triple into an identifier fragment:
	missing parts become 'unknown', mingw/msvc spellings are canonicalized,
	and '-' is mapped to '_' so the result can name a python function.
	"""
	# drop the trailing newline of the -dumpmachine output
	triple = target_triple[:-1].replace('--', '-unknown-')

	if triple.startswith('-'):
		triple = 'unknown' + triple
	if triple.endswith('-'):
		triple = triple + 'unknown'

	# Normalize MinGW builds to *arch*-w64-mingw32
	if triple.endswith('windows-gnu'):
		triple = triple[:triple.index('-')] + '-w64-mingw32'
	# Strip the vendor when doing msvc builds, since it's unused anyway.
	elif triple.endswith('windows-msvc'):
		triple = triple[:triple.index('-')] + '-windows-msvc'

	return triple.replace('-', '_')
+
@conf
def clang_modifier_msvc(conf):
	"""
	Really basic setup to use clang in msvc mode.
	We actually don't really want to do a lot, even though clang is msvc compatible
	in this mode, that doesn't mean we're actually using msvc.
	It's probably the best to leave it to the user, we can assume msvc mode if the user
	uses the clang-cl frontend, but this module only concerns itself with the gcc-like frontend.
	"""
	# fix: the text above previously followed an unused 'import os', so it
	# was a stray string expression rather than the docstring; the dead
	# import is removed and the string promoted to a real docstring
	v = conf.env
	v.cprogram_PATTERN = '%s.exe'

	v.cshlib_PATTERN   = '%s.dll'
	v.implib_PATTERN   = '%s.lib'
	v.IMPLIB_ST        = '-Wl,-IMPLIB:%s'
	v.SHLIB_MARKER     = []

	v.CFLAGS_cshlib    = []
	v.LINKFLAGS_cshlib = ['-Wl,-DLL']
	v.cstlib_PATTERN   = '%s.lib'
	v.STLIB_MARKER     = []

	del(v.AR)
	conf.find_program(['llvm-lib', 'lib'], var='AR')
	v.ARFLAGS          = ['-nologo']
	v.AR_TGT_F         = ['-out:']

	# Default to the linker supplied with llvm instead of link.exe or ld
	v.LINK_CC          = v.CC + ['-fuse-ld=lld', '-nostdlib']
	v.CCLNK_TGT_F      = ['-o']
	v.def_PATTERN      = '-Wl,-def:%s'

	v.LINKFLAGS = []

	v.LIB_ST            = '-l%s'
	v.LIBPATH_ST        = '-Wl,-LIBPATH:%s'
	v.STLIB_ST          = '-l%s'
	v.STLIBPATH_ST      = '-Wl,-LIBPATH:%s'

	# flags shared by all C runtime variants
	CFLAGS_CRT_COMMON = [
		'-Xclang', '--dependent-lib=oldnames',
		'-Xclang', '-fno-rtti-data',
		'-D_MT'
	]

	v.CFLAGS_CRT_MULTITHREADED = CFLAGS_CRT_COMMON + [
		'-Xclang', '-flto-visibility-public-std',
		'-Xclang', '--dependent-lib=libcmt',
	]
	v.CXXFLAGS_CRT_MULTITHREADED = v.CFLAGS_CRT_MULTITHREADED

	v.CFLAGS_CRT_MULTITHREADED_DBG = CFLAGS_CRT_COMMON + [
		'-D_DEBUG',
		'-Xclang', '-flto-visibility-public-std',
		'-Xclang', '--dependent-lib=libcmtd',
	]
	v.CXXFLAGS_CRT_MULTITHREADED_DBG = v.CFLAGS_CRT_MULTITHREADED_DBG

	v.CFLAGS_CRT_MULTITHREADED_DLL = CFLAGS_CRT_COMMON + [
		'-D_DLL',
		'-Xclang', '--dependent-lib=msvcrt'
	]
	v.CXXFLAGS_CRT_MULTITHREADED_DLL = v.CFLAGS_CRT_MULTITHREADED_DLL

	v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = CFLAGS_CRT_COMMON + [
		'-D_DLL',
		'-D_DEBUG',
		'-Xclang', '--dependent-lib=msvcrtd',
	]
	v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CFLAGS_CRT_MULTITHREADED_DLL_DBG
+
@conf
def clang_modifier_target_triple(conf, cpp=False):
	"""
	Query the compiler for its target triple and invoke the matching
	(clang|clangxx)_modifier_<triple> hook when one is defined.
	"""
	tool = conf.env.CXX if cpp else conf.env.CC
	triple = conf.cmd_and_log(tool + ['-dumpmachine'], output=waflib.Context.STDOUT)

	prefix = 'clangxx' if cpp else 'clang'
	hook = getattr(conf, '%s_modifier_%s' % (prefix, normalize_target_triple(triple)), None)
	if hook:
		hook()

+ 106 - 0
sdk/waf/waflib/extras/clangxx_cross.py

@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy 2009-2018 (ita)
+# DragoonX6 2018
+
+"""
+Detect the Clang++ C++ compiler
+This version is an attempt at supporting the -target and -sysroot flag of Clang++.
+"""
+
+from waflib.Tools import ccroot, ar, gxx
+from waflib.Configure import conf
+import waflib.extras.clang_cross_common
+
def options(opt):
	"""
	Command-line options for cross-clang++::
			$ waf configure --clangxx-target-triple=x86_64-pc-linux-gnu
	"""
	group = opt.add_option_group('Configuration options')
	group.add_option('--clangxx-target-triple', default=None,
		help='Target triple for clang++',
		dest='clangxx_target_triple')
	group.add_option('--clangxx-sysroot', default=None,
		help='Sysroot for clang++',
		dest='clangxx_sysroot')
+
@conf
def find_clangxx(conf):
	"""
	Find the program clang++, executing it to ensure it really is clang++.

	Honours --clangxx-target-triple and --clangxx-sysroot; a relative
	sysroot is resolved against the current working directory.
	"""
	import os

	cxx = conf.find_program('clang++', var='CXX')

	# idiom fix: compare against None with 'is not', not '!='
	if conf.options.clangxx_target_triple is not None:
		conf.env.append_value('CXX', ['-target', conf.options.clangxx_target_triple])

	if conf.options.clangxx_sysroot is not None:
		if os.path.isabs(conf.options.clangxx_sysroot):
			sysroot = conf.options.clangxx_sysroot
		else:
			sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clangxx_sysroot))

		conf.env.append_value('CXX', ['--sysroot', sysroot])

	conf.get_cc_version(cxx, clang=True)
	conf.env.CXX_NAME = 'clang'
+
@conf
def clangxx_modifier_x86_64_w64_mingw32(conf):
	# 64-bit MinGW target: reuse the gcc win32 platform settings
	conf.gcc_modifier_win32()
+
@conf
def clangxx_modifier_i386_w64_mingw32(conf):
	# 32-bit MinGW target: reuse the gcc win32 platform settings
	conf.gcc_modifier_win32()
+
@conf
def clangxx_modifier_msvc(conf):
	# mirror the C msvc-mode settings onto the C++ variables
	env = conf.env
	env.cxxprogram_PATTERN = env.cprogram_PATTERN
	env.cxxshlib_PATTERN = env.cshlib_PATTERN

	env.CXXFLAGS_cxxshlib = []
	env.LINKFLAGS_cxxshlib = env.LINKFLAGS_cshlib
	env.cxxstlib_PATTERN = env.cstlib_PATTERN

	# link with the llvm-provided linker, bypassing the standard libraries
	env.LINK_CXX = env.CXX + ['-fuse-ld=lld', '-nostdlib']
	env.CXXLNK_TGT_F = env.CCLNK_TGT_F
+
@conf
def clangxx_modifier_x86_64_windows_msvc(conf):
	conf.clang_modifier_msvc()
	conf.clangxx_modifier_msvc()

	# A project may define clangxx_modifier_x86_64_windows_msvc_user on the
	# configuration context to override any of the flags set above.
	user_hook = getattr(conf, 'clangxx_modifier_x86_64_windows_msvc_user', None)
	if user_hook:
		user_hook()
+
@conf
def clangxx_modifier_i386_windows_msvc(conf):
	conf.clang_modifier_msvc()
	conf.clangxx_modifier_msvc()

	# A project may define clangxx_modifier_i386_windows_msvc_user on the
	# configuration context to override any of the flags set above.
	user_hook = getattr(conf, 'clangxx_modifier_i386_windows_msvc_user', None)
	if user_hook:
		user_hook()
+
def configure(conf):
	# detect clang++ plus an archiver, then set up the gcc-style C++ flags
	conf.find_clangxx()
	conf.find_program(['llvm-ar', 'ar'], var='AR')
	conf.find_ar()
	conf.gxx_common_flags()
	# Allow the user to provide flags for the target platform.
	conf.gxx_modifier_platform()
	# And allow more fine grained control based on the compiler's triplet.
	conf.clang_modifier_target_triple(cpp=True)
	conf.cxx_load_tools()
	conf.cxx_add_flags()
	conf.link_add_flags()

+ 875 - 0
sdk/waf/waflib/extras/codelite.py

@@ -0,0 +1,875 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# CodeLite Project
+# Christian Klein (chrikle@berlios.de)
+# Created: Jan 2012
# As a template for this file I used msvs.py
# I hope this template will work properly
+
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+   derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+"""
+ 
+
+To add this tool to your project:
def options(opt):
+        opt.load('codelite')
+
+It can be a good idea to add the sync_exec tool too.
+
+To generate solution files:
+$ waf configure codelite
+
+To customize the outputs, provide subclasses in your wscript files:
+
+from waflib.extras import codelite
+class vsnode_target(codelite.vsnode_target):
+        def get_build_command(self, props):
+                # likely to be required
+                return "waf.bat build"
+        def collect_source(self):
+                # likely to be required
+                ...
+class codelite_bar(codelite.codelite_generator):
+        def init(self):
+                codelite.codelite_generator.init(self)
+                self.vsnode_target = vsnode_target
+
+The codelite class re-uses the same build() function for reading the targets (task generators),
+you may therefore specify codelite settings on the context object:
+
+def build(bld):
+        bld.codelite_solution_name = 'foo.workspace'
+        bld.waf_command = 'waf.bat'
+        bld.projects_dir = bld.srcnode.make_node('')
+        bld.projects_dir.mkdir()
+
+
+ASSUMPTIONS:
+* a project can be either a directory or a target, project files are written only for targets that have source files
+* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
+"""
+
+import os, re, sys
+import uuid # requires python 2.5
+from waflib.Build import BuildContext
+from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
+
+HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
+
+PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
+<CodeLite_Project Name="${project.name}" InternalType="Library">
+  <Plugins>
+    <Plugin Name="qmake">
+      <![CDATA[00010001N0005Release000000000000]]>
+    </Plugin>
+  </Plugins>
+  <Description/>
+  <Dependencies/>
+  <VirtualDirectory Name="src">
+  ${for x in project.source}  
+  ${if (project.get_key(x)=="sourcefile")}
+  <File Name="${x.abspath()}"/>
+  ${endif}
+  ${endfor}  
+  </VirtualDirectory>
+  <VirtualDirectory Name="include">  
+  ${for x in project.source}
+  ${if (project.get_key(x)=="headerfile")}
+  <File Name="${x.abspath()}"/>
+  ${endif}
+  ${endfor}
+  </VirtualDirectory>  
+  <Settings Type="Dynamic Library">
+    <GlobalSettings>
+      <Compiler Options="" C_Options="">
+        <IncludePath Value="."/>
+      </Compiler>
+      <Linker Options="">
+        <LibraryPath Value="."/>
+      </Linker>
+      <ResourceCompiler Options=""/>
+    </GlobalSettings>
+    <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+      <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
+        <IncludePath Value="."/>
+        <IncludePath Value="."/>
+      </Compiler>
+      <Linker Options="" Required="yes">
+        <LibraryPath Value=""/>
+      </Linker>
+      <ResourceCompiler Options="" Required="no"/>
+      <General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/>
+      <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+        <![CDATA[]]>
+      </Environment>
+      <Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
+        <PostConnectCommands/>
+        <StartupCommands/>
+      </Releaseger>
+      <PreBuild/>
+      <PostBuild/>
+      <CustomBuild Enabled="yes">
+        $b = project.build_properties[0]}
+        <RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
+        <CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
+        <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand> 
+        <Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
+        <Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>        
+        <Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
+        <Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target>
+        <Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target>
+        <Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target>
+        <PreprocessFileCommand/>
+        <SingleFileCommand/>
+        <MakefileGenerationCommand/>
+        <ThirdPartyToolName>None</ThirdPartyToolName>
+        <WorkingDirectory/>
+      </CustomBuild>
+      <AdditionalRules>
+        <CustomPostBuild/>
+        <CustomPreBuild/>
+      </AdditionalRules>
+      <Completion>
+        <ClangCmpFlags/>
+        <ClangPP/>
+        <SearchPaths/>
+      </Completion>
+    </Configuration>
+    <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
+      <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
+        <IncludePath Value="."/>
+      </Compiler>
+      <Linker Options="" Required="yes"/>
+      <ResourceCompiler Options="" Required="no"/>
+      <General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateReleaseArgs="no" ReleaseArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/>
+      <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
+        <![CDATA[
+      
+      
+      
+      ]]>
+      </Environment>
+      <Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
+        <PostConnectCommands/>
+        <StartupCommands/>
+      </Releaseger>
+      <PreBuild/>
+      <PostBuild/>
+      <CustomBuild Enabled="no">
+        <RebuildCommand/>
+        <CleanCommand/>
+        <BuildCommand/>
+        <PreprocessFileCommand/>
+        <SingleFileCommand/>
+        <MakefileGenerationCommand/>
+        <ThirdPartyToolName/>
+        <WorkingDirectory/>
+      </CustomBuild>
+      <AdditionalRules>
+        <CustomPostBuild/>
+        <CustomPreBuild/>
+      </AdditionalRules>
+      <Completion>
+        <ClangCmpFlags/>
+        <ClangPP/>
+        <SearchPaths/>
+      </Completion>
+    </Configuration>
+  </Settings>
+</CodeLite_Project>'''
+
+
+
+
# workspace-level template: one <Project> entry per generated project plus
# a 'Release' build-matrix configuration for each of them
SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
${for p in project.all_projects}
  <Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
${endfor}
  <BuildMatrix>
    <WorkspaceConfiguration Name="Release" Selected="yes">
${for p in project.all_projects}
      <Project Name="${p.name}" ConfigName="Release"/>        
${endfor}
    </WorkspaceConfiguration>        
  </BuildMatrix>
</CodeLite_Workspace>'''
+
+
+
# skeleton of the function produced by compile_template(); the %s slot
# receives the generated 'lst.append(...)' statements
COMPILE_TEMPLATE = '''def f(project):
        lst = []
        def xml_escape(value):
                return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")

        %s

        #f = open('cmd.txt', 'w')
        #f.write(str(lst))
        #f.close()
        return ''.join(lst)
'''
# matches escaped backslashes, '$$', and '${...}' substitution expressions
reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
def compile_template(line):
        """
        Compile a template expression into a python function (like jsps, but way shorter)
        """
        extr = []
        def repl(match):
                # unescape literals; stash ${...} code and leave a placeholder
                g = match.group
                if g('dollar'):
                        return "$"
                elif g('backslash'):
                        return "\\"
                elif g('subst'):
                        extr.append(g('code'))
                        return "<<|@|>>"
                return None

        line2 = reg_act.sub(repl, line)
        params = line2.split('<<|@|>>')
        assert(extr)


        indent = 0
        buf = []

        # fix: a previous 'app = buf.append' binding here was dead code,
        # immediately shadowed by the def below
        def app(txt):
                buf.append(indent * '\t' + txt)

        # interleave literal chunks with the extracted code fragments
        for x in range(len(extr)):
                if params[x]:
                        app("lst.append(%r)" % params[x])

                f = extr[x]
                if f.startswith(('if', 'for')):
                        app(f + ':')
                        indent += 1
                elif f.startswith('py:'):
                        app(f[3:])
                elif f.startswith(('endif', 'endfor')):
                        indent -= 1
                elif f.startswith(('else', 'elif')):
                        indent -= 1
                        app(f + ':')
                        indent += 1
                elif f.startswith('xml:'):
                        app('lst.append(xml_escape(%s))' % f[4:])
                else:
                        #app('lst.append((%s) or "cannot find %s")' % (f, f))
                        app('lst.append(%s)' % f)

        if extr:
                if params[-1]:
                        app("lst.append(%r)" % params[-1])

        fun = COMPILE_TEMPLATE % "\n\t".join(buf)
        #print(fun)
        return Task.funex(fun)
+
+
# A run of blank lines / trailing whitespace ending in a newline
re_blank = re.compile(r'(\n|\r|\s)*\n', re.M)
def rm_blank_lines(txt):
	"""
	Collapse every run of blank lines (including trailing whitespace before a
	newline) in *txt* into a single Windows line ending.

	:param txt: text to normalize
	:return: the text with CRLF endings and no blank lines
	"""
	return re_blank.sub('\r\n', txt)
+
# UTF-8 byte order mark, prepended to .project files by stealth_write() below
BOM = '\xef\xbb\xbf'
try:
	BOM = bytes(BOM, 'latin-1') # python 3: turn the marker into raw bytes
except (TypeError, NameError):
	pass
+
def stealth_write(self, data, flags='wb'):
	"""
	Write *data* to this node only when the on-disk content differs, so that
	unchanged project files keep their timestamps; .project files get a UTF-8 BOM.

	:param data: file content (text; re-encoded to UTF-8 bytes)
	:param flags: mode passed to the underlying write
	"""
	try:
		unicode # raises NameError on python 3
	except NameError:
		data = data.encode('utf-8') # python 3
	else:
		# python 2: normalize through the filesystem encoding first
		data = data.decode(sys.getfilesystemencoding(), 'replace')
		data = data.encode('utf-8')

	if self.name.endswith('.project'):
		data = BOM + data

	try:
		# compare with the current content; any mismatch or read error → rewrite
		txt = self.read(flags='rb')
		if txt != data:
			raise ValueError('must write')
	except (IOError, ValueError):
		self.write(data, flags=flags)
	else:
		Logs.debug('codelite: skipping %r', self)
Node.Node.stealth_write = stealth_write
+
# Characters that are not safe inside a project name
re_quote = re.compile("[^a-zA-Z0-9-]")
def quote(s):
	"""Map every character outside [a-zA-Z0-9-] in *s* to an underscore."""
	return ''.join('_' if re_quote.match(ch) else ch for ch in s)
+
def xml_escape(value):
	"""Escape the five XML special characters in *value* ('&' must come first)."""
	for ch, entity in (('&', '&amp;'), ('"', '&quot;'), ("'", '&apos;'), ('<', '&lt;'), ('>', '&gt;')):
		value = value.replace(ch, entity)
	return value
+
def make_uuid(v, prefix = None):
	"""
	Return a deterministic upper-case UUID string derived from *v*;
	dict inputs are serialized with sorted keys so the result is stable.

	:param v: a dict or any value convertible to str
	:param prefix: optional string replacing the first 8 hex digits
	"""
	if isinstance(v, dict):
		tmp = str(sorted([(k, v[k]) for k in v.keys()]))
	else:
		tmp = str(v)
	d = Utils.md5(tmp.encode()).hexdigest().upper()
	if prefix:
		d = '%s%s' % (prefix, d[8:])
	return str(uuid.UUID(d, version = 4)).upper()
+
+def diff(node, fromnode):
+        # difference between two nodes, but with "(..)" instead of ".."
+        c1 = node
+        c2 = fromnode
+
+        c1h = c1.height()
+        c2h = c2.height()
+
+        lst = []
+        up = 0
+
+        while c1h > c2h:
+                lst.append(c1.name)
+                c1 = c1.parent
+                c1h -= 1
+
+        while c2h > c1h:
+                up += 1
+                c2 = c2.parent
+                c2h -= 1
+
+        while id(c1) != id(c2):
+                lst.append(c1.name)
+                up += 1
+
+                c1 = c1.parent
+                c2 = c2.parent
+
+        for i in range(up):
+                lst.append('(..)')
+        lst.reverse()
+        return tuple(lst)
+
class build_property(object):
	"""Bag of attributes describing one (configuration, platform) build variant."""
	pass
+
class vsnode(object):
	"""
	Base class for the elements written to the workspace files;
	each node carries a uuid and may have a parent for nesting.
	"""
	def __init__(self, ctx):
		# codelite context, mandatory name/uuid, IDE path, optional parent node
		self.ctx = ctx
		self.name = ''
		self.vspath = ''
		self.uuid = ''
		self.parent = None

	def get_waf(self):
		"""
		Return the command used to run waf; meant to be overridden.
		"""
		cmd = getattr(self.ctx, 'waf_command', 'waf')
		return '%s/%s' % (self.ctx.srcnode.abspath(), cmd)

	def ptype(self):
		"""
		Return a special uuid for projects written in the solution file.
		"""
		pass

	def write(self):
		"""
		Write the project file; the default implementation does nothing.
		"""
		pass

	def make_uuid(self, val):
		"""
		Alias for creating uuid values easily (the templates cannot access global variables).
		"""
		return make_uuid(val)
+
class vsnode_vsdir(vsnode):
	"""
	Virtual folder of the IDE view; does not map to a filesystem directory.
	"""
	VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"

	def __init__(self, ctx, uuid, name, vspath=''):
		vsnode.__init__(self, ctx)
		self.uuid = uuid
		self.name = self.title = name
		self.vspath = vspath or name

	def ptype(self):
		return self.VS_GUID_SOLUTIONFOLDER
+
class vsnode_project(vsnode):
	"""
	Base class for the CodeLite project elements: a project is writable
	and has a node representing the file to write to.
	"""
	VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"

	def ptype(self):
		return self.VS_GUID_VCPROJ

	def __init__(self, ctx, node):
		vsnode.__init__(self, ctx)
		self.path = node
		self.uuid = make_uuid(node.abspath())
		self.name = node.name
		self.title = self.path.abspath()
		self.source = [] # nodes of the source files
		self.build_properties = [] # nmake commands, output directory, etc

	def dirs(self):
		"""
		Return the parent folders of the source files (header files included),
		needed when writing the filters.
		"""
		folders = []
		def visit(node):
			if node.height() > self.tg.path.height() and node not in folders:
				folders.append(node)
				visit(node.parent)
		for src in self.source:
			visit(src.parent)
		return folders

	def write(self):
		"""Render the project template and write it (skipped when unchanged)."""
		Logs.debug('codelite: creating %r', self.path)
		render = compile_template(PROJECT_TEMPLATE)
		self.path.stealth_write(rm_blank_lines(render(self)))

	def get_key(self, node):
		"""Classify *node* for the source file list."""
		if node.name.endswith(('.cpp', '.c')):
			return 'sourcefile'
		return 'headerfile'

	def collect_properties(self):
		"""
		Fill self.build_properties with one record per
		(configuration, platform) pair.
		"""
		props = []
		for configuration in self.ctx.configurations:
			for platform in self.ctx.platforms:
				prop = build_property()
				prop.outdir = ''
				prop.configuration = configuration
				prop.platform = platform
				prop.preprocessor_definitions = ''
				prop.includes_search_path = ''
				# a "deploy_dir" attribute may be set as well
				props.append(prop)
		self.build_properties = props

	def get_build_params(self, props):
		return (self.get_waf(), '')

	def get_build_command(self, props):
		return "%s build %s" % self.get_build_params(props)

	def get_clean_command(self, props):
		return "%s clean %s" % self.get_build_params(props)

	def get_rebuild_command(self, props):
		return "%s clean build %s" % self.get_build_params(props)

	def get_install_command(self, props):
		return "%s install %s" % self.get_build_params(props)

	def get_build_and_install_command(self, props):
		return "%s build install %s" % self.get_build_params(props)

	def get_build_and_install_all_command(self, props):
		return "%s build install" % self.get_build_params(props)[0]

	def get_clean_all_command(self, props):
		return "%s clean" % self.get_build_params(props)[0]

	def get_build_all_command(self, props):
		return "%s build" % self.get_build_params(props)[0]

	def get_rebuild_all_command(self, props):
		return "%s clean build" % self.get_build_params(props)[0]

	def get_filter_name(self, node):
		return '\\'.join(diff(node, self.tg.path)) or '.'
+
class vsnode_alias(vsnode_project):
	"""Project bound to an arbitrary name instead of a task generator."""
	def __init__(self, ctx, node, name):
		vsnode_project.__init__(self, ctx, node)
		self.name = name
		self.output_file = ''
+
class vsnode_build_all(vsnode_alias):
	"""
	Pseudo-project emulating "make all" (starting one process per target is slow);
	this is the only alias enabled by default.
	"""
	def __init__(self, ctx, node, name='build_all_projects'):
		vsnode_alias.__init__(self, ctx, node, name)
		self.is_active = True
+
class vsnode_install_all(vsnode_alias):
	"""
	Pseudo-project emulating the behaviour of "make install".
	"""
	def __init__(self, ctx, node, name='install_all_projects'):
		vsnode_alias.__init__(self, ctx, node, name)

	def get_build_command(self, props):
		return "%s build install %s" % self.get_build_params(props)

	def get_clean_command(self, props):
		return "%s clean %s" % self.get_build_params(props)

	def get_rebuild_command(self, props):
		return "%s clean build install %s" % self.get_build_params(props)
+
class vsnode_project_view(vsnode_alias):
	"""
	Pseudo-project presenting a plain file system view of the sources.
	"""
	def __init__(self, ctx, node, name='project_view'):
		vsnode_alias.__init__(self, ctx, node, name)
		self.tg = self.ctx() # fake one, cannot remove
		self.exclude_files = Node.exclude_regs + '''
waf-2*
waf3-2*/**
.waf-2*
.waf3-2*/**
**/*.sdf
**/*.suo
**/*.ncb
**/%s
		''' % Options.lockfile

	def collect_source(self):
		# globbing the whole tree is likely to be slow
		self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)

	def get_build_command(self, props):
		waf, opt = self.get_build_params(props)
		return "%s %s %s" % (waf, opt, self.ctx.cmd)

	def get_clean_command(self, props):
		return ""

	def get_rebuild_command(self, props):
		return self.get_build_command(props)
+
class vsnode_target(vsnode_project):
	"""
	CodeLite project representing a target (program, library, ...) and bound
	to a task generator.
	"""
	def __init__(self, ctx, tg):
		"""
		A project is more or less equivalent to a file/folder.
		"""
		base = getattr(ctx, 'projects_dir', None) or tg.path
		node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
		vsnode_project.__init__(self, ctx, node)
		self.name = quote(tg.name)
		self.tg = tg # task generator

	def get_build_params(self, props):
		"""
		Extend the default command line with the --targets option.
		"""
		opt = ''
		if getattr(self, 'tg', None):
			opt += " --targets=%s" % self.tg.name
		return (self.get_waf(), opt)

	def collect_source(self):
		"""Gather the source files plus the headers below the include paths."""
		tg = self.tg
		sources = tg.to_nodes(getattr(tg, 'source', []))
		headers = []
		for inc in Utils.to_list(getattr(tg, 'codelite_includes', [])):
			if isinstance(inc, str):
				inc = tg.path.find_node(inc)
			if inc:
				headers.extend(inc.ant_glob(HEADERS_GLOB, flat=False))

		# remove duplicates and keep a stable ordering
		self.source.extend(set(sources + headers))
		self.source.sort(key=lambda node: node.abspath())

	def collect_properties(self):
		"""
		CodeLite projects are associated with platforms and configurations
		(for building especially).
		"""
		super(vsnode_target, self).collect_properties()
		for prop in self.build_properties:
			prop.outdir = self.path.parent.abspath()
			prop.preprocessor_definitions = ''
			prop.includes_search_path = ''

			try:
				tsk = self.tg.link_task
			except AttributeError:
				pass
			else:
				prop.output_file = tsk.outputs[0].abspath()
				prop.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
				prop.includes_search_path = ';'.join(self.tg.env.INCPATHS)
+
class codelite_generator(BuildContext):
	'''generates a CodeLite workspace'''
	cmd = 'codelite'
	fun = 'build'

	def init(self):
		"""
		Set the defaults for the attributes that drive the generation
		(configurations, platforms, output locations, project classes)
		when the user did not provide them.
		"""
		if not getattr(self, 'configurations', None):
			self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
		if not getattr(self, 'platforms', None):
			self.platforms = ['Win32']
		if not getattr(self, 'all_projects', None):
			self.all_projects = []
		if not getattr(self, 'project_extension', None):
			self.project_extension = '.project'
		if not getattr(self, 'projects_dir', None):
			self.projects_dir = self.srcnode.make_node('')
			self.projects_dir.mkdir()

		# bind the classes to the object, so that subclasses can provide custom generators
		if not getattr(self, 'vsnode_vsdir', None):
			self.vsnode_vsdir = vsnode_vsdir
		if not getattr(self, 'vsnode_target', None):
			self.vsnode_target = vsnode_target
		if not getattr(self, 'vsnode_build_all', None):
			self.vsnode_build_all = vsnode_build_all
		if not getattr(self, 'vsnode_install_all', None):
			self.vsnode_install_all = vsnode_install_all
		if not getattr(self, 'vsnode_project_view', None):
			self.vsnode_project_view = vsnode_project_view

		self.numver = '11.00'
		self.vsver = '2010'

	def execute(self):
		"""
		Entry point: restore the environments, run the scripts,
		then collect and write the projects.
		"""
		self.restore()
		if not self.all_envs:
			self.load_envs()
		self.recurse([self.run_dir])

		# user initialization
		self.init()

		# two phases for creating the solution
		self.collect_projects() # add project objects into "self.all_projects"
		self.write_files() # write the corresponding project and solution files

	def collect_projects(self):
		"""
		Fill the list self.all_projects with project objects and sort it,
		placing the default project (if any) first.
		"""
		self.collect_targets()
		#self.add_aliases()
		#self.collect_dirs()
		default_project = getattr(self, 'default_project', None)
		def sortfun(x):
			# the default project sorts before everything else
			if x.name == default_project:
				return ''
			return getattr(x, 'path', None) and x.path.abspath() or x.name
		self.all_projects.sort(key=sortfun)

	def write_files(self):
		"""
		Write the project and solution files from the data collected
		so far. It is unlikely that you will want to change this.
		"""
		for p in self.all_projects:
			p.write()

		# and finally write the solution file
		node = self.get_solution_node()
		node.parent.mkdir()
		Logs.warn('Creating %r', node)
		template1 = compile_template(SOLUTION_TEMPLATE)
		sln_str = rm_blank_lines(template1(self))
		node.stealth_write(sln_str)

	def get_solution_node(self):
		"""
		Return (and cache) the workspace file node; the name is taken from
		self.codelite_solution_name or derived from APPNAME. Required when
		writing the .vcproj files.
		"""
		try:
			return self.solution_node
		except AttributeError:
			# not computed yet, fall through and build it below
			pass

		codelite_solution_name = getattr(self, 'codelite_solution_name', None)
		if not codelite_solution_name:
			codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace'
			self.codelite_solution_name = codelite_solution_name
		if os.path.isabs(codelite_solution_name):
			self.solution_node = self.root.make_node(codelite_solution_name)
		else:
			self.solution_node = self.srcnode.make_node(codelite_solution_name)
		return self.solution_node

	def project_configurations(self):
		"""
		Return all the (configuration, platform) pairs.
		"""
		return [(c, p) for c in self.configurations for p in self.platforms]

	def collect_targets(self):
		"""
		Process the task generators and create one project per generator
		owning a link task.
		"""
		for g in self.groups:
			for tg in g:
				if not isinstance(tg, TaskGen.task_gen):
					continue

				if not hasattr(tg, 'codelite_includes'):
					tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
				tg.post()
				if not getattr(tg, 'link_task', None):
					continue

				p = self.vsnode_target(self, tg)
				p.collect_source() # delegate this processing
				p.collect_properties()
				self.all_projects.append(p)

	def add_aliases(self):
		"""
		Add a specific target that emulates the "make all" necessary when pressing F7.
		We also add an alias for "make install" (disabled by default).
		"""
		base = getattr(self, 'projects_dir', None) or self.tg.path

		node_project = base.make_node('build_all_projects' + self.project_extension) # Node
		p_build = self.vsnode_build_all(self, node_project)
		p_build.collect_properties()
		self.all_projects.append(p_build)

		node_project = base.make_node('install_all_projects' + self.project_extension) # Node
		p_install = self.vsnode_install_all(self, node_project)
		p_install.collect_properties()
		self.all_projects.append(p_install)

		node_project = base.make_node('project_view' + self.project_extension) # Node
		p_view = self.vsnode_project_view(self, node_project)
		p_view.collect_source()
		p_view.collect_properties()
		self.all_projects.append(p_view)

		n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
		p_build.parent = p_install.parent = p_view.parent = n
		self.all_projects.append(n)

	def collect_dirs(self):
		"""
		Create the folder structure in the CodeLite project view.
		"""
		seen = {}
		def make_parents(proj):
			# look at a project, try to make a parent
			if getattr(proj, 'parent', None):
				# aliases already have parents
				return
			x = proj.iter_path
			if x in seen:
				proj.parent = seen[x]
				return

			# there is no vsnode_vsdir for x yet:
			# create a project representing the folder "x"
			n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
			n.iter_path = x.parent
			self.all_projects.append(n)

			# recurse up to the project directory
			if x.height() > self.srcnode.height() + 1:
				make_parents(n)

		for p in self.all_projects[:]: # iterate over a copy of all projects
			if not getattr(p, 'tg', None):
				# but only projects that have a task generator
				continue

			# make a folder for each task generator
			p.iter_path = p.tg.path
			make_parents(p)
+

Some files were not shown because too many files changed in this diff