[rtems_waf commit] Use gccdeps for dependency scanning.

Chris Johns chrisj at rtems.org
Thu Oct 10 06:47:30 UTC 2019


Module:    rtems_waf
Branch:    master
Commit:    096372fc4504730e50b51b952ce47ca603b35f01
Changeset: http://git.rtems.org/rtems_waf/commit/?id=096372fc4504730e50b51b952ce47ca603b35f01

Author:    Chris Johns <chrisj at rtems.org>
Date:      Thu Oct 10 17:43:11 2019 +1100

Use gccdeps for dependency scanning.

---

 gccdeps.py | 214 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 rtems.py   |  13 +++-
 2 files changed, 224 insertions(+), 3 deletions(-)
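
The tool added here is waf's gccdeps extra: instead of scanning sources with waf's Python-based c_preproc module, it passes -MMD (or -MD when c_preproc.go_absolute is set) to the compiler, reads the .d file written next to each object, and feeds those paths back into waf's dependency database. Because rtems.py now loads the tool during configure (see the rtems.py hunk below), a project built with rtems_waf picks it up without changing its own wscript. A minimal consumer wscript sketch, assuming a hypothetical hello.c, might look like this:

    # Sketch only: the source and target names are made up, and a real
    # project normally also defines init() and passes its version to
    # rtems.init(). Nothing gccdeps-specific is needed here because
    # rtems.configure() loads the tool itself.
    from rtems_waf import rtems

    def options(opt):
        rtems.options(opt)

    def configure(conf):
        rtems.configure(conf)

    def build(bld):
        rtems.build(bld)
        bld(features='c cprogram', target='hello', source=['hello.c'])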

diff --git a/gccdeps.py b/gccdeps.py
new file mode 100644
index 0000000..bfabe72
--- /dev/null
+++ b/gccdeps.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Thomas Nagy, 2008-2010 (ita)
+
+"""
+Execute the tasks with gcc -MD, read the dependencies from the .d file
+and prepare the dependency calculation for the next run.
+This affects the cxx class, so make sure to load Qt5 after this tool.
+
+Usage::
+
+	def options(opt):
+		opt.load('compiler_cxx')
+	def configure(conf):
+		conf.load('compiler_cxx gccdeps')
+"""
+
+import os, re, threading
+from waflib import Task, Logs, Utils, Errors
+from waflib.Tools import c_preproc
+from waflib.TaskGen import before_method, feature
+
+lock = threading.Lock()
+
+gccdeps_flags = ['-MD']
+if not c_preproc.go_absolute:
+	gccdeps_flags = ['-MMD']
+
+# Third-party tools are allowed to add extra names in here with append()
+supported_compilers = ['gcc', 'icc', 'clang']
+
+def scan(self):
+	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+		return super(self.derived_gccdeps, self).scan()
+	nodes = self.generator.bld.node_deps.get(self.uid(), [])
+	names = []
+	return (nodes, names)
+
+re_o = re.compile(r"\.o$")
+re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped
+
+def remove_makefile_rule_lhs(line):
+	# Splitting on a plain colon would accidentally match inside a
+	# Windows absolute-path filename, so we must search for a colon
+	# followed by whitespace to find the divider between LHS and RHS
+	# of the Makefile rule.
+	rulesep = ': '
+
+	sep_idx = line.find(rulesep)
+	if sep_idx >= 0:
+		return line[sep_idx + 2:]
+	else:
+		return line
+
+def path_to_node(base_node, path, cached_nodes):
+	# Take the base node and the path and return a node
+	# Results are cached because searching the node tree is expensive
+	# The following code is executed by threads, it is not safe, so a lock is needed...
+	if getattr(path, '__hash__'):
+		node_lookup_key = (base_node, path)
+	else:
+		# Not hashable, assume it is a list and join into a string
+		node_lookup_key = (base_node, os.path.sep.join(path))
+	try:
+		lock.acquire()
+		node = cached_nodes[node_lookup_key]
+	except KeyError:
+		node = base_node.find_resource(path)
+		cached_nodes[node_lookup_key] = node
+	finally:
+		lock.release()
+	return node
+
+def post_run(self):
+	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+		return super(self.derived_gccdeps, self).post_run()
+
+	name = self.outputs[0].abspath()
+	name = re_o.sub('.d', name)
+	try:
+		txt = Utils.readf(name)
+	except EnvironmentError:
+		Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
+		raise
+	#os.remove(name)
+
+	# Compilers have the choice to either output the file's dependencies
+	# as one large Makefile rule:
+	#
+	#   /path/to/file.o: /path/to/dep1.h \
+	#                    /path/to/dep2.h \
+	#                    /path/to/dep3.h \
+	#                    ...
+	#
+	# or as many individual rules:
+	#
+	#   /path/to/file.o: /path/to/dep1.h
+	#   /path/to/file.o: /path/to/dep2.h
+	#   /path/to/file.o: /path/to/dep3.h
+	#   ...
+	#
+	# So the first step is to sanitize the input by stripping out the left-
+	# hand side of all these lines. After that, whatever remains are the
+	# implicit dependencies of task.outputs[0]
+	txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])
+
+	# Now join all the lines together
+	txt = txt.replace('\\\n', '')
+
+	val = txt.strip()
+	val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]
+
+	nodes = []
+	bld = self.generator.bld
+
+	# Dynamically bind to the cache
+	try:
+		cached_nodes = bld.cached_nodes
+	except AttributeError:
+		cached_nodes = bld.cached_nodes = {}
+
+	for x in val:
+
+		node = None
+		if os.path.isabs(x):
+			node = path_to_node(bld.root, x, cached_nodes)
+		else:
+			# TODO waf 1.9 - single cwd value
+			path = getattr(bld, 'cwdx', bld.bldnode)
+			# when calling find_resource, make sure the path does not contain '..'
+			x = [k for k in Utils.split_path(x) if k and k != '.']
+			while '..' in x:
+				idx = x.index('..')
+				if idx == 0:
+					x = x[1:]
+					path = path.parent
+				else:
+					del x[idx]
+					del x[idx-1]
+
+			node = path_to_node(path, x, cached_nodes)
+
+		if not node:
+			raise ValueError('could not find %r for %r' % (x, self))
+		if id(node) == id(self.inputs[0]):
+			# ignore the source file, it is already in the dependencies
+			# this way, successful config tests may be retrieved from the cache
+			continue
+		nodes.append(node)
+
+	Logs.debug('deps: gccdeps for %s returned %s', self, nodes)
+
+	bld.node_deps[self.uid()] = nodes
+	bld.raw_deps[self.uid()] = []
+
+	try:
+		del self.cache_sig
+	except AttributeError:
+		pass
+
+	Task.Task.post_run(self)
+
+def sig_implicit_deps(self):
+	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
+		return super(self.derived_gccdeps, self).sig_implicit_deps()
+	try:
+		return Task.Task.sig_implicit_deps(self)
+	except Errors.WafError:
+		return Utils.SIG_NIL
+
+def wrap_compiled_task(classname):
+	derived_class = type(classname, (Task.classes[classname],), {})
+	derived_class.derived_gccdeps = derived_class
+	derived_class.post_run = post_run
+	derived_class.scan = scan
+	derived_class.sig_implicit_deps = sig_implicit_deps
+
+for k in ('c', 'cxx'):
+	if k in Task.classes:
+		wrap_compiled_task(k)
+
+@before_method('process_source')
+@feature('force_gccdeps')
+def force_gccdeps(self):
+	self.env.ENABLE_GCCDEPS = ['c', 'cxx']
+
+def configure(conf):
+	# in case someone provides a --enable-gccdeps command-line option
+	if not getattr(conf.options, 'enable_gccdeps', True):
+		return
+
+	global gccdeps_flags
+	flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
+	if conf.env.CC_NAME in supported_compilers:
+		try:
+			conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags))
+		except Errors.ConfigurationError:
+			pass
+		else:
+			conf.env.append_value('CFLAGS', flags)
+			conf.env.append_unique('ENABLE_GCCDEPS', 'c')
+
+	if conf.env.CXX_NAME in supported_compilers:
+		try:
+			conf.check(fragment='int main() { return 0; }', features='cxx force_gccdeps', cxxflags=flags, msg='Checking for cxx flags %r' % ''.join(flags))
+		except Errors.ConfigurationError:
+			pass
+		else:
+			conf.env.append_value('CXXFLAGS', flags)
+			conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')
+
+def options(opt):
+	raise ValueError('Do not load gccdeps options')
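
The heart of the tool is post_run() above: it reads the .d file the compiler wrote for the object, strips the "target:" left-hand side of each Makefile rule, joins backslash-continued lines, then splits the remainder on unescaped whitespace to recover one path per dependency. A standalone sketch of those three steps, run on a made-up .d fragment, behaves like this:

    # Illustrative only: the sample text is invented; real content comes
    # from gcc/clang -MMD. The splitter regex is the one used above.
    import re

    re_splitter = re.compile(r'(?<!\\)\s+')   # split on unescaped whitespace

    sample = ("build/hello.o: ../hello.c \\\n"
              " ../include/config.h \\\n"
              " ../include/my\\ header.h\n")

    # 1. Drop the rule's left-hand side ("build/hello.o: ").
    txt = '\n'.join(l.split(': ', 1)[-1] for l in sample.splitlines())
    # 2. Join backslash-continued lines into one logical line.
    txt = txt.replace('\\\n', '')
    # 3. Split on unescaped whitespace and unescape the remaining spaces.
    deps = [x.replace('\\ ', ' ') for x in re_splitter.split(txt.strip()) if x]

    print(deps)  # ['../hello.c', '../include/config.h', '../include/my header.h']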
+
diff --git a/rtems.py b/rtems.py
index ffb386f..6dd71b1 100644
--- a/rtems.py
+++ b/rtems.py
@@ -229,6 +229,7 @@ def configure(conf, bsp_configure = None):
         conf.load('gcc')
         conf.load('g++')
         conf.load('gas')
+        conf.load('gccdeps', tooldir = os.path.dirname(__file__))
 
         #
         # Get the version of the tools being used.
@@ -799,6 +800,11 @@ def _load_flags(conf, arch_bsp, path):
     flags['CFLAGS'] = _load_flags_set('CFLAGS', arch_bsp, conf, config, pkg)
     flags['LDFLAGS'] = _load_flags_set('LDFLAGS', arch_bsp, conf, config, pkg)
     flags['LIB'] = _load_flags_set('LIB', arch_bsp, conf, config, pkg)
+    #
+    # Handle gccdeps flags.
+    #
+    if '-MMD' in conf.env['CFLAGS']:
+        flags['CFLAGS'] += ['-MMD']
     return flags
 
 def _load_flags_set(flags, arch_bsp, conf, config, pkg):
@@ -836,7 +842,8 @@ def _filter_flags(label, flags, arch, rtems_path):
         [ { 'key': 'warnings', 'path': False, 'flags': { '-W': 1 }, 'cflags': False, 'lflags': False },
           { 'key': 'includes', 'path': True,  'flags': { '-I': 1, '-isystem': 2, '-sysroot': 2 } },
           { 'key': 'libpath',  'path': True,  'flags': { '-L': 1 } },
-          { 'key': 'machines', 'path': True,  'flags': { '-O': 1, '-m': 1, '-f': 1, '-G':1, '-E':1 } },
+          { 'key': 'machines', 'path': True,  'flags': { '-O': 1, '-m': 1, '-f': 1, '-G': 1, '-E': 1 } },
+          { 'key': 'prepro',   'path': False, 'flags': { '-MMD': 1 } },
           { 'key': 'specs',    'path': True,  'flags': { '-q': 1, '-B': 2, '--specs': 2 } } ]
 
     flags = _strip_cflags(flags)
@@ -871,8 +878,8 @@ def _filter_flags(label, flags, arch, rtems_path):
                     if label in fg and not fg[label]:
                         in_label = False
                     break
-            if in_label:
-                _flags[label] += opts
+        if in_label:
+            _flags[label] += opts
     return _flags
 
 def _strip_cflags(cflags):
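
The rtems.py side of the change is small: gccdeps' configure() appends its probe flags (normally -MMD) to the build environment's CFLAGS, _load_flags() now carries -MMD over into the per-BSP flag set so BSP compile jobs also emit .d files, and the new 'prepro' entry in _filter_flags() gives -MMD a flag group of its own. After configure, conf.env.ENABLE_GCCDEPS records which task classes use the scanner, so a consuming wscript could report it with something like the following (a hedged sketch; the message label is made up):

    from rtems_waf import rtems

    def configure(conf):
        rtems.configure(conf)
        # ENABLE_GCCDEPS lists the task classes ('c', 'cxx') that will read
        # compiler-generated .d files instead of using waf's c_preproc scan.
        scanned = ', '.join(conf.env.ENABLE_GCCDEPS) or 'none'
        conf.msg('gccdeps dependency scanning', scanned)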


