[PATCH 4.11] sb: Back port the RTEMS 5 and 6 RSB engine.

chrisj at rtems.org
Tue Oct 6 03:19:22 UTC 2020


From: Chris Johns <chrisj at rtems.org>

- Build GDB first, as we do for RTEMS 5 and later

- Update GDB to 9.1 for all archs except SPARC; the SIS patches
  only apply to GDB 7.9. Disable Python for SPARC.

Closes #4111
---
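A quick way to exercise both GDB paths after this change (the SPARC set
stays on GDB 7.9, everything else moves to GDB 9.1) is to build a couple
of the 4.11 tool sets with sb-set-builder; the prefix and log file names
below are only examples:

    $ cd rtems
    $ ../source-builder/sb-set-builder --log=l-arm.txt \
          --prefix=$HOME/development/rtems/4.11 4.11/rtems-arm
    $ ../source-builder/sb-set-builder --log=l-sparc.txt \
          --prefix=$HOME/development/rtems/4.11 4.11/rtems-sparc

Adding --dry-run to either command is a cheap check that the build set
and its configurations resolve before starting a full build.
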
 rtems/config/4.11/rtems-arm.bset        |   2 +-
 rtems/config/4.11/rtems-avr.bset        |   2 +-
 rtems/config/4.11/rtems-bfin.bset       |   2 +-
 rtems/config/4.11/rtems-h8300.bset      |   2 +-
 rtems/config/4.11/rtems-i386.bset       |   2 +-
 rtems/config/4.11/rtems-lm32.bset       |   2 +-
 rtems/config/4.11/rtems-m32c.bset       |   2 +-
 rtems/config/4.11/rtems-m32r.bset       |   2 +-
 rtems/config/4.11/rtems-m68k.bset       |   2 +-
 rtems/config/4.11/rtems-microblaze.bset |   2 +-
 rtems/config/4.11/rtems-mips.bset       |   2 +-
 rtems/config/4.11/rtems-moxie.bset      |   2 +-
 rtems/config/4.11/rtems-nios2.bset      |   2 +-
 rtems/config/4.11/rtems-or1k.bset       |   2 +-
 rtems/config/4.11/rtems-powerpc.bset    |   2 +-
 rtems/config/4.11/rtems-sh.bset         |   2 +-
 rtems/config/4.11/rtems-sparc.bset      |   2 +-
 rtems/config/4.11/rtems-sparc64.bset    |   2 +-
 rtems/config/4.11/rtems-v850.bset       |   2 +-
 rtems/config/tools/rtems-gdb-7.9-1.cfg  |   6 +
 rtems/config/tools/rtems-gdb-9.1-1.cfg  |  12 +
 source-builder/config/autoconf-2-1.cfg  |   9 +-
 source-builder/config/automake-1-1.cfg  |   9 +-
 source-builder/config/binutils-2-1.cfg  |  17 +-
 source-builder/config/dtc-1-1.cfg       |   3 +-
 source-builder/config/expat-2-1.cfg     |   1 -
 source-builder/config/gcc-4.3-1.cfg     |  15 +-
 source-builder/config/gcc-4.4-1.cfg     |  10 +-
 source-builder/config/gcc-4.5-1.cfg     |   8 +-
 source-builder/config/gcc-4.6-1.cfg     |  10 +-
 source-builder/config/gcc-4.7-1.cfg     |   8 +-
 source-builder/config/gcc-4.8-1.cfg     |   8 +-
 source-builder/config/gcc-4.9-1.cfg     |   8 +-
 source-builder/config/gcc-common-1.cfg  |  53 +-
 source-builder/config/gdb-6-1.cfg       |  84 ++-
 source-builder/config/gdb-7-1.cfg       | 141 +----
 source-builder/config/gdb-common-1.cfg  | 286 +++++++++++
 source-builder/config/gettext-0-1.cfg   |   1 -
 source-builder/config/glib-2-1.cfg      |   4 +-
 source-builder/config/libffi-3-1.cfg    |   3 +-
 source-builder/config/libiconv-1-1.cfg  |   1 -
 source-builder/config/libjpeg-1.cfg     |   6 +-
 source-builder/config/libtool-2-1.cfg   |   4 +-
 source-builder/config/libusb-1-1.cfg    |   5 +-
 source-builder/config/m4-1-1.cfg        |   3 +-
 source-builder/config/net-snmp-5-1.cfg  |   4 +-
 source-builder/config/ntp-4-1.cfg       |   1 -
 source-builder/config/or1ksim-1-1.cfg   |   1 -
 source-builder/config/pixman-0-1.cfg    |   1 -
 source-builder/config/protobuf-2-1.cfg  |   3 +-
 source-builder/config/qemu-1-1.cfg      | 101 +---
 source-builder/config/sqlite-3-1.cfg    |  47 +-
 source-builder/defaults.mc              | 110 +++-
 source-builder/pkg-config               |  25 +-
 source-builder/sb-check                 |   8 +-
 source-builder/sb-defaults              |   9 +-
 source-builder/sb-get-sources           |  30 ++
 source-builder/sb-reports               |   9 +-
 source-builder/sb-rtems-config          |   9 +-
 source-builder/sb-set-builder           |   9 +-
 source-builder/sb-track                 |  29 ++
 source-builder/sb/__init__.py           |  20 +
 source-builder/sb/build.py              | 326 ++++++++----
 source-builder/sb/check.py              |  48 +-
 source-builder/sb/config.py             | 371 ++++++++++----
 source-builder/sb/cvs.py                |  10 +-
 source-builder/sb/darwin.py             |   3 +-
 source-builder/sb/download.py           |  79 +--
 source-builder/sb/ereport.py            |  64 +--
 source-builder/sb/error.py              |   4 +-
 source-builder/sb/execute.py            | 365 ++++++++++---
 source-builder/sb/freebsd.py            |  25 +-
 source-builder/sb/getsources.py         | 138 +++++
 source-builder/sb/git.py                |  73 ++-
 source-builder/sb/linux.py              |  48 +-
 source-builder/sb/log.py                |  17 +-
 source-builder/sb/macros.py             |  72 +--
 source-builder/sb/mailer.py             |  12 +-
 source-builder/sb/netbsd.py             |   4 +-
 source-builder/sb/options.py            | 130 +++--
 source-builder/sb/path.py               |  84 ++-
 source-builder/sb/pkgconfig.py          |  37 +-
 source-builder/sb/reports.py            |  87 ++--
 source-builder/sb/rtems-build-dep       | 169 ++++++
 source-builder/sb/rtems-check-command   |  26 +
 source-builder/sb/rtemsconfig.py        |  12 +-
 source-builder/sb/setbuilder.py         | 340 ++++++++----
 source-builder/sb/shell.py              |  74 +++
 source-builder/sb/simhost.py            | 656 ++++++++++++++++++++++++
 source-builder/sb/solaris.py            |   6 +-
 source-builder/sb/sources.py            |  55 +-
 source-builder/sb/track.py              | 250 +++++++++
 source-builder/sb/version.py            | 249 ++++++---
 source-builder/sb/windows.py            |  50 +-
 source-builder/sha512-base64            |   2 +
 95 files changed, 3832 insertions(+), 1223 deletions(-)
 create mode 100644 rtems/config/tools/rtems-gdb-9.1-1.cfg
 create mode 100644 source-builder/config/gdb-common-1.cfg
 create mode 100755 source-builder/sb-get-sources
 create mode 100755 source-builder/sb-track
 create mode 100644 source-builder/sb/__init__.py
 create mode 100644 source-builder/sb/getsources.py
 create mode 100755 source-builder/sb/rtems-build-dep
 create mode 100755 source-builder/sb/rtems-check-command
 create mode 100644 source-builder/sb/shell.py
 create mode 100644 source-builder/sb/simhost.py
 create mode 100644 source-builder/sb/track.py
 create mode 100755 source-builder/sha512-base64

diff --git a/rtems/config/4.11/rtems-arm.bset b/rtems/config/4.11/rtems-arm.bset
index d54570e..ec6cbf8 100644
--- a/rtems/config/4.11/rtems-arm.bset
+++ b/rtems/config/4.11/rtems-arm.bset
@@ -34,8 +34,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-avr.bset b/rtems/config/4.11/rtems-avr.bset
index 883120a..5cd1497 100644
--- a/rtems/config/4.11/rtems-avr.bset
+++ b/rtems/config/4.11/rtems-avr.bset
@@ -34,8 +34,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-bfin.bset b/rtems/config/4.11/rtems-bfin.bset
index 0a8bacd..6659f45 100644
--- a/rtems/config/4.11/rtems-bfin.bset
+++ b/rtems/config/4.11/rtems-bfin.bset
@@ -16,8 +16,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.8.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-h8300.bset b/rtems/config/4.11/rtems-h8300.bset
index 6e28f0b..984d09f 100644
--- a/rtems/config/4.11/rtems-h8300.bset
+++ b/rtems/config/4.11/rtems-h8300.bset
@@ -28,8 +28,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-i386.bset b/rtems/config/4.11/rtems-i386.bset
index 6a91672..5c26e94 100644
--- a/rtems/config/4.11/rtems-i386.bset
+++ b/rtems/config/4.11/rtems-i386.bset
@@ -16,8 +16,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-lm32.bset b/rtems/config/4.11/rtems-lm32.bset
index 6814268..81ee768 100644
--- a/rtems/config/4.11/rtems-lm32.bset
+++ b/rtems/config/4.11/rtems-lm32.bset
@@ -22,8 +22,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-m32c.bset b/rtems/config/4.11/rtems-m32c.bset
index 22e6d17..460a4b1 100644
--- a/rtems/config/4.11/rtems-m32c.bset
+++ b/rtems/config/4.11/rtems-m32c.bset
@@ -40,8 +40,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.8.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-m32r.bset b/rtems/config/4.11/rtems-m32r.bset
index 3d5d8ff..628ae30 100644
--- a/rtems/config/4.11/rtems-m32r.bset
+++ b/rtems/config/4.11/rtems-m32r.bset
@@ -16,8 +16,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-m68k.bset b/rtems/config/4.11/rtems-m68k.bset
index dea9e0f..9ffe528 100644
--- a/rtems/config/4.11/rtems-m68k.bset
+++ b/rtems/config/4.11/rtems-m68k.bset
@@ -16,8 +16,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-microblaze.bset b/rtems/config/4.11/rtems-microblaze.bset
index 94d5874..2ce9ae6 100644
--- a/rtems/config/4.11/rtems-microblaze.bset
+++ b/rtems/config/4.11/rtems-microblaze.bset
@@ -27,8 +27,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-mips.bset b/rtems/config/4.11/rtems-mips.bset
index 50ece5c..4a29e33 100644
--- a/rtems/config/4.11/rtems-mips.bset
+++ b/rtems/config/4.11/rtems-mips.bset
@@ -26,9 +26,9 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-mipstx39-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-moxie.bset b/rtems/config/4.11/rtems-moxie.bset
index c8f764e..f2e2017 100644
--- a/rtems/config/4.11/rtems-moxie.bset
+++ b/rtems/config/4.11/rtems-moxie.bset
@@ -22,8 +22,8 @@
 4.11/rtems-autotools
 devel/expat-2.1.0-1
 devel/dtc-1.4.1-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.25-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-nios2.bset b/rtems/config/4.11/rtems-nios2.bset
index 82d45b2..83589bb 100644
--- a/rtems/config/4.11/rtems-nios2.bset
+++ b/rtems/config/4.11/rtems-nios2.bset
@@ -16,8 +16,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-or1k.bset b/rtems/config/4.11/rtems-or1k.bset
index 06ba931..bd7d553 100644
--- a/rtems/config/4.11/rtems-or1k.bset
+++ b/rtems/config/4.11/rtems-or1k.bset
@@ -37,7 +37,7 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1.cfg
 tools/rtems-tools-4.11-1
-tools/rtems-gdb-7.9-1
diff --git a/rtems/config/4.11/rtems-powerpc.bset b/rtems/config/4.11/rtems-powerpc.bset
index 7c9bab8..b94e3ba 100644
--- a/rtems/config/4.11/rtems-powerpc.bset
+++ b/rtems/config/4.11/rtems-powerpc.bset
@@ -31,8 +31,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-sh.bset b/rtems/config/4.11/rtems-sh.bset
index 2e0f854..1b06520 100644
--- a/rtems/config/4.11/rtems-sh.bset
+++ b/rtems/config/4.11/rtems-sh.bset
@@ -16,8 +16,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-sparc.bset b/rtems/config/4.11/rtems-sparc.bset
index 18e3cfc..ebf061e 100644
--- a/rtems/config/4.11/rtems-sparc.bset
+++ b/rtems/config/4.11/rtems-sparc.bset
@@ -76,8 +76,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-7.9-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-sparc64.bset b/rtems/config/4.11/rtems-sparc64.bset
index 7c97c82..5917ed8 100644
--- a/rtems/config/4.11/rtems-sparc64.bset
+++ b/rtems/config/4.11/rtems-sparc64.bset
@@ -16,8 +16,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/4.11/rtems-v850.bset b/rtems/config/4.11/rtems-v850.bset
index 1e64d23..b28dc22 100644
--- a/rtems/config/4.11/rtems-v850.bset
+++ b/rtems/config/4.11/rtems-v850.bset
@@ -21,8 +21,8 @@
 #
 4.11/rtems-autotools
 devel/expat-2.1.0-1
+tools/rtems-gdb-9.1-1
 tools/rtems-binutils-2.26-1
 tools/rtems-gcc-4.9.3-newlib-2.2.0-20150423-1
-tools/rtems-gdb-7.9-1
 tools/rtems-tools-4.11-1
 tools/rtems-kernel-4.11
diff --git a/rtems/config/tools/rtems-gdb-7.9-1.cfg b/rtems/config/tools/rtems-gdb-7.9-1.cfg
index 87db312..adf4a3c 100644
--- a/rtems/config/tools/rtems-gdb-7.9-1.cfg
+++ b/rtems/config/tools/rtems-gdb-7.9-1.cfg
@@ -22,6 +22,12 @@
  %hash  md5 patch-gdb-python-python-config.py c0260fcca4c1a5509635049c0094eee3
 %endif
 
+#
+# Disable Python because GDB 7.9 needs python2, which is now end-of-life and
+# no longer available on some hosts.
+#
+%define without_python
+
 #
 # The gdb build instructions. We use 7.xx Release 1.
 #
diff --git a/rtems/config/tools/rtems-gdb-9.1-1.cfg b/rtems/config/tools/rtems-gdb-9.1-1.cfg
new file mode 100644
index 0000000..99e6571
--- /dev/null
+++ b/rtems/config/tools/rtems-gdb-9.1-1.cfg
@@ -0,0 +1,12 @@
+#
+# GDB 9.1
+#
+
+%include %{_configdir}/checks.cfg
+%include %{_configdir}/base.cfg
+
+%define gdb_version 9.1
+%define gdb_src_ext xz
+%hash sha512 gdb-%{gdb_version}.tar.xz hM3UCNgKP8V3neRZxbJhVNMbMp695+OqeHmfseskXYtkuMjuckI4Kh29lbTm+dhP70HRKgZGqnXT3uRwnqH25w==
+
+%include %{_configdir}/gdb-common-1.cfg
diff --git a/source-builder/config/autoconf-2-1.cfg b/source-builder/config/autoconf-2-1.cfg
index 5061cfd..2b9466c 100644
--- a/source-builder/config/autoconf-2-1.cfg
+++ b/source-builder/config/autoconf-2-1.cfg
@@ -6,9 +6,9 @@
 
 %ifn %{defined _internal_autotools}
   %define _internal_autotools no
-  %ifn %{defined _internal_autotools_path}
-    %define _internal_autotools_path %{nil}
-  %endif
+%endif
+%ifn %{defined _internal_autotools_path}
+  %define _internal_autotools_path %{_prefix}
 %endif
 
 Name:      autoconf-%{autoconf_version}-%{_host}-%{release}
@@ -16,12 +16,11 @@ Summary:   Autoconf v%{autoconf_version} for host %{_host}
 Version:   %{autoconf_version}
 Release:   %{release}
 URL: 	   http://www.gnu.org/software/autoconf/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
 #
-%source set autoconf ftp://ftp.gnu.org/gnu/autoconf/autoconf-%{autoconf_version}.tar.gz
+%source set autoconf https://ftp.gnu.org/gnu/autoconf/autoconf-%{autoconf_version}.tar.gz
 
 #
 # Prepare the source code.
diff --git a/source-builder/config/automake-1-1.cfg b/source-builder/config/automake-1-1.cfg
index 6f45928..83473de 100644
--- a/source-builder/config/automake-1-1.cfg
+++ b/source-builder/config/automake-1-1.cfg
@@ -6,9 +6,9 @@
 
 %ifn %{defined _internal_autotools}
   %define _internal_autotools no
-  %ifn %{defined _internal_autotools_path}
-    %define _internal_autotools_path %{nil}
-  %endif
+%endif
+%ifn %{defined _internal_autotools_path}
+  %define _internal_autotools_path %{_prefix}
 %endif
 
 Name:      automake-%{automake_version}-%{_host}-%{release}
@@ -16,12 +16,11 @@ Summary:   Automake v%{automake_version} for host %{_host}
 Version:   %{automake_version}
 Release:   %{release}
 URL: 	   http://www.gnu.org/software/automake/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
 #
-%source set automake ftp://ftp.gnu.org/gnu/automake/automake-%{automake_version}.tar.gz
+%source set automake https://ftp.gnu.org/gnu/automake/automake-%{automake_version}.tar.gz
 
 #
 # Prepare the source code.
diff --git a/source-builder/config/binutils-2-1.cfg b/source-builder/config/binutils-2-1.cfg
index 397dea5..a49fa2a 100644
--- a/source-builder/config/binutils-2-1.cfg
+++ b/source-builder/config/binutils-2-1.cfg
@@ -19,17 +19,26 @@ Summary:   Binutils v%{binutils_version} for target %{_target} on host %{_host}
 Version:   %{binutils_version}
 Release:   %{release}
 URL: 	   http://sources.redhat.com/binutils
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Supports Candian Cross (Cxc).
 #
 %define allow_cxc
 
+#
+# Default gold and LTO to disable.
+#
+%ifn %{defined with_gold}
+ %define with_gold 0
+%endif
+%ifn %{defined with_lto}
+ %define with_lto 0
+%endif
+
 #
 # Source
 #
-%source set binutils ftp://ftp.gnu.org/gnu/binutils/binutils-%{binutils_version}.tar.bz2
+%source set binutils https://ftp.gnu.org/gnu/binutils/binutils-%{binutils_version}.tar.bz2
 
 #
 # Prepare the source code.
@@ -59,7 +68,11 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
     --build=%{_build} --host=%{_host} \
     --target=%{_target} \
     --verbose --disable-nls \
+    --disable-gdb --disable-libdecnumber --disable-readline --disable-sim \
     %{?with_deterministic_archives:--enable-deterministic-archives} \
+    %{?with_64_bit_bfd:--enable-64-bit-bfd} \
+    %{?with_gold:--enable-gold=yes} \
+    %{?with_lto:--enable-lto --enable-plugins}%{!?with_lto:--disable-lto} \
     --without-included-gettext \
     --disable-win32-registry \
     --disable-werror \
diff --git a/source-builder/config/dtc-1-1.cfg b/source-builder/config/dtc-1-1.cfg
index c9cb816..686a1e0 100644
--- a/source-builder/config/dtc-1-1.cfg
+++ b/source-builder/config/dtc-1-1.cfg
@@ -12,8 +12,7 @@ Name:      dtc-%{dtc_version}-%{_host}-%{release}
 Summary:   Device Tree Compiler v%{dtc_version} for target %{_target} on host %{_host}
 Version:   %{dtc_version}
 Release:   %{release}
-URL: 	   http://www.jdl.com/software/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
+URL: 	   https://www.devicetree.org/
 
 #
 # Source
diff --git a/source-builder/config/expat-2-1.cfg b/source-builder/config/expat-2-1.cfg
index 77774e7..2219d15 100644
--- a/source-builder/config/expat-2-1.cfg
+++ b/source-builder/config/expat-2-1.cfg
@@ -13,7 +13,6 @@ Summary:   Expat XML Parser v%{expat_version} for target %{_target} on host %{_h
 Version:   %{expat_version}
 Release:   %{release}
 URL: 	   http://expat.sourceforge.net/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
diff --git a/source-builder/config/gcc-4.3-1.cfg b/source-builder/config/gcc-4.3-1.cfg
index 4c7a335..69ea367 100644
--- a/source-builder/config/gcc-4.3-1.cfg
+++ b/source-builder/config/gcc-4.3-1.cfg
@@ -1,28 +1,33 @@
 #
-# GCC 4.7 Version 1.
+# GCC 4.3.1 Version 1.
 #
 # This configuration file configure's, make's and install's gcc. It uses
 # newlib, MPFR, MPC, and GMP in a one-tree build configuration.
 #
 
+#
+# Force the standard of C code for GCC.
+#
+%define build_cflags %{build_cflags} --std=gnu89
+
 #
 # Source
 #
 #
 # GCC core and G++
 #
-%source set gcc ftp://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
+%source set gcc https://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
 
 #
 # Newlib
 #
-%source set newlib ftp://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
+%source set newlib https://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
 
 #
 # Packages GCC requires
 #
-%source set mpfr https://ftp.gnu.org/gnu/mpfr/mpfr-%{mpfr_version}.tar.bz2
-%source set mpc https://ftp.gnu.org/gnu/mpc/mpc-%{mpc_version}.tar.gz
+%source set mpfr http://www.mpfr.org/mpfr-%{mpfr_version}/mpfr-%{mpfr_version}.tar.bz2
+%source set mpc http://www.multiprecision.org/mpc/download/mpc-%{mpc_version}.tar.gz
 %source set gmp https://ftp.gnu.org/gnu/gmp/gmp-%{gmp_version}.tar.bz2
 
 #
diff --git a/source-builder/config/gcc-4.4-1.cfg b/source-builder/config/gcc-4.4-1.cfg
index ecfa651..2c70919 100644
--- a/source-builder/config/gcc-4.4-1.cfg
+++ b/source-builder/config/gcc-4.4-1.cfg
@@ -11,22 +11,22 @@
 #
 # GCC core and G++
 #
-%source set gcc ftp://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-core-%{gcc_version}.tar.bz2
+%source set gcc https://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-core-%{gcc_version}.tar.bz2
 
 %if %{enable_cxx}
- %source add gcc ftp://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-g++-%{gcc_version}.tar.gz
+ %source add gcc https://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-g++-%{gcc_version}.tar.gz
 %endif
 
 #
 # Newlib
 #
-%source set newlib ftp://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
+%source set newlib https://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
 
 #
 # Packages GCC requires
 #
-%source set mpfr https://ftp.gnu.org/gnu/mpfr/mpfr-%{mpfr_version}.tar.bz2
-%source set mpc https://ftp.gnu.org/gnu/mpc/mpc-%{mpc_version}.tar.gz
+%source set mpfr http://www.mpfr.org/mpfr-%{mpfr_version}/mpfr-%{mpfr_version}.tar.bz2
+%source set mpc http://www.multiprecision.org/mpc/download/mpc-%{mpc_version}.tar.gz
 %source set gmp https://ftp.gnu.org/gnu/gmp/gmp-%{gmp_version}.tar.bz2
 
 #
diff --git a/source-builder/config/gcc-4.5-1.cfg b/source-builder/config/gcc-4.5-1.cfg
index acca717..fbff31c 100644
--- a/source-builder/config/gcc-4.5-1.cfg
+++ b/source-builder/config/gcc-4.5-1.cfg
@@ -11,18 +11,18 @@
 #
 # GCC core and G++
 #
-%source set gcc ftp://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
+%source set gcc https://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
 
 #
 # Newlib
 #
-%source set newlib ftp://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
+%source set newlib https://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
 
 #
 # Packages GCC requires
 #
-%source set mpfr https://ftp.gnu.org/gnu/mpfr/mpfr-%{mpfr_version}.tar.bz2
-%source set mpc https://ftp.gnu.org/gnu/mpc/mpc-%{mpc_version}.tar.gz
+%source set mpfr http://www.mpfr.org/mpfr-%{mpfr_version}/mpfr-%{mpfr_version}.tar.bz2
+%source set mpc http://www.multiprecision.org/mpc/download/mpc-%{mpc_version}.tar.gz
 %source set gmp https://ftp.gnu.org/gnu/gmp/gmp-%{gmp_version}.tar.bz2
 
 #
diff --git a/source-builder/config/gcc-4.6-1.cfg b/source-builder/config/gcc-4.6-1.cfg
index 4eff844..98f8d7e 100644
--- a/source-builder/config/gcc-4.6-1.cfg
+++ b/source-builder/config/gcc-4.6-1.cfg
@@ -11,22 +11,22 @@
 #
 # GCC core and G++
 #
-%source set gcc ftp://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-core-%{gcc_version}.tar.bz2
+%source set gcc https://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-core-%{gcc_version}.tar.bz2
 
 %if %{enable_cxx}
- %source add gcc ftp://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-g++-%{gcc_version}.tar.gz
+ %source add gcc https://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-g++-%{gcc_version}.tar.gz
 %endif
 
 #
 # Newlib
 #
-%source set newlib ftp://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
+%source set newlib https://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
 
 #
 # Packages GCC requires
 #
-%source set mpfr https://ftp.gnu.org/gnu/mpfr/mpfr-%{mpfr_version}.tar.bz2
-%source set mpc https://ftp.gnu.org/gnu/mpc/mpc-%{mpc_version}.tar.gz
+%source set mpfr http://www.mpfr.org/mpfr-%{mpfr_version}/mpfr-%{mpfr_version}.tar.bz2
+%source set mpc http://www.multiprecision.org/mpc/download/mpc-%{mpc_version}.tar.gz
 %source set gmp https://ftp.gnu.org/gnu/gmp/gmp-%{gmp_version}.tar.bz2
 
 #
diff --git a/source-builder/config/gcc-4.7-1.cfg b/source-builder/config/gcc-4.7-1.cfg
index 25ac233..0eac21f 100644
--- a/source-builder/config/gcc-4.7-1.cfg
+++ b/source-builder/config/gcc-4.7-1.cfg
@@ -20,18 +20,18 @@
 #
 # GCC core and G++
 #
-%source set gcc ftp://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
+%source set gcc https://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
 
 #
 # Newlib
 #
-%source set newlib ftp://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
+%source set newlib https://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
 
 #
 # Packages GCC requires
 #
-%source set mpfr https://ftp.gnu.org/gnu/mpfr/mpfr-%{mpfr_version}.tar.bz2
-%source set mpc https://ftp.gnu.org/gnu/mpc/mpc-%{mpc_version}.tar.gz
+%source set mpfr http://www.mpfr.org/mpfr-%{mpfr_version}/mpfr-%{mpfr_version}.tar.bz2
+%source set mpc http://www.multiprecision.org/mpc/download/mpc-%{mpc_version}.tar.gz
 %source set gmp https://ftp.gnu.org/gnu/gmp/gmp-%{gmp_version}.tar.bz2
 
 #
diff --git a/source-builder/config/gcc-4.8-1.cfg b/source-builder/config/gcc-4.8-1.cfg
index a090717..aa62837 100644
--- a/source-builder/config/gcc-4.8-1.cfg
+++ b/source-builder/config/gcc-4.8-1.cfg
@@ -11,18 +11,18 @@
 #
 # GCC core and G++
 #
-%source set gcc ftp://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
+%source set gcc https://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
 
 #
 # Newlib
 #
-%source set newlib ftp://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
+%source set newlib https://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
 
 #
 # Packages GCC requires
 #
-%source set mpfr https://ftp.gnu.org/gnu/mpfr/mpfr-%{mpfr_version}.tar.bz2
-%source set mpc https://ftp.gnu.org/gnu/mpc/mpc-%{mpc_version}.tar.gz
+%source set mpfr http://www.mpfr.org/mpfr-%{mpfr_version}/mpfr-%{mpfr_version}.tar.bz2
+%source set mpc http://www.multiprecision.org/downloads/mpc-%{mpc_version}.tar.gz
 %source set gmp https://ftp.gnu.org/gnu/gmp/gmp-%{gmp_version}.tar.bz2
 
 #
diff --git a/source-builder/config/gcc-4.9-1.cfg b/source-builder/config/gcc-4.9-1.cfg
index c73850f..25e4247 100644
--- a/source-builder/config/gcc-4.9-1.cfg
+++ b/source-builder/config/gcc-4.9-1.cfg
@@ -11,18 +11,18 @@
 #
 # GCC core and G++
 #
-%source set gcc ftp://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
+%source set gcc https://ftp.gnu.org/gnu/gcc/gcc-%{gcc_version}/gcc-%{gcc_version}.tar.bz2
 
 #
 # Newlib
 #
-%source set newlib ftp://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
+%source set newlib https://sourceware.org/pub/newlib/newlib-%{newlib_version}.tar.gz
 
 #
 # Packages GCC requires
 #
-%source set mpfr https://ftp.gnu.org/gnu/mpfr/mpfr-%{mpfr_version}.tar.bz2
-%source set mpc https://ftp.gnu.org/gnu/mpc/mpc-%{mpc_version}.tar.gz
+%source set mpfr http://www.mpfr.org/mpfr-%{mpfr_version}/mpfr-%{mpfr_version}.tar.bz2
+%source set mpc http://www.multiprecision.org/mpc/download/mpc-%{mpc_version}.tar.gz
 %source set gmp https://ftp.gnu.org/gnu/gmp/gmp-%{gmp_version}.tar.bz2
 
 #
diff --git a/source-builder/config/gcc-common-1.cfg b/source-builder/config/gcc-common-1.cfg
index 9154026..4e13efc 100644
--- a/source-builder/config/gcc-common-1.cfg
+++ b/source-builder/config/gcc-common-1.cfg
@@ -19,7 +19,6 @@ Summary:   GCC v%{gcc_version} and Newlib v%{newlib_version} for target %{_targe
 Version:   %{gcc_version}
 Release:   %{release}
 URL: 	   http://gcc.gnu.org/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Supports Candian Cross (Cxc).
@@ -31,6 +30,13 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 #
 %global _gcclibdir %{_prefix}/lib
 
+#
+# Default LTO to off.
+#
+%ifn %{defined with_lto}
+ %define with_lto 0
+%endif
+
 #
 # The GCC version depends on the type of build we are doing.
 #
@@ -40,6 +46,16 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
  %define rtems_gcc_version %{rtems_version}
 %endif
 
+#
+# Packages GCC requires
+#
+%if %{defined cloog_version}
+%source set cloog https://gcc.gnu.org/pub/gcc/infrastructure/cloog-%{cloog_version}.tar.gz
+%endif
+%if %{defined isl_version}
+%source set isl https://gcc.gnu.org/pub/gcc/infrastructure/isl-%{isl_version}.tar.bz2
+%endif
+
 #
 # Prepare the source code.
 #
@@ -59,8 +75,8 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
   cd ${build_top}
 
   # newlib
-  source_dir_newlib="newlib-%{newlib_version}"
-  %source setup newlib -q -D -n newlib-%{newlib_version}
+  source_dir_newlib=%{?newlib_external:%{newlib_expand_name}}%{!?newlib_external:"newlib-%{newlib_version}"}
+  %source setup newlib -q -D -n ${source_dir_newlib}
   %patch setup newlib -p1
   cd ${build_top}
 
@@ -68,6 +84,28 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
   %{__rmfile} ${source_dir_gcc}/newlib
   %{__ln_s} $PWD/${source_dir_newlib}/newlib ${source_dir_gcc}/newlib
 
+%if %{defined cloog_version}
+  # CLooG
+  source_dir_cloog="cloog-%{cloog_version}"
+  %source setup cloog -q -D -n cloog-%{cloog_version}
+  %patch setup cloog -p1
+  cd ${build_top}
+  # Build CLooG one-tree style
+  %{__rmfile} ${source_dir_gcc}/cloog
+  %{__ln_s} $PWD/${source_dir_cloog} ${source_dir_gcc}/cloog
+%endif
+
+%if %{defined isl_version}
+  # ISL
+  source_dir_isl="isl-%{isl_version}"
+  %source setup isl -q -D -n isl-%{isl_version}
+  %patch setup isl -p1
+  cd ${build_top}
+  # Build ISL one-tree style
+  %{__rmfile} ${source_dir_gcc}/isl
+  %{__ln_s} $PWD/${source_dir_isl} ${source_dir_gcc}/isl
+%endif
+
   # MPFR
   source_dir_mpfr="mpfr-%{mpfr_version}"
   %source setup mpfr -q -D -n mpfr-%{mpfr_version}
@@ -131,12 +169,6 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
   languages="$languages,objc"
 %endif
 
-%if %{_host_os} == mingw32
-  zlib_option=""
-%else
-  zlib_option="--with-system-zlib"
-%endif
-
   %{host_build_flags}
 
   ../${source_dir_gcc}/configure \
@@ -154,11 +186,10 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
     --disable-libstdcxx-pch \
     --with-gnu-as --with-gnu-ld --verbose \
     --with-newlib \
-    "$zlib_option" \
     --disable-nls --without-included-gettext \
     --disable-win32-registry \
     --enable-version-specific-runtime-libs \
-    --disable-lto \
+    %{?with_lto:--enable-lto}%{!?with_lto:--disable-lto} \
     --enable-newlib-io-c99-formats \
     %{?disable_MAKEINFO:MAKEINFO=missing} \
     %{?with_iconv:--enable-newlib-iconv} \
diff --git a/source-builder/config/gdb-6-1.cfg b/source-builder/config/gdb-6-1.cfg
index deca448..70729b2 100644
--- a/source-builder/config/gdb-6-1.cfg
+++ b/source-builder/config/gdb-6-1.cfg
@@ -4,6 +4,24 @@
 # This configuration file configure's, make's and install's gdb.
 #
 
+#
+# See if the simulator has been disabled for Windows.
+#
+%if %{_host_os} == win32
+ %if %{defined win32-gdb-disable-sim}
+  %define gdb-disable-sim 1
+ %endif
+%endif
+
+#
+# Default to building simulators.
+#
+%ifn %{defined gdb-disable-sim}
+ %define gdb-disable-sim 0
+%else
+ %undefine gdb-sim-options
+%endif
+
 %include %{_configdir}/checks.cfg
 
 Name:      %{_target}-gdb-%{gdb_version}-%{_host}-%{release}
@@ -11,38 +29,62 @@ Summary:   GDB v%{gdb_version} for target %{_target} on host %{_host}
 Version:   %{gdb_version}
 Release:   %{release}
 URL: 	   http://www.gnu.org/software/gdb/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
 #
 %source set gdb http://ftp.gnu.org/gnu/gdb/gdb-%{gdb_version}.tar.bz2
 
+#
+# Disable Python on Cxc builds for now.
+#
+%if "%{_build}" != "%{_host}"
+  %define without_python
+%endif
+
 #
 # Prepare the source code.
 #
 %prep
-  %source setup gdb -q -c -n %{name}-%{version}
-  cd gdb-%{gdb_version}
+  build_top=$(pwd)
+
+  gdb_source=%{?gdb_external:%{gdb_expand_name}}%{!?gdb_external:"gdb-%{gdb_version}"}
+
+  source_dir_gdb=${gdb_source}
+  %source setup gdb -q -n ${gdb_source}
   %patch setup gdb -p1
-  cd ..
+
+  cd ${build_top}
 
 %build
-  export PATH="%{_bindir}:${PATH}"
-  mkdir -p build
-  cd build
-%if "%{_build}" != "%{_host}"
-  CFLAGS_FOR_BUILD="-g -O2 -Wall" \
-%endif
-  CFLAGS="$SB_CFLAGS" \
-  ../gdb-%{gdb_version}/configure \
+  build_top=$(pwd)
+
+  %{build_directory}
+
+  mkdir -p ${build_dir}
+  cd ${build_dir}
+
+  %{host_build_flags}
+
+  if test "%{_build}" != "%{_host}" ; then
+    GDB_LIBS_STATIC="-lexpat"
+  else
+    GDB_LIBS_STATIC="-lexpat"
+    GDB_LIBS="%{_forced_static}"
+  fi
+
+  LIBS_STATIC=${GDB_LIBS_STATIC} \
+  LIBS=${GDB_LIBS} \
+  ../${source_dir_gdb}/configure \
     --build=%{_build} --host=%{_host} \
     --target=%{_target} \
     --verbose --disable-nls \
     --without-included-gettext \
     --disable-win32-registry \
     --disable-werror \
-    --enable-sim \
+    %{!?gdb-disable-sim:--enable-sim}%{?gdb-disable-sim:--disable-sim} \
+    %{?gdb-sim-options:%{gdb-sim-options}} \
+    --without-zlib \
     --with-expat \
     %{!?without_python:--with-python} \
     --prefix=%{_prefix} --bindir=%{_bindir} \
@@ -51,19 +93,21 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
     --mandir=%{_mandir} --infodir=%{_infodir}
 
   %{__make} %{?_smp_mflags} all
-  cd ..
+
+  cd ${build_top}
 
 %install
-  export PATH="%{_bindir}:${PATH}"
-  rm -rf $SB_BUILD_ROOT
+  build_top=$(pwd)
+
+  %{__rmdir} $SB_BUILD_ROOT
 
-  cd build
+  cd ${build_dir}
   %{__make} DESTDIR=$SB_BUILD_ROOT install
 
   # Dropped in FSF-binutils-2.9.5, but Cygwin still ships it.
-  rm -rf $SB_BUILD_ROOT%{_infodir}/configure.info*
+  %{__rmdir} $SB_BUILD_ROOT%{_infodir}/configure.info*
 
-  rm -f $SB_BUILD_ROOT%{_infodir}/dir
+  %{__rmfile} $SB_BUILD_ROOT%{_infodir}/dir
   touch $SB_BUILD_ROOT%{_infodir}/dir
 
-  cd ..
+  cd ${build_top}
diff --git a/source-builder/config/gdb-7-1.cfg b/source-builder/config/gdb-7-1.cfg
index 21591b5..a415f51 100644
--- a/source-builder/config/gdb-7-1.cfg
+++ b/source-builder/config/gdb-7-1.cfg
@@ -4,143 +4,4 @@
 # This configuration file configure's, make's and install's gdb.
 #
 
-#
-# See if the simulator has been disabled for Windows.
-#
-# Build using the system's readline, it is in better shape
-# than GDB's one with MSYS2.
-#
-%if %{_host_os} == win32
- %if %{defined win32-gdb-disable-sim}
-  %define gdb-disable-sim 1
- %endif
- %define with_system_readline 1
-%endif
-
-#
-# Default to building simulators.
-#
-%ifn %{defined gdb-disable-sim}
- %define gdb-disable-sim 0
-%else
- %undefine gdb-sim-options
-%endif
-
-%include %{_configdir}/checks.cfg
-
-#
-# Select Snapshot Macro Maps
-#
-%select gdb-snapshot
-%select expat-snapshot
-
-#
-# The description.
-#
-Name:      %{_target}-gdb-%{gdb_version}-%{_host}-%{release}
-Summary:   GDB v%{gdb_version} for target %{_target} on host %{_host}
-Version:   %{gdb_version}
-Release:   %{release}
-URL: 	   http://www.gnu.org/software/gdb/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
-
-#
-# GDB has changed it default extension.
-#
-%ifn %{defined gdb_src_ext}
-  %define gdb_src_ext gz
-%endif
-
-#
-# Source
-#
-%source set gdb http://ftp.gnu.org/gnu/gdb/gdb-%{gdb_version}.tar.%{gdb_src_ext}
-
-#
-# Disable Python on Cxc builds for now.
-#
-%if "%{_build}" != "%{_host}"
-  %define without_python
-%endif
-
-#
-# The --with-python option is either the default which is auto or the path to
-# the specific python to be used.
-#
-%if %{defined with_python_path}
-  %define with_python_option --with-python=%{with_python_path}
-%else
-  %define with_python_option --with-python
-%endif
-
-#
-#
-# Prepare the source code.
-#
-%prep
-  build_top=$(pwd)
-
-  gdb_source=%{?gdb_external:%{gdb_expand_name}}%{!?gdb_external:"gdb-%{gdb_version}"}
-
-  source_dir_gdb=${gdb_source}
-  %source setup gdb -q -n ${gdb_source}
-  %patch setup gdb -p1
-
-  cd ${build_top}
-
-%build
-  build_top=$(pwd)
-
-  %{build_directory}
-
-  mkdir -p ${build_dir}
-  cd ${build_dir}
-
-  %{host_build_flags}
-
-  if test "%{_build}" != "%{_host}" ; then
-    GDB_LIBS_STATIC="-lexpat"
-  else
-    GDB_LIBS_STATIC="-lexpat"
-    GDB_LIBS="%{_forced_static}"
-  fi
-
-  LIBS_STATIC=${GDB_LIBS_STATIC} \
-  LIBS=${GDB_LIBS} \
-  ../${source_dir_gdb}/configure \
-    --build=%{_build} --host=%{_host} \
-    --target=%{_target} \
-    --verbose --disable-nls \
-    %{?with_system_readline:--with-system-readline} \
-    --without-included-gettext \
-    --disable-win32-registry \
-    --disable-werror \
-    %{!?gdb-disable-sim:--enable-sim}%{?gdb-disable-sim:--disable-sim} \
-    %{?gdb-sim-options:%{gdb-sim-options}} \
-    --without-zlib \
-    --with-expat \
-    %{!?without_python:%{with_python_option}} \
-    --prefix=%{_prefix} --bindir=%{_bindir} \
-    --exec-prefix=%{_exec_prefix} \
-    --includedir=%{_includedir} --libdir=%{_libdir} \
-    --mandir=%{_mandir} --infodir=%{_infodir}
-
-  %{__make} %{?_smp_mflags} all
-
-  cd ${build_top}
-
-%install
-  build_top=$(pwd)
-
-  %{__rmdir} $SB_BUILD_ROOT
-
-  cd ${build_dir}
-  %{__make} DESTDIR=$SB_BUILD_ROOT install
-
-  # Dropped in FSF-binutils-2.9.5, but Cygwin still ships it.
-  %{__rmdir} $SB_BUILD_ROOT%{_infodir}/configure.info*
-
-  %{__rmfile} $SB_BUILD_ROOT%{_infodir}/dir
-  touch $SB_BUILD_ROOT%{_infodir}/dir
-
-  cd ${build_top}
+%include %{_configdir}/gdb-common-1.cfg
diff --git a/source-builder/config/gdb-common-1.cfg b/source-builder/config/gdb-common-1.cfg
new file mode 100644
index 0000000..cfddf53
--- /dev/null
+++ b/source-builder/config/gdb-common-1.cfg
@@ -0,0 +1,286 @@
+#
+# GDB Common Version 1.
+#
+# This configuration file configure's, make's and install's gdb.
+#
+
+#
+# Python
+#
+# GDB uses python internally so we need to link to a suitable python
+# dev kit. The dev kit for python is the header and a library. These
+# files are versioned where the header file is located in a directory
+# based on:
+#
+#  pythonM.m/Python.h
+#
+# where 'M' is the major version number and 'm' is the minor version
+# number. The library is:
+#
+#  libpythonM.m.a
+#
+# The python command is 'pythonM' which means we need to query it for
+# the minor number.
+#
+# The python running the RSB may not be suitable, for example a MSC,
+# MSYS or Cygwin version on Windows when we want the MinGW python dev
+# file. A specific version cannot be forced because older versions of
+# GDB can only link to 'python2'.
+#
+# Host support can perform a complex query of the system, for example
+# Windows and set '%{gdb_python2}' and '%{gdb_python3}' with the full
+# path to that version's executable.
+#
+# A configuration of GDB can set the version required by setting
+# '%{gdb-python-version}' to the version's command, e.g. python2.
+#
+# The procedure is:
+#
+# 1. If the macros '%{gdb_python2}' or '%{gdb_python3}' are present
+#    use the path they contain. Assume the path is valid.
+#
+# 2. Does the version of gdb specify a version of python that must be
+#    used. Override with '%define gdb-python-version python2'.
+#
+# 3. Search for 'python2' and if not found search for 'python3'.
+#
+%if !%{defined without_python}
+  %if %{defined gdb-python2}
+    %define gdb-enable-python %{gdb_python2}
+  %else
+    %if %{defined gdb-python3}
+      %define gdb-enable-python %{gdb_python3}
+    %else
+      %if %{defined gdb-python-version}
+        %define gdb-enable-python %(command -v %{gdb-python-version} || true)
+      %else
+        %define gdb-enable-python %(command -v python2 || true)
+        %if %{gdb-enable-python} == %{nil}
+          %define gdb-enable-python %(command -v python3 || true)
+        %endif
+        %if %{gdb-enable-python} == %{nil}
+          %define gdb-enable-python %(command -v python || true)
+        %endif
+      %endif
+      %if %{gdb-enable-python} == %{nil}
+        %error "gdb: python: no valid version of python found"
+      %endif
+    %endif
+  %endif
+
+  #
+  # Get the Python's major and minor version from the python
+  # command. The headers and libraries are installed under a major/minor
+  # (mm) version path and name.
+  #
+  # The library file name can vary across the hosts so wildcard the search.
+  #
+  %if %{!defined gdb-python-config}
+    %define gdb-python-config %{nil}
+  %endif
+  %if %{gdb-python-config} == %{nil}
+   %define gdb-python-config %(command -v %{gdb-enable-python}-config || true)
+  %endif
+  %define gdb-python-ver-mm %(%{gdb-enable-python} --version 2>&1 | sed -e 's/.* //g' | rev | cut -d'.' -f2- | rev)
+  %define gdb-python-header Python.h
+  %define gdb-python-ver-header python%{gdb-python-ver-mm}/%{gdb-python-header}
+  %define gdb-python-ver-lib libpython%{gdb-python-ver-mm}.*
+  %if %{host_includes} == %{nil}
+    %define gdb-host-incs %{nil}
+  %else
+    %define gdb-host-incs -I '%{host_includes}'
+  %endif
+  %if %{gdb-python-config} != %{nil}
+    %define gdb-python-config-incs -I '%(%{gdb-python-config} --includes)'
+    %define gdb-python-header-check %(%{_sbdir}/sb/rtems-build-dep -c %{__cc} %{gdb-host-incs} %{gdb-python-config-incs} -H %{gdb-python-header})
+  %else
+    %define gdb-python-header-check %(%{_sbdir}/sb/rtems-build-dep -c %{__cc} %{gdb-host-incs} -H %{gdb-python-ver-header})
+  %endif
+  %if %{gdb-python-header-check} == not-found && !%{_rsb_getting_source}
+    %error "gdb: python: header file not found: %{gdb-python-ver-header}, please install"
+  %endif
+  #
+  # Too hard to find on MacOS (darwin), the header is good enough.
+  #
+  %ifnos darwin
+    %if %{host_ldflags} == %{nil}
+      %define gdb-host-libs %{nil}
+    %else
+      %define gdb-host-libs -L '%{host_ldflags}'
+    %endif
+    %if %{gdb-python-config} != %{nil}
+      %define gdb-python-lib-filter awk 'BEGIN{FS=" "}/python/{for(i=1;i<NF;++i)if(match($i,".*lpython.*")) print "lib"substr($i,3)"*";}'
+      %if %{gdb-python-ver-mm} < 3.8
+          %define gdb-python-config-lib-check-flags --ldflags
+      %else
+          %define gdb-python-config-lib-check-flags --ldflags --embed
+      %endif
+      %define gdb-python-config-libs %(%{gdb-python-config} %{gdb-python-config-lib-check-flags} | %{gdb-python-lib-filter})
+      %define gdb-python-lib-check %(%{_sbdir}/sb/rtems-build-dep -c %{__cc} %{gdb-host-libs} -l %{gdb-python-config-libs})
+    %else
+      %define gdb-python-lib-check %(%{_sbdir}/sb/rtems-build-dep -c %{__cc} %{gdb-host-libs} -l %{gdb-python-ver-lib})
+    %endif
+    %if %{gdb-python-lib-check} == not-found && !%{_rsb_getting_source}
+      %error "gdb: python: library file not found: %{gdb-python-ver-lib}, please install"
+    %endif
+  %endif
+%endif
+
+#
+# See if the simulator has been disabled for Windows.
+#
+# Build using the system's readline, it is in better shape
+# than GDB's one with MSYS2.
+#
+%if %{_host_os} == win32
+ %if %{defined win32-gdb-disable-sim}
+  %define gdb-disable-sim 1
+ %endif
+ %define with_system_readline 1
+%endif
+
+#
+# Default to building simulators.
+#
+%ifn %{defined gdb-disable-sim}
+ %define gdb-disable-sim 0
+%else
+ %undefine gdb-sim-options
+%endif
+
+%include %{_configdir}/checks.cfg
+
+#
+# Select Snapshot Macro Maps
+#
+%select gdb-snapshot
+%select expat-snapshot
+
+#
+# The description.
+#
+Name:      %{_target}-gdb-%{gdb_version}-%{_host}-%{release}
+Summary:   GDB v%{gdb_version} for target %{_target} on host %{_host}
+Version:   %{gdb_version}
+Release:   %{release}
+URL:       http://www.gnu.org/software/gdb/
+
+#
+# GDB has changed its default extension.
+#
+%ifn %{defined gdb_src_ext}
+  %define gdb_src_ext gz
+%endif
+
+#
+# Source
+#
+%source set gdb http://ftp.gnu.org/gnu/gdb/gdb-%{gdb_version}.tar.%{gdb_src_ext}
+
+#
+# Disable Python on Cxc builds for now.
+#
+%if "%{_build}" != "%{_host}"
+  %define without_python
+%endif
+
+#
+# The --with-python option is either the default which is auto or the path to
+# the specific python to be used.
+#
+%if !%{defined without_python}
+  %if %{defined with_python_path}
+    %define with_python_option --with-python=%{with_python_path}
+  %else
+    %if %{defined gdb-enable-python}
+      %define with_python_option --with-python=%{gdb-enable-python}
+    %else
+      %define with_python_option --with-python
+    %endif
+  %endif
+%endif
+
+#
+# Fix the mess iconv is on FreeBSD 10.0 and higher.
+#
+%if %{defined iconv_prefix}
+ %define iconv_opts --with-libiconv-prefix=%{iconv_prefix}
+%else
+ %define iconv_opts %{nil}
+%endif
+
+#
+#
+# Prepare the source code.
+#
+%prep
+  build_top=$(pwd)
+
+  gdb_source=%{?gdb_external:%{gdb_expand_name}}%{!?gdb_external:"gdb-%{gdb_version}"}
+
+  source_dir_gdb=${gdb_source}
+  %source setup gdb -q -n ${gdb_source}
+  %patch setup gdb -p1
+
+  cd ${build_top}
+
+%build
+  build_top=$(pwd)
+
+  %{build_directory}
+
+  mkdir -p ${build_dir}
+  cd ${build_dir}
+
+  %{host_build_flags}
+
+  if test "%{_build}" != "%{_host}" ; then
+    GDB_LIBS_STATIC="-lexpat"
+  else
+    GDB_LIBS_STATIC="-lexpat"
+    GDB_LIBS="%{_forced_static}"
+  fi
+
+  LIBS_STATIC=${GDB_LIBS_STATIC} \
+  LIBS=${GDB_LIBS} \
+  ../${source_dir_gdb}/configure \
+    --build=%{_build} --host=%{_host} \
+    --target=%{_target} \
+    --verbose --disable-nls \
+    --disable-gas --disable-binutils --disable-ld --disable-gold --disable-gprof \
+    %{?with_system_readline:--with-system-readline} \
+    --without-included-gettext \
+    --disable-win32-registry \
+    --disable-werror \
+    %{!?gdb-disable-sim:--enable-sim}%{?gdb-disable-sim:--disable-sim} \
+    %{?gdb-sim-options:%{gdb-sim-options}} \
+    --without-zlib \
+    --with-expat \
+    --with-guile=no \
+    %{iconv_opts} \
+    %{?without_python:--without-python} \
+    %{!?without_python:%{with_python_option}} \
+    --prefix=%{_prefix} --bindir=%{_bindir} \
+    --exec-prefix=%{_exec_prefix} \
+    --includedir=%{_includedir} --libdir=%{_libdir} \
+    --mandir=%{_mandir} --infodir=%{_infodir}
+
+  %{__make} %{?_smp_mflags} all
+
+  cd ${build_top}
+
+%install
+  build_top=$(pwd)
+
+  %{__rmdir} $SB_BUILD_ROOT
+
+  cd ${build_dir}
+  %{__make} DESTDIR=$SB_BUILD_ROOT install
+
+  # Dropped in FSF-binutils-2.9.5, but Cygwin still ships it.
+  %{__rmdir} $SB_BUILD_ROOT%{_infodir}/configure.info*
+
+  %{__rmfile} $SB_BUILD_ROOT%{_infodir}/dir
+  touch $SB_BUILD_ROOT%{_infodir}/dir
+
+  cd ${build_top}
diff --git a/source-builder/config/gettext-0-1.cfg b/source-builder/config/gettext-0-1.cfg
index 7758d7b..19402b6 100644
--- a/source-builder/config/gettext-0-1.cfg
+++ b/source-builder/config/gettext-0-1.cfg
@@ -14,7 +14,6 @@ Summary:   GetText offers to programmers, translators, and even users, a well
 Version:   %{gettext_version}
 Release:   %{release}
 URL: 	   http://www.gnu.org/software/gettext/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
diff --git a/source-builder/config/glib-2-1.cfg b/source-builder/config/glib-2-1.cfg
index 6c40be2..09b43fa 100644
--- a/source-builder/config/glib-2-1.cfg
+++ b/source-builder/config/glib-2-1.cfg
@@ -14,7 +14,6 @@ Summary:   GLib provides the core application building blocks for
 Version:   %{glib_version}
 Release:   %{release}
 URL: 	   https://developer.gnome.org/glib/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
@@ -60,7 +59,8 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
     --datadir=%{_datadir} \
     --build=%{_build} --host=%{_host} \
     --with-sysroot=$SYSROOT \
-    --disable-dtrace
+    --disable-dtrace \
+    --with-pcre=internal
 
   %{_ld_library_path}=$SYSROOT/lib \
   %{__make} %{?_smp_mflags} all
diff --git a/source-builder/config/libffi-3-1.cfg b/source-builder/config/libffi-3-1.cfg
index d258acb..a201485 100644
--- a/source-builder/config/libffi-3-1.cfg
+++ b/source-builder/config/libffi-3-1.cfg
@@ -14,12 +14,11 @@ Summary:   LibFFI provides a portable, high level programming interface to
 Version:   %{libffi_version}
 Release:   %{release}
 URL: 	   https://sourceware.org/libffi/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
 #
-%source set libffi ftp://sourceware.org/pub/libffi/libffi-%{libffi_version}.tar.gz
+%source set libffi https://sourceware.org/pub/libffi/libffi-%{libffi_version}.tar.gz
 
 #
 # Prepare the source code.
diff --git a/source-builder/config/libiconv-1-1.cfg b/source-builder/config/libiconv-1-1.cfg
index 1ccecd4..fd2d738 100644
--- a/source-builder/config/libiconv-1-1.cfg
+++ b/source-builder/config/libiconv-1-1.cfg
@@ -15,7 +15,6 @@ Summary:   Provides an iconv() implementation, for use on systems which
 Version:   %{libiconv_version}
 Release:   %{release}
 URL: 	   http://www.gnu.org/software/libiconv/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
diff --git a/source-builder/config/libjpeg-1.cfg b/source-builder/config/libjpeg-1.cfg
index e6af6ae..6acb02f 100644
--- a/source-builder/config/libjpeg-1.cfg
+++ b/source-builder/config/libjpeg-1.cfg
@@ -9,11 +9,10 @@
 %endif
 
 Name:      libjpeg-v%{libjpeg_version}-%{_host}-%{release}
-Summary:   libjpeg is the Independent JPEG Group jpeg library 
+Summary:   libjpeg is the Independent JPEG Group jpeg library
 Version:   %{libjpeg_version}
 Release:   %{release}
 URL: 	   http://www.ijg.org/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # NTP Source
@@ -53,8 +52,7 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
     --mandir=%{_mandir} \
     --infodir=%{_infodir} \
     --datadir=%{_datadir} \
-    --disable-shared \
-    --disable-programs
+    --disable-shared
 
   %{__make} %{?_smp_mflags} all
 
diff --git a/source-builder/config/libtool-2-1.cfg b/source-builder/config/libtool-2-1.cfg
index 0d1f972..e834e87 100644
--- a/source-builder/config/libtool-2-1.cfg
+++ b/source-builder/config/libtool-2-1.cfg
@@ -16,12 +16,11 @@ Summary:   Libtool v%{libtool_version} for host %{_host}
 Version:   %{libtool_version}
 Release:   %{release}
 URL: 	   http://www.gnu.org/software/libtool/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
 #
-%source set libtool ftp://ftp.gnu.org/gnu/libtool/libtool-%{libtool_version}.tar.gz
+%source set libtool https://ftp.gnu.org/gnu/libtool/libtool-%{libtool_version}.tar.gz
 
 #
 # Prepare the source code.
@@ -54,6 +53,7 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
   fi
   export CFLAGS CFLAGS_FOR_BUILD CC
 
+  CC=%{__cc} \
   CFLAGS="$SB_CFLAGS" \
   ./configure \
     --build=%{_build} --host=%{_host} \
diff --git a/source-builder/config/libusb-1-1.cfg b/source-builder/config/libusb-1-1.cfg
index 271d5e0..803f326 100644
--- a/source-builder/config/libusb-1-1.cfg
+++ b/source-builder/config/libusb-1-1.cfg
@@ -12,7 +12,6 @@ Summary:   LibUSB v%{libusb_version} for target %{_target} on host %{_host}
 Version:   %{libusb_version}
 Release:   %{release}
 URL: 	   http://libusb.org/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
@@ -34,8 +33,10 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
   cd libusb-%{libusb_version}
 
 %if "%{_build}" != "%{_host}"
-  CFLAGS_FOR_BUILD="-g -O2 -Wall" \
+  LIBUSB_CFLAGS_FOR_BUILD="-g -O2 -Wall"
 %endif
+
+  CFLAGS_FOR_BUILD=${LIBUSB_CFLAGS_FOR_BUILD} \
   CFLAGS="$SB_CFLAGS" \
   ./configure \
     --build=%{_build} --host=%{_host} \
diff --git a/source-builder/config/m4-1-1.cfg b/source-builder/config/m4-1-1.cfg
index c77e4a3..dc47c6b 100644
--- a/source-builder/config/m4-1-1.cfg
+++ b/source-builder/config/m4-1-1.cfg
@@ -12,12 +12,11 @@ Summary:   M4 v%{m4_version} for host %{_host}
 Version:   %{m4_version}
 Release:   %{release}
 URL: 	   http://www.gnu.org/software/m4/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
 #
-%source set m4 ftp://ftp.gnu.org/gnu/m4/m4-%{m4_version}.tar.gz
+%source set m4 https://ftp.gnu.org/gnu/m4/m4-%{m4_version}.tar.gz
 
 #
 # Prepare the source code.
diff --git a/source-builder/config/net-snmp-5-1.cfg b/source-builder/config/net-snmp-5-1.cfg
index 3bdaa9f..c82e310 100644
--- a/source-builder/config/net-snmp-5-1.cfg
+++ b/source-builder/config/net-snmp-5-1.cfg
@@ -13,12 +13,11 @@ Summary:   NetSNMP is a SNMP v1, v2c and v3 Management Agent with MIB-II support
 Version:   %{net_snmp_version}
 Release:   %{release}
 URL: 	   http://www.net-snmp.org/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # NetSNMP Source
 #
-%source set net-snmp https://downloads.sourceforge.net/project/net-snmp/net-snmp/%{net_snmp_version}/net-snmp-%{net_snmp_version}.tar.gz
+%source set net-snmp http://downloads.sourceforge.net/project/net-snmp/net-snmp/%{net_snmp_version}/net-snmp-%{net_snmp_version}.tar.gz
 
 #
 # Prepare the source code.
@@ -44,6 +43,7 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
   CFLAGS="${CFLAGS} %{net_snmp_cflags}"
 
+  LIBS="-lbsd -lm -lz -lrtemsdefaultconfig" \
   ../${source_dir_net_snmp}/configure \
     --host=%{_host} \
     --prefix=%{_prefix} \
diff --git a/source-builder/config/ntp-4-1.cfg b/source-builder/config/ntp-4-1.cfg
index 014c935..a7d5982 100644
--- a/source-builder/config/ntp-4-1.cfg
+++ b/source-builder/config/ntp-4-1.cfg
@@ -13,7 +13,6 @@ Summary:   NTP is the Network Time Protocol.
 Version:   %{ntp_version}
 Release:   %{release}
 URL: 	   http://www.ntp.org/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # NTP Source
diff --git a/source-builder/config/or1ksim-1-1.cfg b/source-builder/config/or1ksim-1-1.cfg
index fb77567..468f524 100644
--- a/source-builder/config/or1ksim-1-1.cfg
+++ b/source-builder/config/or1ksim-1-1.cfg
@@ -13,7 +13,6 @@ Summary:   or1ksim-github
 Version:   %{or1ksim_version}
 Release:   %{release}
 URL: 	   https://github.com/openrisc/or1ksim
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
diff --git a/source-builder/config/pixman-0-1.cfg b/source-builder/config/pixman-0-1.cfg
index f02c3d6..fc393e8 100644
--- a/source-builder/config/pixman-0-1.cfg
+++ b/source-builder/config/pixman-0-1.cfg
@@ -15,7 +15,6 @@ Summary:   Pixman is a low-level software library for pixel manipulation,
 Version:   %{pixman_version}
 Release:   %{release}
 URL: 	   http://www.pixman.org/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Source
diff --git a/source-builder/config/protobuf-2-1.cfg b/source-builder/config/protobuf-2-1.cfg
index f5b49c8..408c56f 100644
--- a/source-builder/config/protobuf-2-1.cfg
+++ b/source-builder/config/protobuf-2-1.cfg
@@ -15,12 +15,11 @@ Summary:   Protocol buffers are Googles language-neutral,
 Version:   %{protobuf_version}
 Release:   %{release}
 URL: 	   https://developers.google.com/protocol-buffers/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
 #
 # Protocol Buffers Source
 #
-%source set protobuf https://github.com/google/protobuf/releases/download/v%{protobuf_version}/protobuf-%{protobuf_version}.tar.gz
+%source set protobuf https://github.com/protocolbuffers/protobuf/releases/download/v%{protobuf_version}/protobuf-%{protobuf_version}.tar.gz
 
 #
 # Prepare the source code.
diff --git a/source-builder/config/qemu-1-1.cfg b/source-builder/config/qemu-1-1.cfg
index c19419f..5d78f68 100644
--- a/source-builder/config/qemu-1-1.cfg
+++ b/source-builder/config/qemu-1-1.cfg
@@ -4,103 +4,6 @@
 # This configuration file configure's, make's and install's QEMU.
 #
 
-%if %{release} == %{nil}
-%define release 1
-%endif
+%define qemu_disables --disable-smartcard-nss
 
-#
-# Select Snapshot Macro Maps
-#
-%select qemu-snapshot
-
-#
-# The description.
-#
-Name:      qemu-%{qemu_version}-%{_host}-%{release}
-Summary:   Qemu is a simulator of various processors.
-Version:   %{qemu_version}
-Release:   %{release}
-URL: 	   http://www.qemu.org/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
-
-#
-# Source
-#
-%source set qemu http://wiki.qemu-project.org/download/qemu-%{qemu_version}.tar.bz2
-
-#
-# Prepare the source code.
-#
-%prep
-  build_top=$(pwd)
-
-  source_dir_qemu="qemu-%{qemu_version}"
-  %source setup qemu -q -n qemu-%{qemu_version}
-  %patch setup qemu -p1
-
-  cd ${build_top}
-
-%build
-  build_top=$(pwd)
-
-  %{build_directory}
-
-  mkdir -p ${build_dir}
-  cd ${build_dir}
-
-  %if %{pkgconfig check vdeplug}
-    VDE_CONFIG="--enable-vde"
-    VDE_CFLAGS="%{pkgconfig cflags vdeplug}"
-    VDE_LDFLAGS="%{pkgconfig ldflags vdeplug} %{pkgconfig libs vdeplug}"
-  %endif
-
-  %{host_build_flags}
-
-  if test "%{_build}" != "%{_host}" ; then
-    CROSS_PREFIX_OPTION="--cross-prefix=%{_host}-"
-  fi
-
-  SYSROOT=$SB_TMPPREFIX
-
-  #
-  # The --extra-cflags and --extra-ldflags do not work as expected.
-  #
-  # Hack warning: MSYS2 does not seem to convert the path to
-  #               a shell path from Windows so we keep them
-  #               separate and handle it in the pkgconfig tool.
-  #
-  PKG_CONFIG_DEFAULT_PATH=${PKG_CONFIG_PATH} \
-  PKG_CONFIG_PATH=$SYSROOT/lib/pkgconfig \
-  PKG_CONFIG_BUILD_TOP_DIR=$SB_TMPROOT \
-  %{_ld_library_path}=$SYSROOT/lib \
-  LDFLAGS="-Wl,-rpath -Wl,/$SB_PREFIX_CLEAN/lib ${VDE_LDFLAGS}" \
-  CFLAGS="${CFLAGS} ${VDE_CFLAGS}" \
-  ../${source_dir_qemu}/configure \
-    --prefix=%{_prefix} \
-    ${CROSS_PREFIX_OPTION} \
-    --make=%{__make} \
-    ${VDE_CONFIG} \
-    --disable-smartcard-nss \
-    --disable-werror \
-    --disable-tools \
-    --disable-pie \
-    --disable-vnc \
-    --disable-sdl \
-    --disable-gtk \
-    --disable-opengl \
-    --disable-netmap
-
-  %{_ld_library_path}=$SYSROOT/lib \
-  %{__make} %{?_smp_mflags} all
-
-  cd ${build_top}
-
-%install
-  build_top=$(pwd)
-
-  %{__rmdir} $SB_BUILD_ROOT
-
-  cd ${build_dir}
-  %{_ld_library_path}=$SYSROOT/lib \
-  %{__make} DESTDIR=$SB_BUILD_ROOT install
-  cd ${build_top}
+%include %{_configdir}/qemu-common-1.cfg
diff --git a/source-builder/config/sqlite-3-1.cfg b/source-builder/config/sqlite-3-1.cfg
index a748848..a0863bd 100644
--- a/source-builder/config/sqlite-3-1.cfg
+++ b/source-builder/config/sqlite-3-1.cfg
@@ -11,16 +11,16 @@
 Name:      sqlite-%{sqlite_version}-%{_host}-%{release}
 Summary:   SQLite is an in-process library that implements a
            self-contained, serverless, zero-configuration,
-	   transactional SQL database engine.
+           transactional SQL database engine.
 Version:   %{sqlite_version}
 Release:   %{release}
-URL: 	   http://www.sqlite.org/
-BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
+URL:       https://www.sqlite.org/
 
 #
 # Sqlite Source
 #
-%source set sqlite http://www.sqlite.org/%{sqlite_src_year}/sqlite-src-%{sqlite_src_version}.zip
+%source set sqlite \
+    https://www.sqlite.org/%{sqlite_src_year}/sqlite-autoconf-%{sqlite_src_version}.tar.gz
 
 #
 # Prepare the source code.
@@ -28,8 +28,8 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 %prep
   build_top=$(pwd)
 
-  source_dir_sqlite="sqlite-src-%{sqlite_src_version}"
-  %source setup sqlite -q -n sqlite-src-%{sqlite_src_version}
+  source_dir_sqlite="sqlite-autoconf-%{sqlite_src_version}"
+  %source setup sqlite -q -n sqlite-autoconf-%{sqlite_src_version}
   %patch setup sqlite -p1
 
   cd ${build_top}
@@ -44,7 +44,21 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
 
   %{host_build_flags}
 
-  CFLAGS="${CFLAGS} -DSQLITE_OMIT_WAL=1 -DSQLITE_ENABLE_COLUMN_METADATA=1"
+  # RTEMS-specific SQLite configuration options.
+  # See also
+  # - https://sqlite.org/wal.html#noshm
+  # - https://sqlite.org/malloc.html
+  #
+  # Many other compile-time options may be passed in via CFLAGS.
+  #   See also https://sqlite.org/compile.html
+  #
+  # RTEMS filesystems do not support POSIX advisory file locks.  Applications
+  # must choose an appropriate SQLite VFS that avoids them, such as unix-none
+  # (no locking at all: application logic must avoid collisions) or
+  # unix-dotfile.  The VFS must be selected at application startup time.
+  #   See also https://sqlite.org/c3ref/vfs_find.html
+  #
+  CFLAGS="${CFLAGS} -DSQLITE_MAX_MMAP_SIZE=0 -DSQLITE_DEFAULT_LOCKING_MODE=1 -DSQLITE_ENABLE_MEMSYS5"
 
   ../${source_dir_sqlite}/configure \
     --host=%{_host} \
@@ -58,17 +72,10 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
     --infodir=%{_infodir} \
     --datadir=%{_datadir} \
     --disable-largefile \
-    --disable-tcl \
-    --disable-readline \
-    --disable-amalgamation
+    --disable-readline
 
   %{__make} %{?_smp_mflags} sqlite3.h libsqlite3.la
 
-  #
-  # Create the sqlite shell executable so the install works.
-  #
-  touch sqlite3
-
   cd ${build_top}
 
 %install
@@ -77,10 +84,8 @@ BuildRoot: %{_tmppath}/%{name}-root-%(%{__id_u} -n)
   %{__rmdir} $SB_BUILD_ROOT
 
   cd ${build_dir}
-  %{__make} DESTDIR=$SB_BUILD_ROOT install
+  # Warning: The unusual install targets avoid attempting to compile and link
+  # the SQLite3 command-line shell.  The SQLite3 shell is not supported by
+  # RTEMS.
+  %{__make} DESTDIR=$SB_BUILD_ROOT install-data install-libLTLIBRARIES
   cd ${build_top}
-
-  #
-  # Remove the sqlite shell as it is just a touch.
-  #
-  %{__rm} $SB_BUILD_ROOT%{_bindir}/sqlite3
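
The sqlite-3-1.cfg notes above about RTEMS and POSIX advisory file locks mean
the VFS choice is an application-level decision. As a minimal sketch of that
choice, here is what selecting the unix-none VFS looks like through SQLite's
URI filename syntax, written in host-side Python purely for illustration; it
assumes the linked SQLite provides the unix-none VFS, and on an RTEMS target
the equivalent is sqlite3_open_v2() with its zVfs argument.

  import sqlite3

  # unix-none performs no file locking at all, so the application itself must
  # serialise access to the database file.
  db = sqlite3.connect('file:example.db?vfs=unix-none', uri=True)
  db.execute('CREATE TABLE IF NOT EXISTS kv (k TEXT PRIMARY KEY, v TEXT)')
  db.execute("INSERT OR REPLACE INTO kv VALUES ('key', 'value')")
  db.commit()
  db.close()
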
diff --git a/source-builder/defaults.mc b/source-builder/defaults.mc
index c65fa22..8ed7003 100644
--- a/source-builder/defaults.mc
+++ b/source-builder/defaults.mc
@@ -40,6 +40,7 @@ nil:                 none,    none,     ''
 
 # Set to invalid values.
 _bset:               none,    none,     ''
+_bset_tmp:           none,    none,     ''
 name:                none,    none,     ''
 version:             none,    none,     ''
 release:             none,    none,     ''
@@ -55,20 +56,37 @@ _build:              triplet, required, ''
 _target:             none,    optional, ''
 
 # RTEMS release URL
-rtems_release_url:   none,    none,     'ftp://ftp.rtems.org/pub/rtems/releases/%{rtems_version}'
+rtems_release_url:   none,    none,     'https://ftp.rtems.org/pub/rtems/releases/%{rtems_version}'
 
 # The user
-_uid:                none,    convert,  '%(%{__id_u} -n)'
+_uid:                none,    convert,  '%(%{__id_u})'
 
 # Default flags that can be overridded to supply specific host or build
 # flags and include paths to the tools. The host is the final platform
 # the tools will run on and build is the host building the tools.
-host_cflags:         none,    convert,  '-O2 -pipe'
-host_cxxflags:       none,    convert,  '-O2 -pipe'
+host_cflags:         none,    convert,  '-O2 -g -pipe'
+host_cxxflags:       none,    convert,  '-O2 -g -pipe'
+host_ldflags:        none,    convert,  ''
 host_includes:       none,    convert,  ''
-build_cflags:        none,    convert,  '-O2 -pipe'
-build_cxxflags:      none,    convert,  '-O2 -pipe'
+host_libs:           none,    convert,  ''
+build_cflags:        none,    convert,  '-O2 -g -pipe'
+build_cxxflags:      none,    convert,  '-O2 -g -pipe'
+build_ldflags:       none,    convert,  ''
 build_includes:      none,    convert,  ''
+build_libs:          none,    convert,  ''
+
+#
+# Build and staging paths.
+#
+buildroot:           dir,     none,     '%{_tmppath}/%{buildname}-%{_uid}'
+buildcxcroot:        dir,     none,     '%{_tmppath}/%{buildname}-%{_uid}-cxc'
+buildxcroot:         dir,     none,     '%{_tmppath}/%{buildname}-%{_uid}-xx'
+stagingroot:         dir,     none,     '%{_tmppath}/sb-%{_uid}-staging'
+
+#
+# Install mode can be installing or staging. Defaults to installing.
+#
+install_mode:        none,    none,     'installing'
 
 # Extra path a platform can override.
 _extra_path:         none,    none,     '%{_sbdir}'
@@ -89,13 +107,11 @@ _buildcxcdir:        dir,     optional, '%{_topdir}/build/%{buildname}-cxc'
 _buildxcdir:         dir,     optional, '%{_topdir}/build/%{buildname}-xc'
 _docdir:             dir,     none,     '%{_defaultdocdir}'
 _tmppath:            dir,     none,     '%{_topdir}/build/tmp'
-_tmproot:            dir,     none,     '%{_tmppath}/sb-%{_uid}/%{_bset}'
-_tmpcxcroot:         dir,     none,     '%{_tmppath}/sb-%{_uid}-cxc/%{_bset}'
-buildroot:           dir,     none,     '%{_tmppath}/%{buildname}-%{_uid}'
-buildcxcroot:        dir,     none,     '%{_tmppath}/%{buildname}-%{_uid}-cxc'
-buildxcroot:         dir,     none,     '%{_tmppath}/%{buildname}-%{_uid}-xx'
+_tmproot:            dir,     none,     '%{_tmppath}/sb-%{_uid}/%{_bset_tmp}'
+_tmpcxcroot:         dir,     none,     '%{_tmppath}/sb-%{_uid}-cxc/%{_bset_tmp}'
 _datadir:            dir,     none,     '%{_prefix}/share'
 _defaultdocdir:      dir,     none,     '%{_prefix}/share/doc'
+_dry_run:            none,    none,     '0'
 _exeext:             none,    none,     ''
 _exec_prefix:        dir,     none,     '%{_prefix}'
 _bindir:             dir,     none,     '%{_exec_prefix}/bin'
@@ -123,6 +139,9 @@ _usrsrc:             dir,     none,     '%{_usr}/src'
 _var:                dir,     none,     '/usr/local/var'
 _varrun:             dir,     none,     '%{_var}/run'
 
+# Get source state
+_rsb_getting_source: none,    none,     '0'
+
 # Defaults, override in platform specific modules.
 ___setup_shell:      exe,     required, '/bin/sh'
 __aclocal:           exe,     optional, 'aclocal'
@@ -138,15 +157,16 @@ __bash:              exe,     optional, '/bin/bash'
 __bison:             exe,     required, '/usr/bin/bison'
 __bzip2:             exe,     required, '/usr/bin/bzip2'
 __cat:               exe,     required, '/bin/cat'
-__cc:                exe,     required, '/usr/bin/gcc'
+__cc:                exe,     required, 'gcc'
 __chgrp:             exe,     required, '/usr/bin/chgrp'
 __chmod:             exe,     required, '/bin/chmod'
 __chown:             exe,     required, '/usr/sbin/chown'
+__cmake:             exe,     optional, '/usr/bin/cmake'
 __cp:                exe,     required, '/bin/cp'
 __cpp:               exe,     none,     '%{__cc} -E'
 __cvs:               exe,     optional, '/usr/bin/cvs'
 __cvs_z:             none,    none,     '%{__cvs} -z 9'
-__cxx:               exe,     required, '/usr/bin/g++'
+__cxx:               exe,     required, 'g++'
 __flex:              exe,     required, '/usr/bin/flex'
 __git:               exe,     required, '/usr/bin/git'
 __grep:              exe,     required, '/usr/bin/grep'
@@ -179,7 +199,7 @@ __sed:               exe,     required, '/usr/bin/sed'
 __setup_post:        exe,     none,     '%{__chmod} -R a+rX,g-w,o-w .'
 __sh:                exe,     required, '/bin/sh'
 __tar:               exe,     required, '/usr/bin/tar'
-__tar_extract:       exe,     none,     '%{__tar} -xvvf'
+__tar_extract:       exe,     none,     '%{__tar} -xvv'
 __touch:             exe,     required, '/usr/bin/touch'
 __unzip:             exe,     required, '/usr/bin/unzip'
 __xz:                exe,     required, '/usr/bin/xz'
@@ -190,7 +210,7 @@ ___build_cmd:        none,    none,     '%{?_sudo:%{_sudo} }%{?_remsh:%{_remsh}
 ___build_post:       none,    none,     'exit 0'
 
 # Prebuild set up script.
-___build_pre:        none,    none,     '''# ___build_pre in as set up in defaults.py
+___build_pre:        none,    none,     '''# ___build_pre as set up in defaults.py
 # Save the original path away.
 export SB_ORIG_PATH=${PATH}
 # Directories
@@ -198,20 +218,24 @@ export SB_ORIG_PATH=${PATH}
 %{?_prefix:SB_PREFIX_CLEAN=$(echo "%{_prefix}" | %{__sed} -e 's/^\///')}
 SB_SOURCE_DIR="%{_sourcedir}"
 SB_BUILD_DIR="%{_builddir}"
-# host == build, use build; host != build , host uses host and build uses build
-SB_HOST_CFLAGS="%{host_cflags} %{host_includes}"
-SB_HOST_CXXFLAGS="%{host_cxxflags} %{host_includes}"
-SB_HOST_LDFLAGS="%{?host_ldflags:%{host_ldflags}}%{?_tmproot:-L%{_tmproot}/${SB_PREFIX_CLEAN}/lib}"
+# host == build, use build; host != build, host uses host and build uses build
+SB_HOST_CPPFLAGS="%{host_includes}"
+# Optionally do not add includes to c/cxx flags as newer configure scripts complain
+SB_HOST_CFLAGS="%{host_cflags} %{!?host_cflags_no_includes %{host_includes}}"
+SB_HOST_CXXFLAGS="%{host_cxxflags} %{!?host_cflags_no_includes %{host_includes}}"
+SB_HOST_LDFLAGS="%{host_ldflags} %{?_tmproot:-L%{_tmproot}/${SB_PREFIX_CLEAN}/lib}"
+SB_HOST_LIBS="%{host_libs}"
 SB_BUILD_CFLAGS="%{build_cflags} %{?_tmproot:-I%{_tmproot}/${SB_PREFIX_CLEAN}/include}"
 SB_BUILD_CXXFLAGS="%{build_cxxflags} %{?_tmproot:-I%{_tmproot}/${SB_PREFIX_CLEAN}/include}"
-SB_BUILD_LDFLAGS="%{?build_ldflags:%{build_ldflags}}%{?_tmproot:-L%{_tmproot}/${SB_PREFIX_CLEAN}/lib}"
+SB_BUILD_LDFLAGS="%{build_ldflags} %{?_tmproot:-L%{_tmproot}/${SB_PREFIX_CLEAN}/lib}"
+SB_BUILD_LIBS="%{build_libs}"
 SB_CFLAGS="${SB_BUILD_CFLAGS} %{build_includes}"
 SB_CXXFLAGS="${SB_BUILD_CXXFLAGS} %{build_includes}"
 SB_ARCH="%{_arch}"
 SB_OS="%{_os}"
 export SB_SOURCE_DIR SB_BUILD_DIR SB_ARCH SB_OS
-export SB_HOST_CFLAGS SB_HOST_CXXFLAGS SB_HOST_LDFLAGS
-export SB_BUILD_CFLAGS SB_BUILD_CXXFLAGS SB_BUILD_LDFLAGS
+export SB_HOST_CPPFLAGS SB_HOST_CFLAGS SB_HOST_CXXFLAGS SB_HOST_LDFLAGS SB_HOST_LIBS
+export SB_BUILD_CFLAGS SB_BUILD_CXXFLAGS SB_BUILD_LDFLAGS SB_BUILD_LIBS
 export SB_CFLAGS SB_CXXFLAGS
 # Documentation
 SB_DOC_DIR="%{_docdir}"
@@ -298,7 +322,9 @@ FFLAGS="${FFLAGS:-${SB_CFLAGS}}" ; export FFLAGS ;
 # Build script support.
 build_directory:     none,    none,     '''
 if test "%{_build}" != "%{_host}" ; then
-  if test -z "%{_target}" ; then
+  # Cross-build (Xc) if no target or the host and target match.
+  # Canadian-cross (Cxc) if build, host and target are all different.
+  if test -z "%{_target}" -o "%{_host}" == "%{_target}" ; then
     build_dir="build-xc"
   else
     build_dir="build-cxc"
@@ -316,13 +342,19 @@ if test "%{_build}" != "%{_host}" ; then
   # Cross build
   CC=$(echo "%{_host}-%{_host_cc}" | sed -e 's,-std=gnu99 ,,')
   CXX=$(echo "%{_host}-%{_host_cxx}" | sed -e 's,-std=gnu99 ,,')
+  CPPFLAGS="${SB_HOST_CPPFLAGS}"
   CFLAGS="${SB_HOST_CFLAGS}"
   CXXFLAGS="${SB_HOST_CXXFLAGS}"
   LDFLAGS="${SB_HOST_LDFLAGS}"
+  LDLIBS="${SB_HOST_LIBS}"
+  LIBS="${SB_HOST_LIBS}"
   # Host
+  CPPFLAGS_FOR_HOST="${SB_HOST_CPPFLAGS}"
   CFLAGS_FOR_HOST="${SB_HOST_CFLAGS}"
   CXXFLAGS_FOR_HOST="${SB_HOST_CXXFLAGS}"
   LDFLAGS_FOR_HOST="${SB_HOST_LDFLAGS}"
+  LDLIBS_FOR_HOST="${SB_HOST_LIBS}"
+  LIBS_FOR_HOST="${SB_HOST_LIBS}"
   CXXFLAGS_FOR_HOST="${SB_HOST_CFLAGS}"
   CC_FOR_HOST=$(echo "%{_host_cc} ${SB_HOST_CFLAGS}" | sed -e 's,-std=gnu99 ,,')
   CXX_FOR_HOST=$(echo "%{_host_cxx} ${SB_HOST_CXXFLAGS}" | sed -e 's,-std=gnu99 ,,')
@@ -330,30 +362,35 @@ if test "%{_build}" != "%{_host}" ; then
   CFLAGS_FOR_BUILD="${SB_BUILD_CFLAGS}"
   CXXFLAGS_FOR_BUILD="${SB_BUILD_CXXFLAGS}"
   LDFLAGS_FOR_BUILD="${SB_BUILD_LDFLAGS}"
+  LDLIBS_FOR_BUILD="${SB_BUILD_LIBS}"
+  LIBS_FOR_BUILD="${SB_BUILD_LIBS}"
   CXXFLAGS_FOR_BUILD="${SB_BUILD_CFLAGS}"
   CC_FOR_BUILD=$(echo "%{__cc} ${SB_BUILD_CFLAGS}" | sed -e 's,-std=gnu99 ,,')
   CXX_FOR_BUILD=$(echo "%{__cxx} ${SB_BUILD_CXXFLAGS}" | sed -e 's,-std=gnu99 ,,')
 else
   LDFLAGS="${SB_BUILD_LDFLAGS}"
+  LDLIBS="${SB_BUILD_LIBS}"
+  LIBS="${SB_BUILD_LIBS}"
   CC=$(echo "%{__cc} ${SB_BUILD_CFLAGS}" | sed -e 's,-std=gnu99 ,,')
   CXX=$(echo "%{__cxx} ${SB_BUILD_CXXFLAGS}" | sed -e 's,-std=gnu99 ,,')
   CC_FOR_BUILD=${CC}
   CXX_FOR_BUILD=${CXX}
 fi
-export CC CXX CFLAGS CXXFLAGS LDFLAGS
-export CC_FOR_HOST CXX_FOR_HOST CFLAGS_FOR_HOST CXXFLAGS_FOR_HOST LDFLAGS_FOR_HOST
-export CC_FOR_BUILD CXX_FOR_BUILD CFLAGS_FOR_BUILD CXXFLAGS_FOR_BUILD LDFLAGS_FOR_BUILD'''
+export CC CXX CPPFLAGS CFLAGS CXXFLAGS LDFLAGS LIBS LDLIBS
+export CC_FOR_HOST CXX_FOR_HOST CPPFLAGS_FOR_HOST CFLAGS_FOR_HOST CXXFLAGS_FOR_HOST LDFLAGS_FOR_HOST LDLIBS_FOR_HOST LIBS_FOR_HOST
+export CC_FOR_BUILD CXX_FOR_BUILD CFLAGS_FOR_BUILD CXXFLAGS_FOR_BUILD LDFLAGS_FOR_BUILD LDLIBS_FOR_BUILD LIBS_FOR_BUILD'''
 
 # Build/build flags.
 build_build_flags:    none,    none,     '''
 # Build and build flags means force build == host
 # gcc is not ready to be compiled with -std=gnu99
 LDFLAGS="${SB_HOST_LDFLAGS}"
+LIBS="${SB_HOST_LIBS}"
 CC=$(echo "%{__cc} ${SB_CFLAGS}" | sed -e 's,-std=gnu99 ,,')
 CXX=$(echo "%{__cxx} ${SB_CXXFLAGS}" | sed -e 's,-std=gnu99 ,,')
 CC_FOR_BUILD=${CC}
 CXX_FOR_BUILD=${CXX}
-export CC CXX CC_FOR_BUILD CXX_FOR_BUILD CFLAGS LDFLAGS'''
+export CC CXX CC_FOR_BUILD CXX_FOR_BUILD CFLAGS LDFLAGS LIBS'''
 
 # Default package settings
 _forced_static:     none,         none, '-Xlinker -Bstatic ${LIBS_STATIC} -Xlinker -Bdynamic'
@@ -361,7 +398,7 @@ __xz:                exe,     required, '/usr/bin/xz'
 
 # Mail Support
 _mail_smtp_host:   none,         none, 'localhost'
-_mail_tools_to:    none,         none, 'rtems-tooltestresults at rtems.org'
+_mail_tools_to:    none,         none, 'build at rtems.org'
 
 # Newlib ICONV encodings
 _newlib_iconv_encodings: none,      none, '''big5,cp775,cp850,cp852,cp855,\
@@ -372,3 +409,20 @@ iso_ir_111,koi8_r,koi8_ru,koi8_u,koi8_uni,ucs_2,ucs_2_internal,\
 ucs_2be,ucs_2le,ucs_4,ucs_4_internal,ucs_4be,ucs_4le,us_ascii,\
 utf_16,utf_16be,utf_16le,utf_8,win_1250,win_1251,win_1252,\
 win_1253,win_1254,win_1255,win_1256,win_1257,win_1258'''
+
+# Waf build root suffix, only used for the win32, mingw and mingw32 OSs.
+#
+# On Windows we need to add the drive prefix to the build root because waf
+# strips the drive prefix from the prefix path when joining it to the
+# destdir path. Waf is correct in doing this; the RSB is designed to match
+# the configure behaviour, which treats the whole path, including the drive
+# prefix, as just a path.
+#
+waf_build_root_suffix:   none,  none, ' %(echo %{_prefix} | cut -c 1-2)'
+
+# Makefile.inc support for staging
+rtems_makefile_inc:      none,  none, '''
+export RTEMS_ROOT=%{rtems_bsp_rtems_root}
+export PROJECT_RELEASE=%{rtems_bsp_prefix}
+export RTEMS_MAKEFILE_PATH=%{rtems_bsp_prefix}
+'''
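
The waf_build_root_suffix entry above is easier to follow with a concrete
path. A small Python illustration, with made-up paths, of why the drive
prefix (the two characters that %(echo %{_prefix} | cut -c 1-2) yields) is
appended to the build root:

  # Illustration only: made-up Windows prefix and build root.
  prefix     = 'C:/opt/rtems/5'
  build_root = '/tmp/sb-pkg-root'
  drive      = prefix[:2]                            # 'C:'

  # waf joins destdir with the prefix minus its drive prefix:
  print(build_root + prefix[len(drive):])            # /tmp/sb-pkg-root/opt/rtems/5
  # appending the drive to the build root restores the configure-style
  # DESTDIR + prefix layout the RSB expects:
  print(build_root + drive + prefix[len(drive):])    # /tmp/sb-pkg-rootC:/opt/rtems/5
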
diff --git a/source-builder/pkg-config b/source-builder/pkg-config
index fa251e5..10db546 100755
--- a/source-builder/pkg-config
+++ b/source-builder/pkg-config
@@ -35,7 +35,6 @@ import os
 import sys
 
 base = os.path.dirname(sys.argv[0])
-sys.path.insert(0, base + '/sb')
 
 try:
     import argparse
@@ -47,16 +46,12 @@ except:
         print("Incorrect Source Builder installation", file = sys.stderr)
         sys.exit(1)
 
-try:
-    import pkgconfig
-except ImportError:
-    print("Incorrect Source Builder installation", file = sys.stderr)
-    sys.exit(1)
+import sb.pkgconfig
 
 #
 # Make trace true to get a file of what happens and what is being asked.
 #
-trace = True
+trace = False
 trace_stdout = False
 logfile = 'pkg-config.log'
 out = None
@@ -65,7 +60,7 @@ srcfd = None
 #
 # Write all the package source parsed to a single file.
 #
-trace_src = True
+trace_src = False
 if trace_src:
     srcfd = open('pkg-src.txt', 'w')
 
@@ -164,7 +159,7 @@ def run(argv):
                       default = False,
                       help = 'Ignored')
     opts.add_argument('--prefix-variable', dest = 'prefix', action = 'store',
-                      nargs = 1, default = pkgconfig.default_prefix(),
+                      nargs = 1, default = sb.pkgconfig.default_prefix(),
                       help = 'Define the prefix.')
     opts.add_argument('--static', dest = 'static', action = 'store_true',
                       default = False,
@@ -173,7 +168,7 @@ def run(argv):
                       default = False,
                       help = 'Dump the package if one is found.')
 
-    args = opts.parse_args(argv[1:])
+    args = opts.parse_args(argv)
 
     if (args.exists and (args.exact_version or args.max_version)) or \
             (args.exact_version and (args.exists or args.max_version)) or \
@@ -181,7 +176,7 @@ def run(argv):
         raise error('only one of --exists, --exact-version, or --max-version')
 
     if args.dont_define_prefix:
-        args.prefix = pkgconfig.default_prefix(False)
+        args.prefix = sb.pkgconfig.default_prefix(False)
 
     exists = False
 
@@ -190,7 +185,7 @@ def run(argv):
     if args.atleast_pkgconfig_version:
         ec = 0
     else:
-        ec, pkg, flags = pkgconfig.check_package(args.libraries, args, log, src)
+        ec, pkg, flags = sb.pkgconfig.check_package(args.libraries, args, log, src)
         if ec == 0:
             if args.cflags:
                 if len(flags['cflags']):
@@ -212,15 +207,15 @@ def run(argv):
 try:
     log('-' * 40)
     log('pkg-config', lf = False)
-    for a in sys.argv[1:]:
+    for a in sys.argv[2:]:
         log(' "%s"' % (a), lf = False)
     log('')
-    ec = run(sys.argv)
+    ec = run(sys.argv[1:])
     log('ec = %d' % (ec))
 except ImportError:
     print("incorrect package config installation", file = sys.stderr)
     sys.exit(1)
-except pkgconfig.error as e:
+except sb.pkgconfig.error as e:
     print('error: %s' % (e), file = sys.stderr)
     sys.exit(1)
 sys.exit(ec)
diff --git a/source-builder/sb-check b/source-builder/sb-check
index d23b799..8c60bf6 100755
--- a/source-builder/sb-check
+++ b/source-builder/sb-check
@@ -20,12 +20,10 @@
 
 from __future__ import print_function
 
-import sys, os
-base = os.path.dirname(sys.argv[0])
-sys.path.insert(0, base + '/sb')
 try:
-    import check
-    check.run()
+    import sb.check
+    sb.check.run()
 except ImportError:
+    import sys
     print("Incorrect Source Builder installation", file = sys.stderr)
     sys.exit(1)
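
The wrapper scripts can drop the old sys.path manipulation because sb/ is now
a real package (see the new sb/__init__.py further down). A minimal sketch of
the layout assumption the wrappers rely on, using a hypothetical wrapper name:

  #! /usr/bin/env python
  # Assumed layout, matching source-builder/:
  #   source-builder/my-wrapper      <- this (hypothetical) script
  #   source-builder/sb/__init__.py  <- makes 'sb' a package
  #   source-builder/sb/check.py
  import sys
  print(sys.path[0])   # the script's directory, added to sys.path automatically
  import sb.check      # resolved via that entry, no sys.path.insert() needed
  sb.check.run()
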
diff --git a/source-builder/sb-defaults b/source-builder/sb-defaults
index 790a5e5..2fd2121 100755
--- a/source-builder/sb-defaults
+++ b/source-builder/sb-defaults
@@ -20,12 +20,11 @@
 
 from __future__ import print_function
 
-import sys, os
-base = os.path.dirname(sys.argv[0])
-sys.path.insert(0, base + '/sb')
+import sys
+
 try:
-    import options
-    options.run(sys.argv)
+    import sb.options
+    sb.options.run(sys.argv)
 except ImportError:
     print("Incorrect Source Builder installation", file = sys.stderr)
     sys.exit(1)
diff --git a/source-builder/sb-get-sources b/source-builder/sb-get-sources
new file mode 100755
index 0000000..97ee419
--- /dev/null
+++ b/source-builder/sb-get-sources
@@ -0,0 +1,30 @@
+#! /usr/bin/env python
+#
+# RTEMS Tools Project (http://www.rtems.org/)
+# Copyright 2010-2019 Chris Johns (chrisj at rtems.org)
+# All rights reserved.
+#
+# This file is part of the RTEMS Tools package in 'rtems-tools'.
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import print_function
+
+
+try:
+    import sb.getsources
+    sb.getsources.run()
+except ImportError:
+    import sys
+    print("Incorrect Source Builder installation", file = sys.stderr)
+    sys.exit(1)
diff --git a/source-builder/sb-reports b/source-builder/sb-reports
index 3330be3..194c4ab 100755
--- a/source-builder/sb-reports
+++ b/source-builder/sb-reports
@@ -20,12 +20,11 @@
 
 from __future__ import print_function
 
-import sys, os
-base = os.path.dirname(sys.argv[0])
-sys.path.insert(0, base + '/sb')
+import sys
+
 try:
-    import reports
-    reports.run(sys.argv)
+    import sb.reports
+    sb.reports.run(sys.argv)
 except ImportError:
     print("Incorrect Source Builder installation", file = sys.stderr)
     sys.exit(1)
diff --git a/source-builder/sb-rtems-config b/source-builder/sb-rtems-config
index 1633b6b..a84b9c8 100755
--- a/source-builder/sb-rtems-config
+++ b/source-builder/sb-rtems-config
@@ -20,12 +20,11 @@
 
 from __future__ import print_function
 
-import sys, os
-base = os.path.dirname(sys.argv[0])
-sys.path.insert(0, base + '/sb')
+import sys
+
 try:
-    import rtemsconfig
-    rtemsconfig.run(sys.argv)
+    import sb.rtemsconfig
+    sb.rtemsconfig.run(sys.argv)
 except ImportError:
     print("Incorrect Source Builder installation", file = sys.stderr)
     sys.exit(1)
diff --git a/source-builder/sb-set-builder b/source-builder/sb-set-builder
index 561199e..96dd919 100755
--- a/source-builder/sb-set-builder
+++ b/source-builder/sb-set-builder
@@ -20,13 +20,10 @@
 
 from __future__ import print_function
 
-import sys, os
-base = os.path.dirname(sys.argv[0])
-sys.path.insert(0, base + '/sb')
-
 try:
-    import setbuilder
-    setbuilder.run()
+    import sb.setbuilder
+    sb.setbuilder.run()
 except ImportError:
+    import sys
     print("Incorrect Source Builder installation", file = sys.stderr)
     sys.exit(1)
diff --git a/source-builder/sb-track b/source-builder/sb-track
new file mode 100755
index 0000000..64daeb8
--- /dev/null
+++ b/source-builder/sb-track
@@ -0,0 +1,29 @@
+#! /usr/bin/env python
+#
+# RTEMS Tools Project (http://www.rtems.org/)
+# Copyright 2010-2019 Chris Johns (chrisj at rtems.org)
+# All rights reserved.
+#
+# This file is part of the RTEMS Tools package in 'rtems-tools'.
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import print_function
+
+try:
+    import sb.track
+    sb.track.run()
+except ImportError:
+    import sys
+    print("Incorrect Source Builder installation", file = sys.stderr)
+    sys.exit(1)
diff --git a/source-builder/sb/__init__.py b/source-builder/sb/__init__.py
new file mode 100644
index 0000000..c4275e2
--- /dev/null
+++ b/source-builder/sb/__init__.py
@@ -0,0 +1,20 @@
+#
+# RTEMS Tools Project (http://www.rtems.org/)
+# Copyright 2020 Chris Johns (chrisj at rtems.org)
+# All rights reserved.
+#
+# This file is part of the RTEMS Tools package in 'rtems-tools'.
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import print_function
diff --git a/source-builder/sb/build.py b/source-builder/sb/build.py
index b995e6b..16a495b 100644
--- a/source-builder/sb/build.py
+++ b/source-builder/sb/build.py
@@ -1,6 +1,6 @@
 #
 # RTEMS Tools Project (http://www.rtems.org/)
-# Copyright 2010-2013 Chris Johns (chrisj at rtems.org)
+# Copyright 2010-2018 Chris Johns (chrisj at rtems.org)
 # All rights reserved.
 #
 # This file is part of the RTEMS Tools package in 'rtems-tools'.
@@ -33,23 +33,42 @@ import stat
 import sys
 
 try:
-    import check
-    import config
-    import download
-    import error
-    import ereport
-    import execute
-    import log
-    import options
-    import path
-    import sources
-    import version
+    from . import check
+    from . import config
+    from . import download
+    from . import error
+    from . import ereport
+    from . import execute
+    from . import log
+    from . import options
+    from . import path
+    from . import sources
+    from . import version
 except KeyboardInterrupt:
     print('abort: user terminated')
     sys.exit(1)
 except:
-    print('error: unknown application load error')
-    sys.exit(1)
+    raise
+
+def humanize_number(num, suffix):
+    for unit in ['','K','M','G','T','P','E','Z']:
+        if abs(num) < 1024.0:
+            return "%5.3f%s%s" % (num, unit, suffix)
+        num /= 1024.0
+    return "%.3f%s%s" % (size, 'Y', suffix)
+
+def short_name(name):
+    #
+    # If on Windows use short names to keep the build paths as short as possible.
+    #
+    if options.host_windows:
+        buildname = ''
+        for n in name.split('-'):
+            buildname += n[0]
+        return buildname
+    else:
+        return name
 
 class script:
     """Create and manage a shell script."""
@@ -62,7 +81,15 @@ class script:
         self.lc = 0
 
     def append(self, text):
+        is_str = False
         if type(text) is str:
+            is_str = True
+        try:
+            if type(text) is unicode:
+                is_str = True
+        except:
+            pass
+        if is_str:
             text = text.splitlines()
         if not log.quiet:
             i = 0
@@ -93,29 +120,9 @@ class script:
 class build:
     """Build a package given a config file."""
 
-    def _name_(self, name):
-        #
-        # If on Windows use shorter names to keep the build paths.
-        #
-        if options.host_windows:
-            buildname = ''
-            add = True
-            for c in name:
-                if c == '-':
-                    add = True
-                elif add:
-                    buildname += c
-                    add = False
-            return buildname
-        else:
-            return name
-
     def _generate_report_(self, header, footer = None):
-        label, result = self.opts.with_arg('error-report')
-        if (label.startswith('without_') and result != 'yes') or \
-           (label.startswith('with_') and result != 'no'):
-            ereport.generate('rsb-report-%s.txt' % self.macros['name'],
-                             self.opts, header, footer)
+        ereport.generate('rsb-report-%s.txt' % self.macros['name'],
+                         self.opts, header, footer)
 
     def __init__(self, name, create_tar_files, opts, macros = None):
         try:
@@ -127,8 +134,9 @@ class build:
             log.notice('config: ' + name)
             self.set_macros(macros)
             self.config = config.file(name, opts, self.macros)
-            self.script = script()
-            self.macros['buildname'] = self._name_(self.macros['name'])
+            self.script_build = script()
+            self.script_clean = script()
+            self.macros['buildname'] = short_name(self.macros['name'])
         except error.general as gerr:
             log.notice(str(gerr))
             log.stderr('Build FAILED')
@@ -194,7 +202,7 @@ class build:
             not _disable_installing and \
             not _canadian_cross
 
-    def source(self, name):
+    def source(self, name, strip_components, download_only):
         #
         # Return the list of sources. Merge in any macro defined sources as
         # these may be overridden by user loaded macros.
@@ -229,27 +237,37 @@ class build:
                     if o.startswith('--rsb-file'):
                        os_ = o.split('=')
                        if len(os_) != 2:
-                           raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
+                           raise error.general('invalid --rsb-file option: %s' % \
+                                               (' '.join(args)))
                        if os_[0] != '--rsb-file':
-                           raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
+                           raise error.general('invalid --rsb-file option: %s' % \
+                                               (' '.join(args)))
                        file_override = os_[1]
                 opts = [o for o in opts if not o.startswith('--rsb-')]
             url = self.config.expand(' '.join(url))
-            src = download.parse_url(url, '_sourcedir', self.config, self.opts, file_override)
+            src = download.parse_url(url, '_sourcedir',
+                                     self.config, self.opts, file_override)
             download.get_file(src['url'], src['local'], self.opts, self.config)
-            if 'symlink' in src:
-                sname = name.replace('-', '_')
-                src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % (src['symlink'], sname)
-            elif 'compressed' in src:
-                #
-                # Zip files unpack as well so do not use tar.
-                #
-                src['script'] = '%s %s' % (src['compressed'], src['local'])
-                if src['compressed-type'] != 'zip':
-                    src['script'] += ' | %{__tar_extract} -'
-            else:
-                src['script'] = '%%{__tar_extract} %s' % (src['local'])
-            srcs += [src]
+            if not download_only:
+                if strip_components > 0:
+                    tar_extract = '%%{__tar_extract} --strip-components %d' % \
+                        (strip_components)
+                else:
+                    tar_extract = '%{__tar_extract}'
+                if 'symlink' in src:
+                    sname = name.replace('-', '_')
+                    src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % \
+                        (src['symlink'], sname)
+                elif 'compressed' in src:
+                    #
+                    # Zip files unpack as well so do not use tar.
+                    #
+                    src['script'] = '%s %s' % (src['compressed'], src['local'])
+                    if src['compressed-type'] != 'zip':
+                        src['script'] += ' | %s -f -' % (tar_extract)
+                else:
+                    src['script'] = '%s -f %s' % (tar_extract, src['local'])
+                srcs += [src]
         return srcs
 
     def source_setup(self, package, args):
@@ -257,7 +275,7 @@ class build:
         setup_name = args[1]
         args = args[1:]
         try:
-            opts, args = getopt.getopt(args[1:], 'qDcn:ba')
+            opts, args = getopt.getopt(args[1:], 'qDcn:bas:gE')
         except getopt.GetoptError as ge:
             raise error.general('source setup error: %s' % str(ge))
         quiet = False
@@ -267,7 +285,10 @@ class build:
         deleted_dir = False
         created_dir = False
         changed_dir = False
+        no_errors = False
+        strip_components = 0
         opt_name = None
+        download_only = False
         for o in opts:
             if o[0] == '-q':
                 quiet = True
@@ -281,31 +302,65 @@ class build:
                 unpack_before_chdir = True
             elif o[0] == '-a':
                 unpack_before_chdir = False
+            elif o[0] == '-E':
+                no_errors = True
+            elif o[0] == '-s':
+                if not o[1].isdigit():
+                    raise error.general('source setup error: invalid strip count: %s' % \
+                                        (o[1]))
+                strip_components = int(o[1])
+            elif o[0] == '-g':
+                download_only = True
         name = None
-        for source in self.source(setup_name):
+        for source in self.source(setup_name, strip_components, download_only):
             if name is None:
                 if opt_name is None:
                     if source:
                         opt_name = source['name']
                     else:
-                        raise error.general('setup source tag not found: %d' % (source_tag))
+                        raise error.general('setup source tag not found: %d' % \
+                                            (source_tag))
                 else:
                     name = opt_name
-            self.script.append(self.config.expand('cd %{_builddir}'))
-            if not deleted_dir and  delete_before_unpack:
-                self.script.append(self.config.expand('%{__rm} -rf ' + name))
-                deleted_dir = True
-            if not created_dir and create_dir:
-                self.script.append(self.config.expand('%{__mkdir_p} ' + name))
-                created_dir = True
-            if not changed_dir and (not unpack_before_chdir or create_dir):
-                self.script.append(self.config.expand('cd ' + name))
-                changed_dir = True
-            self.script.append(self.config.expand(source['script']))
-        if not changed_dir and (unpack_before_chdir and not create_dir):
-            self.script.append(self.config.expand('cd ' + name))
+            if not download_only:
+                self.script_build.append(self.config.expand('cd %{_builddir}'))
+                if not deleted_dir and delete_before_unpack and name is not None:
+                    self.script_build.append(self.config.expand('%{__rm} -rf ' + name))
+                    deleted_dir = True
+                if not created_dir and create_dir and name is not None:
+                    self.script_build.append(self.config.expand('%{__mkdir_p} ' + name))
+                    created_dir = True
+                if not changed_dir and (not unpack_before_chdir or create_dir) and \
+                   name is not None:
+                    self.script_build.append(self.config.expand('cd ' + name))
+                    changed_dir = True
+                #
+                # On Windows tar can fail on links if the link appears in the
+                # tar file before the target of the link exists. We can assume the
+                # tar file is correct, that is, all files and links are valid,
+                # so on an error we redo the untar a second time.
+                #
+                if options.host_windows or no_errors:
+                    self.script_build.append('set +e')
+                self.script_build.append(self.config.expand(source['script']))
+                if options.host_windows or not no_errors:
+                    self.script_build.append('tar_exit=$?')
+                if options.host_windows or no_errors:
+                    self.script_build.append('set -e')
+                if options.host_windows:
+                    if no_errors:
+                        self.script_build.append(' set +e')
+                        self.script_build.append(' ' + self.config.expand(source['script']))
+                        self.script_build.append(' set -e')
+                    else:
+                        self.script_build.append('if test $tar_exit != 0; then')
+                        self.script_build.append(' ' + self.config.expand(source['script']))
+                        self.script_build.append('fi')
+        if not changed_dir and (unpack_before_chdir and not create_dir) and \
+           name is not None and not download_only:
+            self.script_build.append(self.config.expand('cd ' + name))
             changed_dir = True
-        self.script.append(self.config.expand('%{__setup_post}'))
+        self.script_build.append(self.config.expand('%{__setup_post}'))
 
     def patch_setup(self, package, args):
         name = args[1]
@@ -327,7 +382,7 @@ class build:
                 else:
                     url += [pp]
             if len(url) == 0:
-                raise error.general('patch URL not found: %s' % (' '.join(args)))
+                raise error.general('patch URL not found: %s' % (' '.join(opts)))
             #
             # Look for --rsb-file as an option we use as a local file name.
             # This can be used if a URL has no reasonable file name the
@@ -339,9 +394,11 @@ class build:
                     if o.startswith('--rsb-file'):
                        os_ = o.split('=')
                        if len(os_) != 2:
-                           raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
+                           raise error.general('invalid --rsb-file option: %s' % \
+                                               (' '.join(opts)))
                        if os_[0] != '--rsb-file':
-                           raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
+                           raise error.general('invalid --rsb-file option: %s' % \
+                                               (' '.join(opts)))
                        file_override = os_[1]
                 opts = [o for o in opts if not o.startswith('--rsb-')]
             if len(opts) == 0:
@@ -353,7 +410,8 @@ class build:
             #
             # Parse the URL first in the source builder's patch directory.
             #
-            patch = download.parse_url(url, '_patchdir', self.config, self.opts, file_override)
+            patch = download.parse_url(url, '_patchdir', self.config,
+                                       self.opts, file_override)
             #
             # Download the patch
             #
@@ -363,7 +421,7 @@ class build:
             else:
                 patch['script'] = '%{__cat} ' + patch['local']
             patch['script'] += ' | %%{__patch} %s' % (opts)
-            self.script.append(self.config.expand(patch['script']))
+            self.script_build.append(self.config.expand(patch['script']))
 
     def run(self, command, shell_opts = '', cwd = None):
         e = execute.capture_execution(log = log.default, dump = self.opts.quiet())
@@ -381,7 +439,7 @@ class build:
             self.mkdir(builddir)
 
     def prep(self, package):
-        self.script.append('echo "==> %prep:"')
+        self.script_build.append('echo "==> %prep:"')
         _prep = package.prep()
         if _prep:
             for l in _prep:
@@ -403,59 +461,78 @@ class build:
                         sources.hash(args[1:], self.macros, err)
                         self.hash(package, args)
                     else:
-                        self.script.append(' '.join(args))
+                        self.script_build.append(' '.join(args))
 
     def build(self, package):
-        self.script.append('echo "==> clean %{buildroot}: ${SB_BUILD_ROOT}"')
-        self.script.append('%s ${SB_BUILD_ROOT}' %
-                           (self.config.expand('%{__rmdir}')))
-        self.script.append('%s ${SB_BUILD_ROOT}' %
-                           (self.config.expand('%{__mkdir_p}')))
-        self.script.append('echo "==> %build:"')
+        self.script_build.append('echo "==> clean %{buildroot}: ${SB_BUILD_ROOT}"')
+        self.script_build.append('%s ${SB_BUILD_ROOT}' %
+                                 (self.config.expand('%{__rmdir}')))
+        self.script_build.append('%s ${SB_BUILD_ROOT}' %
+                                 (self.config.expand('%{__mkdir_p}')))
+        self.script_build.append('echo "==> %build:"')
         _build = package.build()
         if _build:
             for l in _build:
-                self.script.append(l)
+                self.script_build.append(l)
 
     def install(self, package):
-        self.script.append('echo "==> %install:"')
+        self.script_build.append('echo "==> %install:"')
         _install = package.install()
         if _install:
             for l in _install:
                 args = l.split()
-                self.script.append(' '.join(args))
+                self.script_build.append(' '.join(args))
 
     def files(self, package):
         if self.create_tar_files \
            and not self.macros.get('%{_disable_packaging'):
-            self.script.append('echo "==> %files:"')
+            self.script_build.append('echo "==> %files:"')
             inpath = path.abspath(self.config.expand('%{buildroot}'))
             tardir = path.abspath(self.config.expand('%{_tardir}'))
-            self.script.append(self.config.expand('if test -d %s; then' % (inpath)))
-            self.script.append(self.config.expand('  %%{__mkdir_p} %s' % tardir))
-            self.script.append(self.config.expand('  cd ' + inpath))
+            self.script_build.append(self.config.expand('if test -d %s; then' % (inpath)))
+            self.script_build.append(self.config.expand('  %%{__mkdir_p} %s' % tardir))
+            self.script_build.append(self.config.expand('  cd ' + inpath))
             tar = path.join(tardir, package.long_name() + '.tar.bz2')
             cmd = self.config.expand('  %{__tar} -cf - . ' + '| %{__bzip2} > ' + tar)
-            self.script.append(cmd)
-            self.script.append(self.config.expand('  cd %{_builddir}'))
-            self.script.append('fi')
+            self.script_build.append(cmd)
+            self.script_build.append(self.config.expand('  cd %{_builddir}'))
+            self.script_build.append('fi')
 
     def clean(self, package):
-        self.script.append('echo "==> %clean:"')
+        self.script_clean.reset()
+        self.script_clean.append(self.config.expand('%{___build_template}'))
+        self.script_clean.append('echo "=> ' + package.name() + ': CLEAN"')
+        self.script_clean.append('echo "==> %clean:"')
         _clean = package.clean()
         if _clean is not None:
             for l in _clean:
                 args = l.split()
-                self.script.append(' '.join(args))
+                self.script_clean.append(' '.join(args))
+
+    def sizes(self, package):
+        def _sizes(package, what, path):
+            package.set_size(what, path)
+            s = humanize_number(package.get_size(what), 'B')
+            log.trace('size: %s (%s): %s (%d)' % (what, path, s, package.get_size(what)))
+            return s
+        s = {}
+        for p in [('build', '%{_builddir}'),
+                  ('build', '%{buildroot}'),
+                  ('installed', '%{buildroot}')]:
+            hs = _sizes(package, p[0], self.config.expand(p[1]))
+            s[p[0]] = hs
+        log.notice('sizes: %s: %s (installed: %s)' % (package.name(),
+                                                      s['build'],
+                                                      s['installed']))
 
     def build_package(self, package):
         if self.canadian_cross():
             if not self.config.defined('%{allow_cxc}'):
                 raise error.general('Canadian Cross is not allowed')
-            self.script.append('echo "==> Candian-cross build/target:"')
-            self.script.append('SB_CXC="yes"')
+            self.script_build.append('echo "==> Canadian-cross build/target:"')
+            self.script_build.append('SB_CXC="yes"')
         else:
-            self.script.append('SB_CXC="no"')
+            self.script_build.append('SB_CXC="no"')
         self.build(package)
         self.install(package)
         self.files(package)
@@ -501,18 +578,24 @@ class build:
                 log.trace('---- macro maps %s' % ('-' * 55))
                 log.trace('%s' % (str(self.config.macros)))
                 log.trace('-' * 70)
-                self.script.reset()
-                self.script.append(self.config.expand('%{___build_template}'))
-                self.script.append('echo "=> ' + name + ':"')
+                self.script_build.reset()
+                self.script_build.append(self.config.expand('%{___build_template}'))
+                self.script_build.append('echo "=> ' + name + ': BUILD"')
                 self.prep(package)
                 self.build_package(package)
                 if not self.opts.dry_run():
                     self.builddir()
-                    sn = path.join(self.config.expand('%{_builddir}'), 'doit')
-                    log.output('write script: ' + sn)
-                    self.script.write(sn)
+                    build_sn = path.join(self.config.expand('%{_builddir}'), 'do-build')
+                    log.output('write script: ' + build_sn)
+                    self.script_build.write(build_sn)
+                    clean_sn = path.join(self.config.expand('%{_builddir}'), 'do-clean')
+                    log.output('write script: ' + clean_sn)
+                    self.script_clean.write(clean_sn)
                     log.notice('building: %s%s' % (cxc_label, name))
-                    self.run(sn)
+                    self.run(build_sn)
+                    self.sizes(package)
+                    log.notice('cleaning: %s%s' % (cxc_label, name))
+                    self.run(clean_sn)
             except error.general as gerr:
                 log.notice(str(gerr))
                 log.stderr('Build FAILED')
@@ -539,6 +622,22 @@ class build:
         package = packages['main']
         return package.disabled()
 
+    def get_build_size(self):
+        package = self.main_package()
+        if package.disabled():
+            return 0
+        return package.get_size('build')
+
+    def get_installed_size(self):
+        package = self.main_package()
+        if package.disabled():
+            return 0
+        return package.get_size('installed')
+
+    def includes(self):
+        if self.config:
+            return self.config.includes()
+
 def get_configs(opts):
 
     def _scan(_path, ext):
@@ -552,10 +651,17 @@ def get_configs(opts):
         return configs
 
     configs = { 'paths': [], 'files': [] }
-    for cp in opts.defaults.expand('%{_configdir}').split(':'):
+    paths = opts.defaults.expand('%{_configdir}').split(':')
+    root = path.host(os.path.commonprefix(paths))
+    configs['root'] = root
+    configs['localpaths'] = [lp[len(root):] for lp in paths]
+    for cp in paths:
         hcp = path.host(path.abspath(cp))
         configs['paths'] += [hcp]
-        configs['files'] += _scan(hcp, ['.cfg', '.bset'])
+        hpconfigs = sorted(set(_scan(hcp, ['.cfg', '.bset'])))
+        hcplocal = hcp[len(root):]
+        configs[hcplocal] = [path.join(hcplocal, c) for c in hpconfigs]
+        configs['files'] += hpconfigs
     configs['files'] = sorted(set(configs['files']))
     return configs
 
@@ -576,7 +682,7 @@ def run(args):
     try:
         optargs = { '--list-configs': 'List available configurations' }
         opts = options.load(args, optargs)
-        log.notice('RTEMS Source Builder, Package Builder, %s' % (version.str()))
+        log.notice('RTEMS Source Builder, Package Builder, %s' % (version.string()))
         opts.log_info()
         if not check.host_setup(opts):
             if not opts.force():
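
The new sizes() reporting in build.py prints human readable totals via the
humanize_number() helper added near the top of the file. A standalone sketch
of that helper and the strings the size report produces:

  # Standalone sketch of humanize_number() as used by build.sizes().
  def humanize_number(num, suffix):
      for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
          if abs(num) < 1024.0:
              return "%5.3f%s%s" % (num, unit, suffix)
          num /= 1024.0
      return "%.3f%s%s" % (num, 'Y', suffix)

  print(humanize_number(512, 'B'))               # 512.000B
  print(humanize_number(3 * 1024 * 1024, 'B'))   # 3.000MB
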
diff --git a/source-builder/sb/check.py b/source-builder/sb/check.py
index ef8f6a4..7e23223 100644
--- a/source-builder/sb/check.py
+++ b/source-builder/sb/check.py
@@ -23,16 +23,16 @@
 
 from __future__ import print_function
 
-import os
-
-import error
-import execute
 import fnmatch
-import log
-import options
-import path
+import os
 import re
-import version
+
+from . import error
+from . import execute
+from . import log
+from . import options
+from . import path
+from . import version
 
 def _check_none(_opts, macro, value, constraint):
     return True
@@ -104,20 +104,24 @@ def path_check(opts, silent = False):
     if 'PATH' in os.environ:
         paths = os.environ['PATH'].split(os.pathsep)
         for p in paths:
-            if len(p.strip()) == 0:
-                if not silent:
-                    log.notice('error: environment PATH contains an empty path')
-                return False
-            elif not options.host_windows and (p.strip() == '.' or p.strip() == '..'):
+            try:
+                if len(p.strip()) == 0:
+                    if not silent:
+                        log.notice('error: environment PATH contains an empty path')
+                    return False
+                elif not options.host_windows and (p.strip() == '.' or p.strip() == '..'):
+                    if not silent:
+                        log.notice('error: environment PATH invalid path: %s' % (p))
+                    return False
+                elif not path.exists(p):
+                    if not silent and opts.warn_all():
+                        log.notice('warning: environment PATH not found: %s' % (p))
+                elif not path.isdir(p):
+                    if not silent and opts.warn_all():
+                        log.notice('warning: environment PATH not a directory: %s' % (p))
+            except Exception as e:
                 if not silent:
-                    log.notice('error: environment PATH invalid path: %s' % (p))
-                return False
-            elif not path.exists(p):
-                if not silent and opts.warn_all():
-                    log.notice('warning: environment PATH not found: %s' % (p))
-            elif not path.isdir(p):
-                if not silent and opts.warn_all():
-                    log.notice('warning: environment PATH not a directory: %s' % (p))
+                    log.notice('warning: environment PATH suspicious path: %s' % (e))
     return True
 
 
@@ -264,7 +268,7 @@ def run():
     import sys
     try:
         _opts = options.load(args = sys.argv, logfile = False)
-        log.notice('RTEMS Source Builder - Check, %s' % (version.str()))
+        log.notice('RTEMS Source Builder - Check, %s' % (version.string()))
 
         orphans = _opts.parse_args('--check-orphans', error = False, extra = False)
         if orphans:
diff --git a/source-builder/sb/config.py b/source-builder/sb/config.py
index f5bd81c..cd0bf94 100644
--- a/source-builder/sb/config.py
+++ b/source-builder/sb/config.py
@@ -1,6 +1,6 @@
 #
 # RTEMS Tools Project (http://www.rtems.org/)
-# Copyright 2010-2016 Chris Johns (chrisj at rtems.org)
+# Copyright 2010-2018 Chris Johns (chrisj at rtems.org)
 # All rights reserved.
 #
 # This file is part of the RTEMS Tools package in 'rtems-tools'.
@@ -34,28 +34,31 @@ import re
 import sys
 
 try:
-    import error
-    import execute
-    import log
-    import options
-    import path
-    import pkgconfig
-    import sources
+    from . import error
+    from . import execute
+    from . import log
+    from . import options
+    from . import path
+    from . import pkgconfig
+    from . import sources
 except KeyboardInterrupt:
     print('user terminated', file = sys.stderr)
     sys.exit(1)
 except:
-    print('error: unknown application load error', file = sys.stderr)
-    sys.exit(1)
+    raise
 
 def _check_bool(value):
+    istrue = None
     if value.isdigit():
         if int(value) == 0:
             istrue = False
         else:
             istrue = True
     else:
-        istrue = None
+        if type(value) is str and len(value) == 2 and value[0] == '!':
+            istrue = _check_bool(value[1])
+            if type(istrue) is bool:
+                istrue = not istrue
     return istrue
 
 def _check_nil(value):
@@ -73,6 +76,7 @@ class package:
         self.config = config
         self.directives = {}
         self.infos = {}
+        self.sizes = {}
 
     def __str__(self):
 
@@ -218,6 +222,16 @@ class package:
     def disabled(self):
         return len(self.name()) == 0
 
+    def set_size(self, what, path_):
+        if what not in self.sizes:
+            self.sizes[what] = 0
+        self.sizes[what] += path.get_size(path_)
+
+    def get_size(self, what):
+        if what in self.sizes:
+            return self.sizes[what]
+        return 0
+
 class file:
     """Parse a config file."""
 
@@ -268,6 +282,7 @@ class file:
         return s
 
     def _reset(self, name):
+        self.parent = 'root'
         self.name = name
         self.load_depth = 0
         self.configpath = []
@@ -312,16 +327,18 @@ class file:
             log.output(text)
 
     def _error(self, msg):
-        err = 'error: %s' % (self._name_line_msg(msg))
-        log.stderr(err)
-        log.output(err)
-        self.in_error = True
         if not self.opts.dry_run():
-            log.stderr('warning: switched to dry run due to errors')
-            self.opts.set_dry_run()
+            if self.opts.keep_going():
+                err = 'error: %s' % (self._name_line_msg(msg))
+                log.stderr(err)
+                log.output(err)
+                self.in_error = True
+                log.stderr('warning: switched to dry run due to errors')
+                self.opts.set_dry_run()
+        raise error.general(self._name_line_msg(msg))
 
     def _label(self, name):
-        if name.startswith('%{') and name[-1] is '}':
+        if name.startswith('%{') and name[-1] == '}':
             return name
         return '%{' + name.lower() + '}'
 
@@ -402,29 +419,66 @@ class file:
             print('-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=')
         return macros
 
-    def _shell(self, line):
-        sl = self.sf.findall(line)
-        if len(sl):
-            e = execute.capture_execution()
-            for s in sl:
+    def _shell(self, line, nesting = 0):
+        #
+        # Parse the line and handle nesting '()' pairs. If on Windows
+        # handle embedded '"' (double quotes) as the command is run as
+        # a double quoted string.
+        #
+        def _exec(shell_macro):
+            output = ''
+            if len(shell_macro) > 3:
+                e = execute.capture_execution()
                 if options.host_windows:
-                    cmd = '%s -c "%s"' % (self.macros.expand('%{__sh}'), s[2:-1])
+                    shell_cmd = \
+                        ''.join([c if c != '"' else '\\' + c for c in shell_macro[2:-1]])
+                    cmd = '%s -c "%s"' % (self.macros.expand('%{__sh}'), shell_cmd)
                 else:
-                    cmd = s[2:-1]
+                    cmd = shell_macro[2:-1]
                 exit_code, proc, output = e.shell(cmd)
                 log.trace('shell-output: %d %s' % (exit_code, output))
-                if exit_code == 0:
-                    line = line.replace(s, output)
-                else:
-                    raise error.general('shell macro failed: %s:%d: %s' % (s, exit_code, output))
+                if exit_code != 0:
+                    raise error.general('shell macro failed: %s: %d: %s' % (cmd,
+                                                                            exit_code,
+                                                                            output))
+            return output
+
+        if nesting > 200:
+            raise error.general('shell macro failed: too many nesting levels')
+
+        updating = True
+        while updating:
+            updating = False
+            pos = line.find('%(')
+            if pos >= 0:
+                braces = 0
+                for p in range(pos + 2, len(line)):
+                    if line[p] == '(':
+                        braces += 1
+                    elif line[p] == ')':
+                        if braces > 0:
+                            braces -= 1
+                        else:
+                            shell_cmd = '%(' + \
+                                self._shell(line[pos + 2:p], nesting + 1) + ')'
+                            line = line[:pos] + _exec(shell_cmd) + line[p + 1:]
+                            updating = True
+                            break
+
         return line
 
     def _pkgconfig_check(self, test):
         # Hack to by pass pkgconfig checks when just wanting to download the
         # source.
-        if self.macros['_dry_run'] == '1' and self.macros['with_download'] == '1':
+        if self.macros['_dry_run'] == '1' and \
+           ('with_download' in self.macros and self.macros['with_download'] == '1'):
             return '0'
         ok = False
+        log.trace('pkgconfig: check: crossc=%d pkg_crossc=%d prefix=%s'
+                  % ( self._cross_compile(),
+                      self.pkgconfig_crosscompile,
+                      self.pkgconfig_prefix))
+        log.trace('pkgconfig: check: test=%s' % (test))
         if type(test) == str:
             test = test.split()
         if not self._cross_compile() or self.pkgconfig_crosscompile:
@@ -445,6 +499,7 @@ class file:
             except pkgconfig.error as pe:
                 self._error('pkgconfig: check: %s' % (pe))
             except:
+                raise
                 raise error.internal('pkgconfig failure')
         if ok:
             return '1'
@@ -462,13 +517,14 @@ class file:
                 if pkg_flags and self.pkgconfig_filter_flags:
                     fflags = []
                     for f in pkg_flags.split():
-                        if not f.startswith('-f') and not f.startswith('-W'):
+                        if not f.startswith('-W'):
                             fflags += [f]
                     pkg_flags = ' '.join(fflags)
-                log.trace('pkgconfig: %s: %s' % (flags, pkg_flags))
+                log.trace('pkgconfig: %s:  %s' % (flags, pkg_flags))
             except pkgconfig.error as pe:
-                self._error('pkgconfig: %s: %s' % (flags, pe))
+                self._error('pkgconfig: %s:  %s' % (flags, pe))
             except:
+                raise
                 raise error.internal('pkgconfig failure')
         if pkg_flags is None:
             pkg_flags = ''
@@ -543,7 +599,8 @@ class file:
                 elif m.startswith('%{expand'):
                     colon = m.find(':')
                     if colon < 8:
-                        log.warning('malformed expand macro, no colon found')
+                        log.warning(self._name_line_msg('malformed expand macro, ' \
+                                                        'no colon found'))
                     else:
                         e = self._expand(m[colon + 1:-1].strip())
                         s = s.replace(m, self._label(e))
@@ -562,7 +619,7 @@ class file:
                     mn = None
                 elif m.startswith('%{echo'):
                     if not m.endswith('}'):
-                        log.warning("malformed conditional macro '%s'" % (m))
+                        log.warning(self._name_line_msg("malformed conditional macro '%s'" % (m)))
                         mn = None
                     else:
                         e = self._expand(m[6:-1].strip())
@@ -586,31 +643,84 @@ class file:
                         s = s.replace(m, '1')
                     expanded = True
                     mn = None
+                elif m.startswith('%{triplet'):
+                    triplet = m[len('%{triplet'):-1].strip().split()
+                    ok = False
+                    if len(triplet) == 2:
+                        macro = self._expand(triplet[0])
+                        value = self._expand(triplet[1])
+                        vorig = value
+                        arch_value = ''
+                        vendor_value = ''
+                        os_value = ''
+                        dash = value.find('-')
+                        if dash >= 0:
+                            arch_value = value[:dash]
+                            value = value[dash + 1:]
+                        dash = value.find('-')
+                        if dash >= 0:
+                            vendor_value = value[:dash]
+                            value = value[dash + 1:]
+                        if len(value):
+                            os_value = value
+                        self.macros[macro] = vorig
+                        self.macros[macro + '_cpu'] = arch_value
+                        self.macros[macro + '_arch'] = arch_value
+                        self.macros[macro + '_vendor'] = vendor_value
+                        self.macros[macro + '_os'] = os_value
+                        ok = True
+                    if ok:
+                        s = s.replace(m, '')
+                    else:
+                        self._error('triplet error: %s' % (' '.join(triplet)))
+                    mn = None
                 elif m.startswith('%{path '):
                     pl = m[7:-1].strip().split()
                     ok = False
-                    if len(pl) == 2:
-                        ok = True
-                        epl = []
-                        for p in pl[1:]:
-                            epl += [self._expand(p)]
-                        p = ' '.join(epl)
-                        if pl[0].lower() == 'prepend':
+                    result = ''
+                    pl_0 = pl[0].lower()
+                    if pl_0 == 'prepend':
+                        if len(pl) == 2:
+                            ok = True
+                            p = ' '.join([self._expand(pp) for pp in pl[1:]])
                             if len(self.macros['_pathprepend']):
                                 self.macros['_pathprepend'] = \
                                     '%s:%s' % (p, self.macros['_pathprepend'])
                             else:
                                 self.macros['_pathprepend'] = p
-                        elif pl[0].lower() == 'postpend':
+                    elif pl_0 == 'postpend':
+                        if len(pl) == 2:
+                            ok = True
+                            p = ' '.join([self._expand(pp) for pp in pl[1:]])
                             if len(self.macros['_pathprepend']):
                                 self.macros['_pathprepend'] = \
                                     '%s:%s' % (self.macros['_pathprepend'], p)
                             else:
                                 self.macros['_pathprepend'] = p
-                        else:
-                            ok = False
+                    elif pl_0 == 'check':
+                        if len(pl) == 3:
+                            pl_1 = pl[1].lower()
+                            p = ' '.join([self._expand(pp) for pp in pl[2:]])
+                            if pl_1 == 'exists':
+                                ok = True
+                                if path.exists(p):
+                                    result = '1'
+                                else:
+                                    result = '0'
+                            elif pl_1 == 'isdir':
+                                ok = True
+                                if path.isdir(p):
+                                    result = '1'
+                                else:
+                                    result = '0'
+                            elif pl_1 == 'isfile':
+                                ok = True
+                                if path.isfile(p):
+                                    result = '1'
+                                else:
+                                    result = '0'
                     if ok:
-                        s = s.replace(m, '')
+                        s = s.replace(m, result)
                     else:
                         self._error('path error: %s' % (' '.join(pl)))
                     mn = None
@@ -634,7 +744,7 @@ class file:
                     colon = m[start:].find(':')
                     if colon < 0:
                         if not m.endswith('}'):
-                            log.warning("malformed conditional macro '%s'" % (m))
+                            log.warning(self._name_line_msg("malformed conditional macro '%s'" % (m)))
                             mn = None
                         else:
                             mn = self._label(m[start:-1])
@@ -676,21 +786,21 @@ class file:
 
     def _disable(self, config, ls):
         if len(ls) != 2:
-            log.warning('invalid disable statement')
+            log.warning(self._name_line_msg('invalid disable statement'))
         else:
             if ls[1] == 'select':
                 self.macros.lock_read_map()
-                log.trace('config: %s: %3d: _disable_select: %s' % (self.name, self.lc,
+                log.trace('config: %s: %3d:  _disable_select: %s' % (self.name, self.lc,
                                                                      ls[1]))
             else:
-                log.warning('invalid disable statement: %s' % (ls[1]))
+                log.warning(self._name_line_msg('invalid disable statement: %s' % (ls[1])))
 
     def _select(self, config, ls):
         if len(ls) != 2:
-            log.warning('invalid select statement')
+            log.warning(self._name_line_msg('invalid select statement'))
         else:
             r = self.macros.set_read_map(ls[1])
-            log.trace('config: %s: %3d: _select: %s %s %r' % \
+            log.trace('config: %s: %3d:  _select: %s %s %r' % \
                           (self.name, self.lc,
                            r, ls[1], self.macros.maps()))
 
@@ -702,7 +812,7 @@ class file:
 
     def _define(self, config, ls):
         if len(ls) <= 1:
-            log.warning('invalid macro definition')
+            log.warning(self._name_line_msg('invalid macro definition'))
         else:
             d = self._label(ls[1])
             if self.disable_macro_reassign:
@@ -713,7 +823,7 @@ class file:
                     else:
                         self.macros[d] = ' '.join([f.strip() for f in ls[2:]])
                 else:
-                    log.warning("macro '%s' already defined" % (d))
+                    log.warning(self._name_line_msg("macro '%s' already defined" % (d)))
             else:
                 if len(ls) == 2:
                     self.macros[d] = '1'
@@ -722,14 +832,14 @@ class file:
 
     def _undefine(self, config, ls):
         if len(ls) <= 1:
-            log.warning('invalid macro definition')
+            log.warning(self._name_line_msg('invalid macro definition'))
         else:
             mn = self._label(ls[1])
             if mn in self.macros:
                 del self.macros[mn]
 
     def _ifs(self, config, ls, label, iftrue, isvalid, dir, info):
-        log.trace('config: %s: %3d: _ifs[%i]: dir=%s %i %r' % \
+        log.trace('config: %s: %3d:  _ifs[%i]: dir=%s %i %r' % \
                   (self.name, self.lc, self.if_depth, str(dir), len(ls), ls))
         in_dir = dir
         in_iftrue = True
@@ -749,7 +859,7 @@ class file:
                     self._error(label + ' without %endif')
                     raise error.general('terminating build')
                 if r[1] == '%endif':
-                    log.trace('config: %s: %3d: _ifs[%i]: %%endif: dir=%s %s %s %r' % \
+                    log.trace('config: %s: %3d:  _ifs[%i]: %%endif: dir=%s %s %s %r' % \
                               (self.name, self.lc, self.if_depth,
                                str(dir), r[1], this_isvalid, data))
                     if in_dir is None:
@@ -757,7 +867,8 @@ class file:
                             dir, info, data = self._process_directive(r, dir, info, data)
                     else:
                         if in_dir != dir:
-                            self._error('directives cannot change scope across if statements')
+                            self._error('directives cannot change' \
+                                        ' scope across if statements')
 
                     return data
                 if r[1] == '%else':
@@ -785,7 +896,7 @@ class file:
         sls = reduce(add, ls[1:], '').split()
         cls = sls
 
-        log.trace('config: %s: %3d: _if[%i]: %s' % (self.name, self.lc,
+        log.trace('config: %s: %3d:  _if[%i]: %s' % (self.name, self.lc,
                                                     self.if_depth, sls))
 
         self.if_depth += 1
@@ -800,22 +911,25 @@ class file:
                 elif cls[0] == '&&':
                     join_op = 'and'
                 cls = cls[1:]
-                log.trace('config: %s: %3d: _if[%i]: joining: %s' % (self.name, self.lc,
-                                                                     self.if_depth,
-                                                                     join_op))
+                log.trace('config: %s: %3d:  _if[%i]: joining: %s' % \
+                          (self.name, self.lc,
+                           self.if_depth,
+                           join_op))
             ori = 0
             andi = 0
             i = len(cls)
             if '||' in cls:
                 ori = cls.index('||')
-                log.trace('config: %s: %3d: _if[%i}: OR found at %i' % (self.name, self.lc,
-                                                                        self.if_depth,
-                                                                        ori))
+                log.trace('config: %s: %3d:  _if[%i}: OR found at %i' % \
+                          (self.name, self.lc,
+                           self.if_depth,
+                           ori))
             if '&&' in cls:
                 andi = cls.index('&&')
-                log.trace('config: %s: %3d: _if[%i]: AND found at %i' % (self.name, self.lc,
-                                                                         self.if_depth,
-                                                                         andi))
+                log.trace('config: %s: %3d:  _if[%i]: AND found at %i' % \
+                          (self.name, self.lc,
+                           self.if_depth,
+                           andi))
             if ori > 0 or andi > 0:
                 if ori == 0:
                     i = andi
@@ -825,9 +939,10 @@ class file:
                     i = andi
                 else:
                     i = andi
-                log.trace('config: %s: %3d: _if[%i]: next OP found at %i' % (self.name, self.lc,
-                                                                             self.if_depth,
-                i))
+                log.trace('config: %s: %3d:  _if[%i]: next OP found at %i' % \
+                          (self.name, self.lc,
+                           self.if_depth,
+                           i))
             ls = cls[:i]
             if len(ls) == 0:
                 self._error('invalid if expression: ' + reduce(add, sls, ''))
@@ -924,10 +1039,10 @@ class file:
             else:
                 cistrue = istrue
 
-            log.trace('config: %s: %3d: _if[%i]:  %s %s %s %s' % (self.name, self.lc,
-                                                                  self.if_depth,
-                                                                  ifls, str(cistrue),
-                                                                  join_op, str(istrue)))
+            log.trace('config: %s: %3d:  _if[%i]:  %s %s %s %s' % (self.name, self.lc,
+                                                                   self.if_depth,
+                                                                   ifls, str(cistrue),
+                                                                   join_op, str(istrue)))
 
         if invert:
             cistrue = not cistrue
@@ -936,8 +1051,8 @@ class file:
 
         self.if_depth -= 1
 
-        log.trace('config: %s: %3d: _if[%i]: %r' % (self.name, self.lc,
-                                                    self.if_depth, ifs_return))
+        log.trace('config: %s: %3d:  _if[%i]: %r' % (self.name, self.lc,
+                                                     self.if_depth, ifs_return))
 
         return ifs_return
 
@@ -945,17 +1060,30 @@ class file:
         isos = False
         if isvalid:
             os = self.define('_os')
-            for l in ls:
+            ls = ' '.join(ls).split()
+            for l in ls[1:]:
                 if l in os:
                     isos = True
                     break
         return self._ifs(config, ls, '%ifos', isos, isvalid, dir, info)
 
+    def _ifnos(self, config, ls, isvalid, dir, info):
+        isnos = True
+        if isvalid:
+            os = self.define('_os')
+            ls = ' '.join(ls).split()
+            for l in ls[1:]:
+                if l in os:
+                    isnos = False
+                    break
+        return self._ifs(config, ls, '%ifnos', isnos, isvalid, dir, info)
+
     def _ifarch(self, config, positive, ls, isvalid, dir, info):
         isarch = False
         if isvalid:
             arch = self.define('_arch')
-            for l in ls:
+            ls = ' '.join(ls).split()
+            for l in ls[1:]:
                 if l in arch:
                     isarch = True
                     break
@@ -970,22 +1098,38 @@ class file:
             line = line[0:-1]
             b = line.find('#')
             if b >= 0:
-                line = line[1:b]
+                line = line[1:b] + ('\\' if line[-1] == '\\' else '')
             return line.strip()
 
+        def _clean_and_pack(line, last_line):
+            leading_ws = ' ' if len(line) > 0 and line[0].isspace() else ''
+            line = _clean(line)
+            if len(last_line) > 0:
+                line = last_line + leading_ws + line
+            return line
+
         #
         # Need to add code to count matching '{' and '}' and if they
         # do not match get the next line and add to the string until
         # they match. This closes an opening '{' that is on another
         # line.
         #
+        ll = ''
         for l in config:
             self.lc += 1
-            l = _clean(l)
+            l = _clean_and_pack(l, ll)
             if len(l) == 0:
                 continue
-            log.trace('config: %s: %0d: %s %s' % \
-                          (self.name, self.lc, str(isvalid), l))
+            if l[-1] == '\\':
+                ll = l[0:-1]
+                continue
+            ll = ''
+            if isvalid:
+                indicator = '>'
+            else:
+                indicator = ' '
+            log.trace('config: %s: %3d:%s%s [%s]' % \
+                          (self.name, self.lc, indicator, l, str(isvalid)))
             lo = l
             if isvalid:
                 l = self._expand(l)
@@ -1038,17 +1182,21 @@ class file:
                 elif ls[0] == '%if':
                     d = self._if(config, ls, isvalid, dir, info)
                     if len(d):
-                        log.trace('config: %s: %3d: %%if: %s' % (self.name, self.lc, d))
+                        log.trace('config: %s: %3d:  %%if: %s' % (self.name, self.lc, d))
                         return ('data', d)
                 elif ls[0] == '%ifn':
                     d = self._if(config, ls, isvalid, dir, info, True)
                     if len(d):
-                        log.trace('config: %s: %3d: %%ifn: %s' % (self.name, self.lc, d))
+                        log.trace('config: %s: %3d:  %%ifn: %s' % (self.name, self.lc, d))
                         return ('data', d)
                 elif ls[0] == '%ifos':
                     d = self._ifos(config, ls, isvalid, dir, info)
                     if len(d):
                         return ('data', d)
+                elif ls[0] == '%ifnos':
+                    d = self._ifnos(config, ls, isvalid, dir, info)
+                    if len(d):
+                        return ('data', d)
                 elif ls[0] == '%ifarch':
                     d = self._ifarch(config, True, ls, isvalid, dir, info)
                     if len(d):
@@ -1060,11 +1208,11 @@ class file:
                 elif ls[0] == '%endif':
                     if roc:
                         return ('control', '%endif', '%endif')
-                    log.warning("unexpected '" + ls[0] + "'")
+                    log.warning(self._name_line_msg("unexpected '" + ls[0] + "'"))
                 elif ls[0] == '%else':
                     if roc:
                         return ('control', '%else', '%else')
-                    log.warning("unexpected '" + ls[0] + "'")
+                    log.warning(self._name_line_msg("unexpected '" + ls[0] + "'"))
                 elif ls[0].startswith('%defattr'):
                     return ('data', [l])
                 elif ls[0] == '%bcond_with':
@@ -1086,10 +1234,11 @@ class file:
                     if isvalid:
                         for d in self._directive:
                             if ls[0].strip() == d:
-                                log.trace('config: %s: %0d: _parse: directive: %s' % \
+                                log.trace('config: %s: %3d:  _parse: directive: %s' % \
                                           (self.name, self.lc, ls[0].strip()))
                                 return ('directive', ls[0].strip(), ls[1:])
-                        log.warning("unknown directive: '" + ls[0] + "'")
+                        log.warning(self._name_line_msg("unknown directive: '" + \
+                                                        ls[0] + "'"))
                         return ('data', [lo])
             else:
                 return ('data', [lo])
@@ -1110,7 +1259,8 @@ class file:
                 _package = results[2][0]
             else:
                 if results[2][0].strip() != '-n':
-                    log.warning("unknown directive option: '%s'" % (' '.join(results[2])))
+                    log.warning(self._name_line_msg("unknown directive option: '%s'" % \
+                                                    (' '.join(results[2]))))
                 _package = results[2][1].strip()
             self._set_package(_package)
         if directive and directive != results[1]:
@@ -1120,7 +1270,8 @@ class file:
         return (directive, info, data)
 
     def _process_data(self, results, directive, info, data):
-        log.trace('config: %s: %3d: _process_data: result=#%r# directive=#%s# info=#%r# data=#%r#' % \
+        log.trace('config: %s: %3d:  _process_data: result=#%r# ' \
+                  'directive=#%s# info=#%r# data=#%r#' % \
                   (self.name, self.lc, results, directive, info, data))
         new_data = []
         for l in results[1]:
@@ -1132,11 +1283,11 @@ class file:
                 log.output(l[4:])
             elif l.startswith('%warning'):
                 l = self._expand(l)
-                log.warning(l[9:])
+                log.warning(self._name_line_msg(l[9:]))
             if not directive:
                 l = self._expand(l)
                 ls = self.tags.split(l, 1)
-                log.trace('config: %s: %3d: _tag: %s %s' % (self.name, self.lc, l, ls))
+                log.trace('config: %s: %3d:  _tag: %s %s' % (self.name, self.lc, l, ls))
                 if len(ls) > 1:
                     info = ls[0].lower()
                     if info[-1] == ':':
@@ -1147,10 +1298,12 @@ class file:
                 if info is not None:
                     self._info_append(info, info_data)
                 else:
-                    log.warning("invalid format: '%s'" % (info_data[:-1]))
+                    log.warning(self._name_line_msg("invalid format: '%s'" % \
+                                                    (info_data[:-1])))
             else:
                 l = self._expand(l)
-                log.trace('config: %s: %3d: _data: %s %s' % (self.name, self.lc, l, new_data))
+                log.trace('config: %s: %3d:  _data: %s %s' % \
+                          (self.name, self.lc, l, new_data))
                 new_data.append(l)
         return (directive, info, data + new_data)
 
@@ -1166,7 +1319,8 @@ class file:
         self.package = _package
 
     def _directive_extend(self, dir, data):
-        log.trace('config: %s: %3d: _directive_extend: %s: %r' % (self.name, self.lc, dir, data))
+        log.trace('config: %s: %3d:  _directive_extend: %s: %r' % \
+                  (self.name, self.lc, dir, data))
         self._packages[self.package].directive_extend(dir, data)
 
     def _info_append(self, info, data):
@@ -1191,7 +1345,6 @@ class file:
             return end
 
         if self.load_depth == 0:
-            self._reset(name)
             self._packages[self.package] = package(self.package,
                                                    self.define('%{_arch}'),
                                                    self)
@@ -1199,6 +1352,7 @@ class file:
         self.load_depth += 1
 
         save_name = self.name
+        save_parent = self.parent
         save_lc = self.lc
 
         #
@@ -1239,13 +1393,15 @@ class file:
                 raise error.general('no config file found: %s' % (cfgname))
 
         try:
-            log.trace('config: %s: _open: %s' % (self.name, path.host(configname)))
+            log.trace('config: %s:  _open: %s' % (self.name, path.host(configname)))
             config = open(path.host(configname), 'r')
         except IOError as err:
             raise error.general('error opening config file: %s' % (path.host(configname)))
 
         self.configpath += [configname]
-        self._includes += [configname]
+
+        self._includes += [configname + ':' + self.parent]
+        self.parent = configname
 
         self.name = self._relative_path(configname)
         self.lc = 0
@@ -1261,7 +1417,7 @@ class file:
                 elif r[0] == 'control':
                     if r[1] == '%end':
                         break
-                    log.warning("unexpected '%s'" % (r[1]))
+                    log.warning(self._name_line_msg("unexpected '%s'" % (r[1])))
                 elif r[0] == 'directive':
                     if r[1] == '%include':
                         self.load(r[2][0])
@@ -1276,13 +1432,12 @@ class file:
         except:
             config.close()
             raise
-
-        config.close()
-
-        self.name = save_name
-        self.lc = save_lc
-
-        self.load_depth -= 1
+        finally:
+            config.close()
+            self.name = save_name
+            self.parent = save_parent
+            self.lc = save_lc
+            self.load_depth -= 1
 
     def defined(self, name):
         return name in self.macros
@@ -1319,7 +1474,7 @@ class file:
             raise error.general('package "' + _package + '" not found')
         if name not in self._packages[_package].directives:
             raise error.general('directive "' + name + \
-                                    '" not found in package "' + _package + '"')
+                                '" not found in package "' + _package + '"')
         return self._packages[_package].directives[name]
 
     def abspath(self, rpath):
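
Reviewer note: the reworked _shell() above expands '%( )' macros innermost
first by scanning for the matching ')' with a parenthesis counter and
recursing on the inner text before running it. Below is a minimal,
standalone sketch of that scanning loop, slightly simplified; run_shell()
and the echo example are illustrative stand-ins, not the RSB
capture_execution API.

    import subprocess

    def expand_shell_macros(line, run_shell=None, nesting=0):
        # Expand '%( command )' macros innermost first by counting '(' and ')'.
        # run_shell is a hypothetical stand-in for RSB's capture_execution.
        if nesting > 200:
            raise RuntimeError('shell macro failed: too many nesting levels')
        if run_shell is None:
            run_shell = lambda cmd: \
                subprocess.check_output(cmd, shell=True).decode().strip()
        updating = True
        while updating:
            updating = False
            pos = line.find('%(')
            if pos >= 0:
                braces = 0
                for p in range(pos + 2, len(line)):
                    if line[p] == '(':
                        braces += 1
                    elif line[p] == ')':
                        if braces > 0:
                            braces -= 1
                        else:
                            # Expand the inner text first, then run the result.
                            inner = expand_shell_macros(line[pos + 2:p],
                                                        run_shell, nesting + 1)
                            line = line[:pos] + run_shell(inner) + line[p + 1:]
                            updating = True
                            break
        return line

    print(expand_shell_macros('%(echo outer %(echo inner))'))
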
diff --git a/source-builder/sb/cvs.py b/source-builder/sb/cvs.py
index 2ff7417..673488e 100644
--- a/source-builder/sb/cvs.py
+++ b/source-builder/sb/cvs.py
@@ -25,11 +25,10 @@ from __future__ import print_function
 
 import os
 
-import error
-import execute
-import log
-import options
-import path
+from . import error
+from . import execute
+from . import log
+from . import path
 
 class repo:
     """An object to manage a cvs repo."""
@@ -147,6 +146,7 @@ class repo:
 
 if __name__ == '__main__':
     import sys
+    from . import options
     opts = options.load(sys.argv, defaults = 'defaults.mc')
     ldir = 'cvs-test-rm-me'
     c = repo(ldir, opts)
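
Reviewer note: cvs.py gets the same treatment as the rest of sb/:
intra-package imports become explicit relative imports so the tree loads
cleanly as a package on both Python 2.7 and 3, and 'options', which only
the self-test needs, moves under the __main__ guard. A minimal sketch of
the layout using a hypothetical package 'sbpkg' (run as
'python -m sbpkg.repo'):

    # sbpkg/repo.py (hypothetical module mirroring the import layout above)
    from __future__ import print_function

    import sys

    try:
        from . import log           # relative import; needs sbpkg/__init__.py
    except KeyboardInterrupt:
        print('user terminated', file=sys.stderr)
        sys.exit(1)
    except:
        raise                       # let import failures show a traceback

    def checkout(module):
        log.output('checkout: %s' % (module))

    if __name__ == '__main__':
        # Only the self-test needs options, so import it here.
        from . import options
        opts = options.load(sys.argv, defaults='defaults.mc')
        checkout('test-rm-me')
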
diff --git a/source-builder/sb/darwin.py b/source-builder/sb/darwin.py
index d79cf47..95a2a4e 100644
--- a/source-builder/sb/darwin.py
+++ b/source-builder/sb/darwin.py
@@ -24,7 +24,7 @@
 
 import os
 
-import execute
+from . import execute
 
 def load():
     uname = os.uname()
@@ -52,6 +52,7 @@ def load():
         '_var':             ('dir',     'optional', '/usr/local/var'),
         '_prefix':          ('dir',     'optional', '%{_usr}'),
         '__ldconfig':       ('exe',     'none',     ''),
+        '__cmake':          ('exe',     'optional', 'cmake'),
         '__cvs':            ('exe',     'optional', 'cvs'),
         '__xz':             ('exe',     'required', 'xz'),
         'with_zlib':        ('none',    'none',     '--with-zlib=no'),
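
Reviewer note: darwin.py only gains the '__cmake' entry, declared as
('exe', 'optional', 'cmake') so the host check can tolerate a missing
cmake while still recording the default executable name. The sketch below
is not the sb/check.py code, just an illustration of how such
(type, constraint, default) triples can be evaluated; shutil.which needs
Python 3.3 or later.

    import shutil

    defaults = {
        '__cmake': ('exe', 'optional', 'cmake'),
        '__xz':    ('exe', 'required', 'xz'),
    }

    def check_exes(table):
        # Required executables must be on the PATH; optional ones only warn.
        ok = True
        for macro, (mtype, constraint, default) in sorted(table.items()):
            if mtype != 'exe' or constraint == 'none':
                continue
            if shutil.which(default) is None:
                if constraint == 'required':
                    print('error: %s not found: %s' % (macro, default))
                    ok = False
                else:
                    print('warning: optional %s not found: %s' % (macro, default))
        return ok

    check_exes(defaults)
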
diff --git a/source-builder/sb/download.py b/source-builder/sb/download.py
index f8dd305..9b76c8d 100644
--- a/source-builder/sb/download.py
+++ b/source-builder/sb/download.py
@@ -24,6 +24,7 @@
 
 from __future__ import print_function
 
+import base64
 import hashlib
 import os
 import re
@@ -36,13 +37,13 @@ except ImportError:
     import urllib2 as urllib_request
     import urlparse as urllib_parse
 
-import cvs
-import error
-import git
-import log
-import path
-import sources
-import version
+from . import cvs
+from . import error
+from . import git
+from . import log
+from . import path
+from . import sources
+from . import version
 
 def _do_download(opts):
     download = True
@@ -108,8 +109,13 @@ def _hash_check(file_, absfile, macros, remove = True):
             raise
         if _in is not None:
             _in.close()
-        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
-        if hasher.hexdigest() != hash[1]:
+        hash_hex = hasher.hexdigest()
+        hash_base64 = base64.b64encode(hasher.digest()).decode('utf-8')
+        log.output('checksums: %s: (hex: %s) (b64: %s) => %s' % (file_,
+                                                                 hash_hex,
+                                                                 hash_base64,
+                                                                 hash[1]))
+        if hash_hex != hash[1] and hash_base64 != hash[1]:
             log.warning('checksum error: %s' % (file_))
             failed = True
         if failed and remove:
@@ -181,12 +187,13 @@ def _http_parser(source, pathkey, config, opts):
                 raise error.general('gitweb.cgi path missing p or h: %s' % (url))
             source['file'] = '%s-%s.patch' % (p, h)
         #
-        # Check the source file name for any extra request query data and remove if
-        # found. Some hosts do not like file names containing them.
+        # Replace any characters in the file name other than alphanumerics,
+        # '.' and '-' with '-'.
         #
-        if '?' in source['file']:
-            qmark = source['file'].find('?')
-            source['file'] = source['file'][:qmark]
+        source['file'] = re.sub(r'[^a-zA-Z0-9.\-]+', '-', source['file'])
+        max_file_len = 127
+        if len(source['file']) > max_file_len:
+            raise error.general('file name length is greater than %i (maybe use --rsb-file=FILE option): %s' % \
+                                (max_file_len, source['file']))
     #
     # Check local path
     #
@@ -327,6 +334,9 @@ def parse_url(url, pathkey, config, opts, file_override = None):
         log.output('download: file-override: %s' % (file_override))
         source['file'] = file_override
         source['options'] += ['file-override']
+    question_mark = source['file'].find('?')
+    if question_mark >= 0:
+        source['file'] = source['file'][:question_mark]
     source['name'], source['ext'] = path.splitext(source['file'])
     if source['name'].endswith('.tar'):
         source['name'] = source['name'][:-4]
@@ -506,6 +516,15 @@ def _git_downloader(url, local, config, opts):
             log.notice('git: reset: %s' % (us[0]))
             if _do_download(opts):
                 repo.reset(arg)
+                repo.submodule_foreach(['reset'] + arg)
+        elif _as[0] == 'clean':
+            arg = []
+            if len(_as) > 1:
+                arg = ['--%s' % (_as[1])]
+            log.notice('git: clean: %s' % (us[0]))
+            if _do_download(opts):
+                repo.clean(arg)
+                repo.submodule_foreach(['clean'] + arg)
         elif _as[0] == 'protocol':
             pass
         else:
@@ -590,16 +609,19 @@ def get_file(url, local, opts, config):
         raise error.general('source not found: %s' % (path.host(local)))
     #
     # Check if a URL has been provided on the command line. If the package is
-    # released push to the start the RTEMS URL unless overrided by the command
-    # line option --with-release-url. The variant --without-release-url can
-    # override the released check.
+    # released push the release path URLs to the start of the RTEMS URL list
+    # unless overridden by the command line option --with-release-url. The
+    # variant --without-release-url can override the released check.
     #
     url_bases = opts.urls()
+    if url_bases is None:
+        url_bases = []
     try:
         rtems_release_url_value = config.macros.expand('%{release_path}')
     except:
         rtems_release_url_value = None
     rtems_release_url = None
+    rtems_release_urls = []
     if version.released() and rtems_release_url_value:
         rtems_release_url = rtems_release_url_value
     with_rel_url = opts.with_arg('release-url')
@@ -618,18 +640,17 @@ def get_file(url, local, opts, config):
     elif with_rel_url[0] == 'without_release-url' and with_rel_url[1] == 'yes':
         rtems_release_url = None
     if rtems_release_url is not None:
-        log.trace('release url: %s' % (rtems_release_url))
-        #
-        # If the URL being fetched is under the release path do not add the
-        # sources release path because it is already there.
-        #
-        if not url.startswith(rtems_release_url):
-            if url_bases is None:
-                url_bases = [rtems_release_url]
-            else:
-                url_bases.append(rtems_release_url)
+        rtems_release_urls = rtems_release_url.split(',')
+        for release_url in rtems_release_urls:
+            log.trace('release url: %s' % (release_url))
+            #
+            # If the URL being fetched is under the release path do not add
+            # the sources release path because it is already there.
+            #
+            if not url.startswith(release_url):
+                url_bases = [release_url] + url_bases
     urls = []
-    if url_bases is not None:
+    if len(url_bases) > 0:
         #
         # Split up the URL we are being asked to download.
         #
@@ -645,7 +666,7 @@ def get_file(url, local, opts, config):
             # Hack to fix #3064 where --rsb-file is being used. This code is a
             # mess and should be refactored.
             #
-            if version.released() and base == rtems_release_url:
+            if version.released() and base in rtems_release_urls:
                 url_file = path.basename(local)
             if base[-1:] != '/':
                 base += '/'
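
Reviewer note: _hash_check() above now accepts the configured checksum
either as the hex digest or as base64 of the raw digest. A self-contained
sketch of that comparison using only hashlib and base64; the file name and
checksum in the usage comment are hypothetical.

    import base64
    import hashlib

    def hash_matches(algorithm, filename, expected):
        # Hash the file and accept 'expected' in either hex or base64 form,
        # mirroring the dual comparison in the updated _hash_check().
        hasher = hashlib.new(algorithm)
        with open(filename, 'rb') as f:
            for block in iter(lambda: f.read(1024 * 1024), b''):
                hasher.update(block)
        hash_hex = hasher.hexdigest()
        hash_base64 = base64.b64encode(hasher.digest()).decode('utf-8')
        return expected in (hash_hex, hash_base64)

    # hash_matches('sha512', 'gdb-9.1.tar.xz', '<hex-or-base64-digest>')
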
diff --git a/source-builder/sb/ereport.py b/source-builder/sb/ereport.py
index 88aaa16..d8fb5f6 100755
--- a/source-builder/sb/ereport.py
+++ b/source-builder/sb/ereport.py
@@ -1,6 +1,6 @@
 #
 # RTEMS Tools Project (http://www.rtems.org/)
-# Copyright 2010-2014 Chris Johns (chrisj at rtems.org)
+# Copyright 2010-2017 Chris Johns (chrisj at rtems.org)
 # All rights reserved.
 #
 # This file is part of the RTEMS Tools package in 'rtems-testing'.
@@ -21,36 +21,40 @@
 # Create an error log.
 #
 
+from __future__ import print_function
+
 import os
 
-import error
-import log
+from . import error
+from . import log
 
 def generate(name, opts, header = None, footer = None):
-    r = ['RTEMS Tools Project - Source Builder Error Report'] + []
-    if header:
-        r += [' %s' % (header)]
-    r += [opts.info()]
-    if opts.defaults.get_value('%{_sbgit_valid}') == '1':
-        r += [' %s/%s' % (opts.defaults.get_value('%{_sbgit_remotes}'),
-                          opts.defaults.get_value('%{_sbgit_id}'))]
-    else:
-        r += [' RSB: not a valid repo']
-    if os.name == 'nt':
-        r += [' Windows']
-    else:
-        r += [' %s' % (' '.join(os.uname()))]
-    r += []
-    r += ['Tail of the build log:']
-    r += log.tail()
-    if footer:
-        r += [footer]
-    try:
-        name = name.replace('/', '-')
-        l = open(name, 'w')
-        l.write(os.linesep.join(r))
-        l.close()
-        log.notice('  See error report: %s' % (name))
-    except:
-        log.stderr('error: failure to create error report')
-        raise
+    label, result = opts.with_arg('error-report')
+    if (label.startswith('without_') and result != 'yes') or \
+       (label.startswith('with_') and result != 'no'):
+        r = ['RTEMS Tools Project - Source Builder Error Report'] + []
+        if header:
+            r += [' %s' % (header)]
+        r += [opts.info()]
+        if opts.defaults.get_value('%{_sbgit_valid}') == '1':
+            r += [' %s/%s' % (opts.defaults.get_value('%{_sbgit_remotes}'),
+                              opts.defaults.get_value('%{_sbgit_id}'))]
+        else:
+            r += [' RSB: not a valid repo']
+        if os.name == 'nt':
+            r += [' Windows']
+        else:
+            r += [' %s' % (' '.join(os.uname()))]
+        r += []
+        r += ['Tail of the build log:']
+        r += log.tail()
+        if footer:
+            r += [footer]
+        try:
+            name = name.replace('/', '-')
+            with open(name, 'w') as l:
+                l.write(os.linesep.join(r))
+            log.notice('  See error report: %s' % (name))
+        except:
+            log.stderr('error: failure to create error report')
+            raise
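
Reviewer note: generate() is now gated on the error-report with/without
option, so --without-error-report suppresses writing the report file. The
condition reads more easily as a predicate over the (label, value) pair
returned by opts.with_arg(); the pairs used in the sketch below are
assumptions based on how with_arg() results are tested here and in
download.py.

    def want_error_report(label, value):
        # Mirrors the gate added to generate(): write a report unless the
        # user explicitly asked for --without-error-report.
        return (label.startswith('without_') and value != 'yes') or \
               (label.startswith('with_') and value != 'no')

    # Assumed (label, value) pairs for the various command line forms:
    assert want_error_report('with_error-report', 'yes')
    assert not want_error_report('with_error-report', 'no')
    assert not want_error_report('without_error-report', 'yes')
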
diff --git a/source-builder/sb/error.py b/source-builder/sb/error.py
index 6a99b0e..5ea19d3 100644
--- a/source-builder/sb/error.py
+++ b/source-builder/sb/error.py
@@ -33,12 +33,12 @@ class error(Exception):
 class general(error):
     """Raise for a general error."""
     def __init__(self, what):
-        self.set_output('error: ' + what)
+        self.set_output('error: ' + str(what))
 
 class internal(error):
     """Raise for an internal error."""
     def __init__(self, what):
-        self.set_output('internal error: ' + what)
+        self.set_output('internal error: ' + str(what))
 
 class exit(error):
     """Raise for to exit."""
diff --git a/source-builder/sb/execute.py b/source-builder/sb/execute.py
index 12d8114..3db9abc 100755
--- a/source-builder/sb/execute.py
+++ b/source-builder/sb/execute.py
@@ -1,6 +1,6 @@
 #
 # RTEMS Tools Project (http://www.rtems.org/)
-# Copyright 2010-2016 Chris Johns (chrisj at rtems.org)
+# Copyright 2010-2017 Chris Johns (chrisj at rtems.org)
 # All rights reserved.
 #
 # This file is part of the RTEMS Tools package in 'rtems-tools'.
@@ -8,7 +8,7 @@
 # Permission to use, copy, modify, and/or distribute this software for any
 # purpose with or without fee is hereby granted, provided that the above
 # copyright notice and this permission notice appear in all copies.
-#
+ #
 # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
@@ -16,6 +16,7 @@
 # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
 
 #
 # Execute commands or scripts.
@@ -26,14 +27,20 @@
 from __future__ import print_function
 
 import functools
+import io
 import os
 import re
 import sys
 import subprocess
 import threading
+import time
+import traceback
+
+from . import error
+from . import log
 
-import error
-import log
+# Trace exceptions
+trace_threads = False
 
 # Redefine the PIPE from subprocess
 PIPE = subprocess.PIPE
@@ -86,89 +93,271 @@ def arg_subst_str(command, subst):
     def add(x, y): return x + ' ' + str(y)
     return functools.reduce(add, cmd, '')
 
-class execute:
-    """Execute commands or scripts. The 'output' is a funtion
-    that handles the output from the process."""
-    def __init__(self, output = None, error_prefix = '', verbose = False):
+class execute(object):
+    """Execute commands or scripts. The 'output' is a function that handles the
+    output from the process. The 'input' is a function that blocks and returns
+    data to be written to stdin."""
+    def __init__(self, output = None, input = None, cleanup = None,
+                 error_prefix = '', verbose = False):
+        self.lock = threading.Lock()
         self.output = output
+        self.input = input
+        self.cleanup = cleanup
         self.error_prefix = error_prefix
         self.verbose = verbose
         self.shell_exe = None
         self.shell_commands = False
         self.path = None
         self.environment = None
-
-    def capture(self, proc, timeout = None):
-        """Create 2 threads to read stdout and stderr and send to the
-        output handler. Based on the 'communicate' code in the subprocess
-        module."""
-        def _readthread(fh, out, prefix = ''):
+        self.outputting = False
+        self.timing_out = False
+        self.proc = None
+
+    def capture(self, proc, command = 'pipe', timeout = None):
+        """Create 3 threads to read stdout and stderr and send to the output handler
+        and call an input handler if one is provided. Based on the 'communicate' code
+        in the subprocess module."""
+        def _writethread(exe, fh, input):
+            """Call the input handler and write the returned data to stdin. The input
+            handler should block and return None or False if this thread is to exit
+            and True if this is a timeout check."""
+            if trace_threads:
+                print('execute:_writethread: start')
+            encoding = True
+            try:
+                tmp = bytes('temp', sys.stdin.encoding)
+            except:
+                encoding = False
+            input_types = [str, bytes]
+            try:
+                # The unicode type does not exist in Python 3; only added on Python 2
+                input_types += [unicode]
+            except:
+                pass
+            try:
+                while True:
+                    if trace_threads:
+                        print('execute:_writethread: call input', input)
+                    lines = input()
+                    if trace_threads:
+                        print('execute:_writethread: input returned:', type(lines))
+                    if type(lines) in input_types:
+                        try:
+                            if encoding:
+                                lines = bytes(lines, sys.stdin.encoding)
+                            fh.write(lines)
+                            fh.flush()
+                        except:
+                            break
+                    if lines == None or \
+                       lines == False or \
+                       (lines == True and fh.closed):
+                        break
+            except:
+                if trace_threads:
+                    print('execute:_writethread: exception')
+                    print(traceback.format_exc())
+                pass
+            try:
+                fh.close()
+            except:
+                pass
+            if trace_threads:
+                print('execute:_writethread: finished')
+
+        def _readthread(exe, fh, out, prefix = ''):
             """Read from a file handle and write to the output handler
             until the file closes."""
-            count = 0
-            while True:
-                line = fh.readline()
-                # str and bytes are the same type in Python2
-                if type(line) is not str and type(line) is bytes:
-                    line = line.decode(sys.stdout.encoding)
-                count += 1
-                if len(line) == 0:
-                    break
+            def _output_line(line, exe, prefix, out, count):
+                #exe.lock.acquire()
+                #exe.outputting = True
+                #exe.lock.release()
                 if out:
                     out(prefix + line)
                 else:
                     log.output(prefix + line)
                     if count > 10:
                         log.flush()
-                        count = 0
 
-        def _timerthread(proc, timer):
-            """Timer thread calls the timer handler if one
-            is present once a second. The user provides a handler
-            and returns False to kill the process or True continue."""
-            while True:
+            if trace_threads:
+                print('execute:_readthread: start')
+            count = 0
+            line = ''
+            try:
+                while True:
+                    #
+                    # The io module file handling returns up to the size passed
+                    # to the read call. The io handle has the default
+                    # buffering size. On any error assume the handle has gone
+                    # and the process is shutting down.
+                    #
+                    try:
+                        data = fh.read1(4096)
+                    except:
+                        data = ''
+                    if len(data) == 0:
+                        if len(line) > 0:
+                            _output_line(line + '\n', exe, prefix, out, count)
+                        break
+                    # str and bytes are the same type in Python2
+                    if type(data) is not str and type(data) is bytes:
+                        data = data.decode(sys.stdout.encoding)
+                    last_ch = data[-1]
+                    sd = (line + data).split('\n')
+                    if last_ch != '\n':
+                        line = sd[-1]
+                    else:
+                        line = ''
+                    sd = sd[:-1]
+                    if len(sd) > 0:
+                        for l in sd:
+                            if trace_threads:
+                                print('execute:_readthread: output-line:',
+                                      count, type(l))
+                            _output_line(l + '\n', exe, prefix, out, count)
+                            count += 1
+                        if count > 10:
+                            count -= 10
+            except:
+                raise
+                if trace_threads:
+                    print('execute:_readthread: exception')
+                    print(traceback.format_exc())
+                pass
+            try:
+                fh.close()
+            except:
+                pass
+            if len(line):
+                _output_line(line, exe, prefix, out, 100)
+            if trace_threads:
+                print('execute:_readthread: finished')
+
+        def _timerthread(exe, interval, function):
+            """Timer thread is used to time out a process if no output is
+            produced for the timeout interval."""
+            count = interval
+            while exe.timing_out:
                 time.sleep(1)
-                if not timer(proc):
-                    proc.stdout.close()
-                    proc.stderr.close()
+                if count > 0:
+                    count -= 1
+                exe.lock.acquire()
+                if exe.outputting:
+                    count = interval
+                    exe.outputting = False
+                exe.lock.release()
+                if count == 0:
+                    try:
+                        proc.kill()
+                    except:
+                        pass
+                    else:
+                        function()
+                    break
+
+        name = os.path.basename(command[0])
+
+        stdin_thread = None
+        stdout_thread = None
+        stderr_thread = None
+        timeout_thread = None
 
         if proc.stdout:
             stdout_thread = threading.Thread(target = _readthread,
-                                             args = (proc.stdout,
+                                             name = '_stdout[%s]' % (name),
+                                             args = (self,
+                                                     io.open(proc.stdout.fileno(),
+                                                             mode = 'rb',
+                                                             closefd = False),
                                                      self.output,
                                                      ''))
-            stdout_thread.setDaemon(True)
+            stdout_thread.daemon = True
             stdout_thread.start()
         if proc.stderr:
             stderr_thread = threading.Thread(target = _readthread,
-                                             args = (proc.stderr,
+                                             name = '_stderr[%s]' % (name),
+                                             args = (self,
+                                                     io.open(proc.stderr.fileno(),
+                                                             mode = 'rb',
+                                                             closefd = False),
                                                      self.output,
                                                      self.error_prefix))
-            stderr_thread.setDaemon(True)
+            stderr_thread.daemon = True
             stderr_thread.start()
-        if proc.stdout:
-            stdout_thread.join()
-        if proc.stderr:
-            stderr_thread.join()
-        return proc.wait()
+        if self.input and proc.stdin:
+            stdin_thread = threading.Thread(target = _writethread,
+                                            name = '_stdin[%s]' % (name),
+                                            args = (self,
+                                                    proc.stdin,
+                                                    self.input))
+            stdin_thread.daemon = True
+            stdin_thread.start()
+        if timeout:
+            self.timing_out = True
+            timeout_thread = threading.Thread(target = _timerthread,
+                                              name = '_timeout[%s]' % (name),
+                                              args = (self,
+                                                      timeout[0],
+                                                      timeout[1]))
+            timeout_thread.daemon = True
+            timeout_thread.start()
+        try:
+            self.lock.acquire()
+            try:
+                self.proc = proc
+            except:
+                raise
+            finally:
+                self.lock.release()
+            exitcode = proc.wait()
+        except:
+            proc.kill()
+            raise
+        finally:
+            self.lock.acquire()
+            try:
+                self.proc = None
+            except:
+                raise
+            finally:
+                self.lock.release()
+            if self.cleanup:
+                self.cleanup(proc)
+            if timeout_thread:
+                self.timing_out = False
+                timeout_thread.join(10)
+            if stdin_thread:
+                stdin_thread.join(2)
+            if stdout_thread:
+                stdout_thread.join(2)
+            if stderr_thread:
+                stderr_thread.join(2)
+        return exitcode
 
     def open(self, command, capture = True, shell = False,
              cwd = None, env = None,
-             stdin = None, stdout = None, stderr = None):
+             stdin = None, stdout = None, stderr = None,
+             timeout = None):
         """Open a command with arguments. Provide the arguments as a list or
         a string."""
+        if self.output is None:
+            raise error.general('capture needs an output handler')
+        cs = command
+        if type(command) is list:
+            def add(x, y): return x + ' ' + str(y)
+            cs = functools.reduce(add, command, '')[1:]
+        what = 'spawn'
+        if shell:
+            what = 'shell'
+        cs = what + ': ' + cs
         if self.verbose:
-            s = command
-            if type(command) is list:
-                def add(x, y): return x + ' ' + str(y)
-                s = functools.reduce(add, command, '')[1:]
-            what = 'spawn'
-            if shell:
-                what = 'shell'
-            log.output(what + ': ' + s)
+            log.output(what + ': ' + cs)
+        log.trace('exe: %s' % (cs))
         if shell and self.shell_exe:
             command = arg_list(command)
             command[:0] = self.shell_exe
+        if not stdin and self.input:
+            stdin = subprocess.PIPE
         if not stdout:
             stdout = subprocess.PIPE
         if not stderr:
@@ -191,10 +380,13 @@ class execute:
             proc = subprocess.Popen(command, shell = shell,
                                     cwd = cwd, env = env,
                                     stdin = stdin, stdout = stdout,
-                                    stderr = stderr)
+                                    stderr = stderr,
+                                    close_fds = False)
             if not capture:
                 return (0, proc)
-            exit_code = self.capture(proc)
+            if self.output is None:
+                raise error.general('capture needs an output handler')
+            exit_code = self.capture(proc, command, timeout)
             if self.verbose:
                 log.output('exit: ' + str(exit_code))
         except OSError as ose:
@@ -204,23 +396,26 @@ class execute:
         return (exit_code, proc)
 
     def spawn(self, command, capture = True, cwd = None, env = None,
-              stdin = None, stdout = None, stderr = None):
+              stdin = None, stdout = None, stderr = None,
+              timeout = None):
         """Spawn a command with arguments. Provide the arguments as a list or
         a string."""
         return self.open(command, capture, False, cwd, env,
-                         stdin, stdout, stderr)
+                         stdin, stdout, stderr, timeout)
 
     def shell(self, command, capture = True, cwd = None, env = None,
-              stdin = None, stdout = None, stderr = None):
+              stdin = None, stdout = None, stderr = None,
+              timeout = None):
         """Execute a command within a shell context. The command can contain
         argumments. The shell is specific to the operating system. For example
         it is cmd.exe on Windows XP."""
         return self.open(command, capture, True, cwd, env,
-                         stdin, stdout, stderr)
+                         stdin, stdout, stderr, timeout)
 
     def command(self, command, args = None, capture = True, shell = False,
                 cwd = None, env = None,
-                stdin = None, stdout = None, stderr = None):
+                stdin = None, stdout = None, stderr = None,
+                timeout = None):
         """Run the command with the args. The args can be a list
         or a string."""
         if args and not type(args) is list:
@@ -230,18 +425,21 @@ class execute:
             cmd.extend(args)
         return self.open(cmd, capture = capture, shell = shell,
                          cwd = cwd, env = env,
-                         stdin = stdin, stdout = stdout, stderr = stderr)
+                         stdin = stdin, stdout = stdout, stderr = stderr,
+                         timeout = timeout)
 
     def command_subst(self, command, substs, capture = True, shell = False,
                       cwd = None, env = None,
-                      stdin = None, stdout = None, stderr = None):
+                      stdin = None, stdout = None, stderr = None,
+                      timeout = None):
         """Run the command from the config data with the
         option format string substituted with the subst variables."""
         args = arg_subst(command, substs)
         return self.command(args[0], args[1:], capture = capture,
                             shell = shell or self.shell_commands,
                             cwd = cwd, env = env,
-                            stdin = stdin, stdout = stdout, stderr = stderr)
+                            stdin = stdin, stdout = stdout, stderr = stderr,
+                            timeout = timeout)
 
     def set_shell(self, execute):
         """Set the shell to execute when issuing a shell command."""
@@ -275,6 +473,37 @@ class execute:
         self.environment = environment
         return old_environment
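+
+    #
+    # The process control methods below act on the current child process,
+    # if any, and take the lock so they are safe to call from another
+    # thread, for example a timeout handler.
+    #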
 
+    def kill(self):
+        self.lock.acquire()
+        try:
+            if self.proc is not None:
+                self.proc.kill()
+        except:
+            raise
+        finally:
+            self.lock.release()
+
+    def terminate(self):
+        self.lock.acquire()
+        try:
+            if self.proc is not None:
+                self.proc.terminate()
+        except:
+            raise
+        finally:
+            self.lock.release()
+
+    def send_signal(self, signal):
+        self.lock.acquire()
+        try:
+            if self.proc is not None:
+                self.proc.send_signal(signal)
+        except:
+            raise
+        finally:
+            self.lock.release()
+
 class capture_execution(execute):
     """Capture all output as a string and return it."""
 
@@ -303,13 +532,14 @@ class capture_execution(execute):
                          verbose = verbose)
 
     def open(self, command, capture = True, shell = False, cwd = None, env = None,
-             stdin = None, stdout = None, stderr = None):
+             stdin = None, stdout = None, stderr = None, timeout = None):
         if not capture:
             raise error.general('output capture must be true; leave as default')
         #self.snapper.get_and_clear()
         exit_code, proc = execute.open(self, command, capture = True, shell = shell,
                                        cwd = cwd, env = env,
-                                       stdin = stdin, stdout = stdout, stderr = stderr)
+                                       stdin = stdin, stdout = stdout, stderr = stderr,
+                                       timeout = timeout)
         return (exit_code, proc, self.snapper.get_and_clear())
 
     def set_output(self, output):
@@ -333,11 +563,18 @@ if __name__ == "__main__":
         if ec == 0:
             print('piping input into ' + commands['pipe'][0] + ': ' + \
                   commands['pipe'][2])
-            proc.stdin.write(bytes(commands['pipe'][2], sys.stdin.encoding))
+            try:
+                out = bytes(commands['pipe'][2], sys.stdin.encoding)
+            except:
+                out = commands['pipe'][2]
+            proc.stdin.write(out)
             proc.stdin.close()
             e.capture(proc)
             del proc
 
+    def capture_output(text):
+        print(text, end = '')
+
     cmd_shell_test = 'if "%OS%" == "Windows_NT" (echo It is WinNT) else echo Is is not WinNT'
     sh_shell_test = 'x="me"; if [ $x = "me" ]; then echo "It was me"; else "It was him"; fi'
 
@@ -363,7 +600,7 @@ if __name__ == "__main__":
     print(arg_subst(['nothing', 'xx-%0-yyy', '%1', '%2-something'],
                     ['subst0', 'subst1', 'subst2']))
 
-    e = execute(error_prefix = 'ERR: ', verbose = True)
+    e = execute(error_prefix = 'ERR: ', output = capture_output, verbose = True)
     if sys.platform == "win32":
         run_tests(e, commands['windows'], False)
         if os.path.exists('c:\\msys\\1.0\\bin\\sh.exe'):
diff --git a/source-builder/sb/freebsd.py b/source-builder/sb/freebsd.py
index 87d4f41..8d08b4b 100644
--- a/source-builder/sb/freebsd.py
+++ b/source-builder/sb/freebsd.py
@@ -27,9 +27,9 @@
 import pprint
 import os
 
-import check
-import error
-import execute
+from . import check
+from . import error
+from . import execute
 
 def load():
     uname = os.uname()
@@ -57,16 +57,20 @@ def load():
         '_host_cpu':        ('none',    'none',     cpu),
         '_host_alias':      ('none',    'none',     '%{nil}'),
         '_host_arch':       ('none',    'none',     cpu),
+        'host_includes':    ('none',    'convert',  '-I%{_usr}/include'),
+        'host_ldflags':     ('none',    'convert',  '-L%{_usr}/lib'),
         '_usr':             ('dir',     'required', '/usr/local'),
         '_var':             ('dir',     'optional', '/usr/local/var'),
         '__bash':           ('exe',     'optional', '/usr/local/bin/bash'),
         '__bison':          ('exe',     'required', '/usr/local/bin/bison'),
+        '__cmake':          ('exe',     'optional', '/usr/local/bin/cmake'),
         '__git':            ('exe',     'required', '/usr/local/bin/git'),
-        '__svn':            ('exe',     'required', '/usr/local/bin/svn'),
+        '__svn':            ('exe',     'optional', '/usr/local/bin/svn'),
+        '__unzip':          ('exe',     'optional', '/usr/local/bin/unzip'),
         '__xz':             ('exe',     'optional', '/usr/bin/xz'),
         '__make':           ('exe',     'required', 'gmake'),
         '__patch_opts':     ('none',     'none',    '-E')
-        }
+    }
 
     defines['_build']        = defines['_host']
     defines['_build_vendor'] = defines['_host_vendor']
@@ -107,9 +111,11 @@ def load():
         if check.check_exe(cvs, cvs):
             defines['__cvs'] = cvs
         #
-        # Fix the mess iconv is on FreeBSD 10.0.
+        # Fix the mess iconv is on FreeBSD 10.0 and higher.
         #
-        defines['iconv_includes'] = ('none', 'none', '-I/usr/local/include -L/usr/local/lib')
+        defines['iconv_includes'] = ('none', 'none', '%{host_includes} %{host_ldflags}')
+        if fb_version >= 12:
+            defines['iconv_prefix'] = ('none', 'none', '%{_usr}')
 
         #
         # On 11.0+ makeinfo and install-info have moved to /usr/local/...
@@ -117,6 +123,11 @@ def load():
         if fb_version >= 11:
             defines['__install_info'] = ('exe', 'optional', '/usr/local/bin/install-info')
             defines['__makeinfo']     = ('exe', 'required', '/usr/local/bin/makeinfo')
+        #
+        # On 12.0+ unzip is in /usr/bin
+        #
+        if fb_version >= 12:
+            defines['__unzip'] = ('exe', 'optional', '/usr/bin/unzip')
     else:
         for gv in ['49', '48', '47']:
             gcc = '%s-portbld-freebsd%s-gcc%s' % (cpu, version, gv)
diff --git a/source-builder/sb/getsources.py b/source-builder/sb/getsources.py
new file mode 100644
index 0000000..d348da4
--- /dev/null
+++ b/source-builder/sb/getsources.py
@@ -0,0 +1,138 @@
+#
+# RTEMS Tools Project (http://www.rtems.org/)
+# Copyright 2010-2019 Chris Johns (chrisj at rtems.org)
+# All rights reserved.
+#
+# This file is part of the RTEMS Tools package in 'rtems-tools'.
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+#
+# This code downloads all the source a build set references for all hosts
+# the RSB supports.
+#
+
+from __future__ import print_function
+
+import argparse
+import copy
+import datetime
+import os
+import sys
+
+try:
+    from . import build
+    from . import error
+    from . import log
+    from . import path
+    from . import simhost
+    from . import version
+except KeyboardInterrupt:
+    print('abort: user terminated', file = sys.stderr)
+    sys.exit(1)
+except:
+    raise
+
+def run(args = sys.argv):
+    ec = 0
+    get_sources_error = True
+    try:
+        #
+        # The RSB options support cannot be used because it loads the defaults
+        # for the host which we cannot do here.
+        #
+        description  = 'RTEMS Get Sources downloads all the source a build set '
+        description += 'references for all hosts.'
+
+        argsp = argparse.ArgumentParser(prog = 'rtems-get-sources',
+                                        description = description)
+        argsp.add_argument('--rtems-version', help = 'Set the RTEMS version.',
+                           type = str,
+                           default = version.version())
+        argsp.add_argument('--list-hosts', help = 'List the hosts.',
+                           action = 'store_true')
+        argsp.add_argument('--list-bsets', help = 'List the build sets.',
+                           action = 'store_true')
+        argsp.add_argument('--download-dir', help = 'Download directory.',
+                           type = str)
+        argsp.add_argument('--clean', help = 'Clean the download directory.',
+                           action = 'store_true')
+        argsp.add_argument('--tar', help = 'Create a tarball of all the source.',
+                           action = 'store_true')
+        argsp.add_argument('--log', help = 'Log file.',
+                           type = str,
+                           default = simhost.log_default('getsource'))
+        argsp.add_argument('--trace', help = 'Enable trace logging for debugging.',
+                           action = 'store_true')
+        argsp.add_argument('bsets', nargs='*', help = 'Build sets.')
+
+        argopts = argsp.parse_args(args[1:])
+
+        simhost.load_log(argopts.log)
+        log.notice('RTEMS Source Builder - Get Sources, %s' % (version.string()))
+        log.tracing = argopts.trace
+
+        opts = simhost.load_options(args, argopts, extras = ['--with-download'])
+        configs = build.get_configs(opts)
+
+        if argopts.list_hosts:
+            simhost.list_hosts()
+        elif argopts.list_bsets:
+            simhost.list_bset_files(opts, configs)
+        else:
+            if argopts.clean:
+                if argopts.download_dir is None:
+                    raise error.general('cleaning of the default download directories is not supported')
+                if path.exists(argopts.download_dir):
+                    log.notice('Cleaning source directory: %s' % (argopts.download_dir))
+                    path.removeall(argopts.download_dir)
+            all_bsets = simhost.get_bset_files(configs)
+            if len(argopts.bsets) == 0:
+                bsets = all_bsets
+            else:
+                bsets = argopts.bsets
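+            #
+            # Load and run each build set once per host profile so the
+            # sources and patches for every supported host are fetched.
+            #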
+            for bset in bsets:
+                b = None
+                try:
+                    for host in simhost.profiles:
+                        get_sources_error = True
+                        b = simhost.buildset(bset, configs, opts)
+                        get_sources_error = False
+                        b.build(host)
+                        del b
+                except error.general as gerr:
+                    log.stderr(str(gerr))
+                    log.stderr('Build FAILED')
+                b = None
+    except error.general as gerr:
+        if get_sources_error:
+            log.stderr(str(gerr))
+        log.stderr('Build FAILED')
+        ec = 1
+    except error.internal as ierr:
+        if get_sources_error:
+            log.stderr(str(ierr))
+        log.stderr('Internal Build FAILED')
+        ec = 1
+    except error.exit as eerr:
+        pass
+    except KeyboardInterrupt:
+        log.notice('abort: user terminated')
+        ec = 1
+    except:
+        raise
+        log.notice('abort: unknown error')
+        ec = 1
+    sys.exit(ec)
+
+if __name__ == "__main__":
+    run()
diff --git a/source-builder/sb/git.py b/source-builder/sb/git.py
index f35c335..237e690 100644
--- a/source-builder/sb/git.py
+++ b/source-builder/sb/git.py
@@ -25,11 +25,10 @@ from __future__ import print_function
 
 import os
 
-import error
-import execute
-import log
-import options
-import path
+from . import error
+from . import execute
+from . import log
+from . import path
 
 class repo:
     """An object to manage a git repo."""
@@ -70,9 +69,9 @@ class repo:
         if len(gvs) < 3:
             raise error.general('invalid version string from git: %s' % (output))
         vs = gvs[2].split('.')
-        if len(vs) != 4:
+        if len(vs) not in [3, 4]:
             raise error.general('invalid version number from git: %s' % (gvs[2]))
-        return (int(vs[0]), int(vs[1]), int(vs[2]), int(vs[3]))
+        return tuple(map(int, vs))
 
     def clone(self, url, _path):
         ec, output = self._run(['clone', url, path.host(_path)], check = True)
@@ -105,14 +104,36 @@ class repo:
     def submodule(self, module):
         ec, output = self._run(['submodule', 'update', '--init', module], check = True)
 
+    def submodule_foreach(self, args = []):
+        if type(args) == str:
+            args = args.split()
+        ec, output = self._run(['submodule',
+                                'foreach',
+                                '--recursive',
+                                self.git] + args, check = True)
+
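+    #
+    # Read the repo's submodules. Each line of 'git submodule' output is
+    # the commit hash, the submodule path and a ref in parentheses.
+    #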
+    def submodules(self):
+        smodules = {}
+        ec, output = self._run(['submodule'], check = True)
+        if ec == 0:
+            for l in output.split('\n'):
+                ms = l.split()
+                if len(ms) == 3:
+                    smodules[ms[1]] = (ms[0], ms[2][1:-1])
+        return smodules
+
     def clean(self, args = []):
         if type(args) == str:
             args = [args]
         ec, output = self._run(['clean'] + args, check = True)
 
-    def status(self):
+    def status(self, submodules_always_clean = False):
         _status = {}
         if path.exists(self.path):
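+            #
+            # When submodules are treated as always clean any unstaged
+            # change against a submodule path is ignored.
+            #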
+            if submodules_always_clean:
+                submodules = self.submodules()
+            else:
+                submodules = {}
             ec, output = self._run(['status'])
             if ec == 0:
                 state = 'none'
@@ -133,16 +154,22 @@ class repo:
                         if l[0].isspace():
                             l = l.strip()
                             if l[0] != '(':
-                                if state not in _status:
-                                    _status[state] = []
-                                l = l[1:]
                                 if ':' in l:
                                     l = l.split(':')[1]
-                                _status[state] += [l.strip()]
+                                if len(l.strip()) > 0:
+                                    l = l.strip()
+                                    ls = l.split()
+                                    if state != 'unstaged' or ls[0] not in submodules:
+                                        if state not in _status:
+                                            _status[state] = [l]
+                                        else:
+                                            _status[state] += [l]
         return _status
 
     def dirty(self):
         _status = self.status()
+        _status.pop('untracked', None)
+        _status.pop('detached', None)
         return not (len(_status) == 1 and 'branch' in _status)
 
     def valid(self):
@@ -200,13 +227,19 @@ class repo:
         return hash
 
 if __name__ == '__main__':
+    import os.path
     import sys
-    opts = options.load(sys.argv)
+    from . import options
+    defaults = path.join(path.dirname(path.dirname(path.shell(sys.argv[0]))),
+                         'defaults.mc')
+    opts = options.load(sys.argv, defaults = defaults)
     g = repo('.', opts)
-    print(g.git_version())
-    print(g.valid())
-    print(g.status())
-    print(g.clean())
-    print(g.remotes())
-    print(g.email())
-    print(g.head())
+    print('g.git_version():', g.git_version())
+    print('g.valid():', g.valid())
+    print('g.submodules():', g.submodules())
+    print('g.status():', g.status())
+    print('g.status():', g.status(True))
+    print('g.dirty():', g.dirty())
+    print('g.remotes():', g.remotes())
+    print('g.email():', g.email())
+    print('g.head():', g.head())
diff --git a/source-builder/sb/linux.py b/source-builder/sb/linux.py
index e6c5470..d89377b 100644
--- a/source-builder/sb/linux.py
+++ b/source-builder/sb/linux.py
@@ -22,29 +22,15 @@
 # RTEMS project's spec files.
 #
 
+import multiprocessing
+import platform
 import pprint
 import os
 
-import platform
-import execute
-import path
+from . import path
 
 def load():
     uname = os.uname()
-    smp_mflags = ''
-    processors = '/bin/grep processor /proc/cpuinfo'
-    e = execute.capture_execution()
-    exit_code, proc, output = e.shell(processors)
-    ncpus = 0
-    if exit_code == 0:
-        try:
-            for l in output.split('\n'):
-                count = l.split(':')[1].strip()
-                if int(count) > ncpus:
-                    ncpus = int(count)
-        except:
-            pass
-    ncpus = str(ncpus + 1)
     if uname[4].startswith('arm'):
         cpu = 'arm'
     else:
@@ -52,7 +38,7 @@ def load():
 
     version = uname[2]
     defines = {
-        '_ncpus':           ('none',    'none',     ncpus),
+        '_ncpus':           ('none',    'none',     str(multiprocessing.cpu_count())),
         '_os':              ('none',    'none',     'linux'),
         '_host':            ('triplet', 'required', cpu + '-linux-gnu'),
         '_host_vendor':     ('none',    'none',     'gnu'),
@@ -69,13 +55,17 @@ def load():
         '__tar':            ('exe',     'required', '/bin/tar')
         }
 
-    # Works for LSB distros
-    try:
-        distro = platform.dist()[0]
-        distro_ver = float(platform.dist()[1])
-    except ValueError:
-        # Non LSB distro found, use failover"
-        pass
+    # platform.dist() was removed in Python 3.8
+    if hasattr(platform, 'dist'):
+        # Works for LSB distros
+        try:
+            distro = platform.dist()[0]
+            distro_ver = float(platform.dist()[1])
+        except ValueError:
+            # Non LSB distro found, use failover
+            pass
+    else:
+        distro = ''
 
     # Non LSB - fail over to issue
     if distro == '':
@@ -94,7 +84,7 @@ def load():
             distro = 'redhat'
     elif distro in ['centos', 'fedora']:
         distro = 'redhat'
-    elif distro in ['Ubuntu', 'ubuntu', 'LinuxMint', 'linuxmint']:
+    elif distro in ['Ubuntu', 'ubuntu', 'MX', 'LinuxMint', 'linuxmint']:
         distro = 'debian'
     elif distro in ['Arch']:
         distro = 'arch'
@@ -120,6 +110,12 @@ def load():
                      '__chown':        ('exe',     'required', '/usr/bin/chown') },
         'suse'   : { '__chgrp':        ('exe',     'required', '/usr/bin/chgrp'),
                      '__chown':        ('exe',     'required', '/usr/sbin/chown') },
+        'gentoo' : { '__bzip2':        ('exe',     'required', '/bin/bzip2'),
+                     '__chgrp':        ('exe',     'required', '/bin/chgrp'),
+                     '__chown':        ('exe',     'required', '/bin/chown'),
+                     '__gzip':         ('exe',     'required', '/bin/gzip'),
+                     '__grep':         ('exe',     'required', '/bin/grep'),
+                     '__sed':          ('exe',     'required', '/bin/sed') },
         }
 
     if distro in variations:
diff --git a/source-builder/sb/log.py b/source-builder/sb/log.py
index 301bd96..7ad4763 100755
--- a/source-builder/sb/log.py
+++ b/source-builder/sb/log.py
@@ -1,6 +1,6 @@
 #
 # RTEMS Tools Project (http://www.rtems.org/)
-# Copyright 2010-2012 Chris Johns (chrisj at rtems.org)
+# Copyright 2010-2017 Chris Johns (chrisj at rtems.org)
 # All rights reserved.
 #
 # This file is part of the RTEMS Tools package in 'rtems-testing'.
@@ -26,13 +26,18 @@ from __future__ import print_function
 import os
 import sys
 
-import error
+from . import error
 
 #
 # A global log.
 #
 default = None
 
+#
+# A global capture handler.
+#
+capture = None
+
 #
 # Global parameters.
 #
@@ -70,6 +75,8 @@ def stderr(text = os.linesep, log = None):
     for l in text.replace(chr(13), '').splitlines():
         print(l, file = sys.stderr)
         sys.stderr.flush()
+    if capture is not None:
+        capture(text)
 
 def output(text = os.linesep, log = None):
     if not quiet:
@@ -80,10 +87,12 @@ def notice(text = os.linesep, log = None):
         for l in text.replace(chr(13), '').splitlines():
             print(l)
         sys.stdout.flush()
+        if capture is not None:
+            capture(text)
     _output(text, log)
 
 def trace(text = os.linesep, log = None):
-    if tracing:
+    if not quiet and tracing:
         _output(text, log)
 
 def warning(text = os.linesep, log = None):
@@ -105,7 +114,7 @@ def tail(log = None):
 
 class log:
     """Log output to stdout or a file."""
-    def __init__(self, streams = None, tail_size = 200):
+    def __init__(self, streams = None, tail_size = 400):
         self.tail = []
         self.tail_size = tail_size
         self.fhs = [None, None]
diff --git a/source-builder/sb/macros.py b/source-builder/sb/macros.py
index cf25783..6ca87ed 100644
--- a/source-builder/sb/macros.py
+++ b/source-builder/sb/macros.py
@@ -27,8 +27,8 @@ import re
 import os
 import string
 
-import error
-import path
+from . import error
+from . import path
 
 #
 # Macro tables
@@ -53,7 +53,8 @@ class macros:
         def iterkeys(self):
             return self.keys
 
-    def _unicode_to_str(self, us):
+    @staticmethod
+    def _unicode_to_str(us):
         try:
             if type(us) == unicode:
                 return us.encode('ascii', 'replace')
@@ -122,6 +123,7 @@ class macros:
                 lc = 0
                 for l in ds:
                     lc += 1
+                    l = self._unicode_to_str(l)
                     while len(l):
                         if indent:
                             text += ' %21s %10s %12s' % (' ', ' ', ' ')
@@ -174,14 +176,14 @@ class macros:
                             'override', 'undefine', 'convert']:
             raise TypeError('bad value tuple (attrib field): %s' % (value[1]))
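+        #
+        # A 'convert' attribute means the value is macro expanded when the
+        # macro is set.
+        #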
         if value[1] == 'convert':
-            value = self.expand(value)
+            value = (value[0], value[1], self.expand(value[2]))
         self.macros[self.write_map][self.key_filter(key)] = value
 
     def __delitem__(self, key):
         self.undefine(key)
 
     def __contains__(self, key):
-        return self.has_key(key)
+        return self.has_key(self._unicode_to_str(key))
 
     def __len__(self):
         return len(list(self.keys()))
@@ -201,6 +203,7 @@ class macros:
         return sorted(set(keys))
 
     def has_key(self, key):
+        key = self._unicode_to_str(key)
         if type(key) is not str:
             raise TypeError('bad key type (want str): %s' % (type(key)))
         if self.key_filter(key) not in list(self.keys()):
@@ -230,7 +233,7 @@ class macros:
         return [rm[5:] for rm in self.read_maps]
 
     def key_filter(self, key):
-        if key.startswith('%{') and key[-1] is '}':
+        if key.startswith('%{') and key[-1] == '}':
             key = key[2:-1]
         return key.lower()
 
@@ -266,28 +269,28 @@ class macros:
                     print(']]]]]]]] c:%s(%d) s:%s t:"%s" m:%r M:%s' % \
                         (c, ord(c), state, token, macro, map))
                 l_remaining = l_remaining[1:]
-                if c is '#' and not state.startswith('value'):
+                if c == '#' and not state.startswith('value'):
                     break
                 if c == '\n' or c == '\r':
-                    if not (state is 'key' and len(token) == 0) and \
+                    if not (state == 'key' and len(token) == 0) and \
                             not state.startswith('value-multiline'):
                         raise error.general('malformed macro line:%d: %s' % (lc, l))
-                if state is 'key':
+                if state == 'key':
                     if c not in string.whitespace:
-                        if c is '[':
+                        if c == '[':
                             state = 'map'
-                        elif c is '%':
+                        elif c == '%':
                             state = 'directive'
-                        elif c is ':':
+                        elif c == ':':
                             macro += [token]
                             token = ''
                             state = 'attribs'
-                        elif c is '#':
+                        elif c == '#':
                             break
                         else:
                             token += c
-                elif state is 'map':
-                    if c is ']':
+                elif state == 'map':
+                    if c == ']':
                         if token not in macros:
                             macros[token] = {}
                         map = token
@@ -297,7 +300,7 @@ class macros:
                         token += c
                     else:
                         raise error.general('invalid macro map:%d: %s' % (lc, l))
-                elif state is 'directive':
+                elif state == 'directive':
                     if c in string.whitespace:
                         if token == 'include':
                             self.load(_clean(l_remaining))
@@ -308,7 +311,7 @@ class macros:
                         token += c
                     else:
                         raise error.general('invalid macro directive:%d: %s' % (lc, l))
-                elif state is 'include':
+                elif state == 'include':
                     if c is string.whitespace:
                         if token == 'include':
                             state = 'include'
@@ -316,49 +319,49 @@ class macros:
                         token += c
                     else:
                         raise error.general('invalid macro directive:%d: %s' % (lc, l))
-                elif state is 'attribs':
+                elif state == 'attribs':
                     if c not in string.whitespace:
-                        if c is ',':
+                        if c == ',':
                             macro += [token]
                             token = ''
                             if len(macro) == 3:
                                 state = 'value-start'
                         else:
                             token += c
-                elif state is 'value-start':
-                    if c is "'":
+                elif state == 'value-start':
+                    if c == "'":
                         state = 'value-line-start'
-                elif state is 'value-line-start':
-                    if c is "'":
+                elif state == 'value-line-start':
+                    if c == "'":
                         state = 'value-multiline-start'
                     else:
                         state = 'value-line'
                         token += c
-                elif state is 'value-multiline-start':
-                    if c is "'":
+                elif state == 'value-multiline-start':
+                    if c == "'":
                         state = 'value-multiline'
                     else:
                         macro += [token]
                         state = 'macro'
-                elif state is 'value-line':
-                    if c is "'":
+                elif state == 'value-line':
+                    if c == "'":
                         macro += [token]
                         state = 'macro'
                     else:
                         token += c
-                elif state is 'value-multiline':
-                    if c is "'":
+                elif state == 'value-multiline':
+                    if c == "'":
                         state = 'value-multiline-end'
                     else:
                         token += c
-                elif state is 'value-multiline-end':
-                    if c is "'":
+                elif state == 'value-multiline-end':
+                    if c == "'":
                         state = 'value-multiline-end-end'
                     else:
                         state = 'value-multiline'
                         token += "'" + c
-                elif state is 'value-multiline-end-end':
-                    if c is "'":
+                elif state == 'value-multiline-end-end':
+                    if c == "'":
                         macro += [token]
                         state = 'macro'
                     else:
@@ -366,7 +369,7 @@ class macros:
                         token += "''" + c
                 else:
                     raise error.internal('bad state: %s' % (state))
-                if state is 'macro':
+                if state == 'macro':
                     macros[map][self._unicode_to_str(macro[0].lower())] = \
                                 (self._unicode_to_str(macro[1]),
                                  self._unicode_to_str(macro[2]),
@@ -452,6 +455,7 @@ class macros:
 
     def expand(self, _str):
         """Simple basic expander of config file macros."""
+        _str = self._unicode_to_str(_str)
         expanded = True
         while expanded:
             expanded = False
diff --git a/source-builder/sb/mailer.py b/source-builder/sb/mailer.py
index b9ef812..ff25df5 100644
--- a/source-builder/sb/mailer.py
+++ b/source-builder/sb/mailer.py
@@ -27,9 +27,9 @@ import os
 import smtplib
 import socket
 
-import error
-import options
-import path
+from . import error
+from . import options
+from . import path
 
 def append_options(opts):
     opts['--mail'] = 'Send email report or results.'
@@ -92,9 +92,13 @@ class mail:
         from_addr = self.from_address()
         msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n" % \
             (from_addr, to_addr, subject) + body
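+        #
+        # The to address can be a single address, a comma separated string
+        # of addresses or a list of addresses.
+        #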
+        if type(to_addr) is str:
+            to_addr = to_addr.split(',')
+        if type(to_addr) is not list:
+            raise error.general('invalid to_addr type')
         try:
             s = smtplib.SMTP(self.smtp_host())
-            s.sendmail(from_addr, [to_addr], msg)
+            s.sendmail(from_addr, to_addr, msg)
         except smtplib.SMTPException as se:
             raise error.general('sending mail: %s' % (str(se)))
         except socket.error as se:
diff --git a/source-builder/sb/netbsd.py b/source-builder/sb/netbsd.py
index c7420c3..3398290 100644
--- a/source-builder/sb/netbsd.py
+++ b/source-builder/sb/netbsd.py
@@ -27,8 +27,8 @@
 import pprint
 import os
 
-import check
-import execute
+from . import check
+from . import execute
 
 def load():
     uname = os.uname()
diff --git a/source-builder/sb/options.py b/source-builder/sb/options.py
index 7d1856b..d6bffd0 100644
--- a/source-builder/sb/options.py
+++ b/source-builder/sb/options.py
@@ -29,16 +29,17 @@ import pprint
 import re
 import os
 import string
-
-import error
-import execute
-import git
-import log
-import macros
-import path
 import sys
 
-import version
+from . import download
+from . import error
+from . import execute
+from . import git
+from . import log
+from . import macros
+from . import path
+from . import sources
+from . import version
 
 basepath = 'sb'
 
@@ -53,12 +54,13 @@ class command_line:
 
     def __init__(self, argv, optargs, _defaults, command_path):
         self._long_opts = {
-            # key                 macro                handler            param  defs   init
+            # key                       macro                handler            param  defs   init
             '--prefix'               : ('_prefix',           self._lo_path,     True,  None,  False),
             '--topdir'               : ('_topdir',           self._lo_path,     True,  None,  False),
             '--configdir'            : ('_configdir',        self._lo_path,     True,  None,  False),
             '--builddir'             : ('_builddir',         self._lo_path,     True,  None,  False),
             '--sourcedir'            : ('_sourcedir',        self._lo_path,     True,  None,  False),
+            '--patchdir'             : ('_patchdir',         self._lo_path,     True,  None,  False),
             '--tmppath'              : ('_tmppath',          self._lo_path,     True,  None,  False),
             '--jobs'                 : ('_jobs',             self._lo_jobs,     True,  'max', True),
             '--log'                  : ('_logfile',          self._lo_string,   True,  None,  False),
@@ -82,6 +84,9 @@ class command_line:
             '--host'                 : ('_host',             self._lo_triplets, True,  None,  False),
             '--build'                : ('_build',            self._lo_triplets, True,  None,  False),
             '--target'               : ('_target',           self._lo_triplets, True,  None,  False),
+            '--rtems-tools'          : ('_rtems_tools',      self._lo_string,   True,  None,  False),
+            '--rtems-bsp'            : ('_rtems_bsp',        self._lo_string,   True,  None,  False),
+            '--rtems-version'        : ('_rtems_version',    self._lo_string,   True,  None,  False),
             '--help'                 : (None,                self._lo_help,     False, None,  False)
             }
 
@@ -95,7 +100,9 @@ class command_line:
         for lo in self._long_opts:
             self.opts[lo[2:]] = self._long_opts[lo][3]
             if self._long_opts[lo][4]:
-                self.defaults[self._long_opts[lo][0]] = ('none', 'none', self._long_opts[lo][3])
+                self.defaults[self._long_opts[lo][0]] = ('none',
+                                                         'none',
+                                                         self._long_opts[lo][3])
 
     def __str__(self):
         def _dict(dd):
@@ -194,7 +201,7 @@ class command_line:
 
     def help(self):
         print('%s: [options] [args]' % (self.command_name))
-        print('RTEMS Source Builder, an RTEMS Tools Project (c) 2012-2015 Chris Johns')
+        print('RTEMS Source Builder, an RTEMS Tools Project (c) 2012-2019 Chris Johns')
         print('Options and arguments:')
         print('--force                : Force the build to proceed')
         print('--quiet                : Quiet output (not used)')
@@ -214,6 +221,7 @@ class command_line:
         print('--configdir path       : Path to the configuration directory, default: ./config')
         print('--builddir path        : Path to the build directory, default: ./build')
         print('--sourcedir path       : Path to the source directory, default: ./source')
+        print('--patchdir path        : Path to the patches directory, default: ./patches')
         print('--tmppath path         : Path to the temp directory, default: ./tmp')
         print('--macros file[,[file]  : Macro format files to load after the defaults')
         print('--log file             : Log file where all build out is written too')
@@ -235,20 +243,22 @@ class command_line:
         raise error.exit()
 
     def process(self):
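+        #
+        # Handle the help options before any other argument is processed.
+        #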
+        for a in self.args:
+            if a == '-?' or a == '--help':
+                self.help()
         arg = 0
         while arg < len(self.args):
             a = self.args[arg]
-            if a == '-?':
-                self.help()
-            elif a.startswith('--'):
-                los = a.split('=')
+            if a.startswith('--'):
+                los = a.split('=', 1)
                 lo = los[0]
                 if lo in self._long_opts:
                     long_opt = self._long_opts[lo]
                     if len(los) == 1:
                         if long_opt[2]:
                             if arg == len(self.args) - 1:
-                                raise error.general('option requires a parameter: %s' % (lo))
+                                raise error.general('option requires a parameter: %s' % \
+                                                    (lo))
                             arg += 1
                             value = self.args[arg]
                         else:
@@ -262,8 +272,14 @@ class command_line:
                             value = los[1]
                         else:
                             value = '1'
-                        self.defaults[los[0][2:].replace('-', '_').lower()] = ('none', 'none', value)
+                        self.defaults[los[0][2:].replace('-', '_').lower()] = \
+                            ('none', 'none', value)
+                    else:
+                        if lo not in self.optargs:
+                            raise error.general('unknown option: %s' % (lo))
             else:
+                if a.startswith('-'):
+                    raise error.general('short options not supported; only "-?"')
                 self.opts['params'].append(a)
             arg += 1
 
@@ -276,8 +292,9 @@ class command_line:
                               '--with-download',
                               '--quiet',
                               '--without-log',
-                              '--without-error-report',
-                              '--without-release-url']
+                              '--without-error-report']
+            if a == '--dry-run':
+                self.args += ['--without-error-report']
             arg += 1
 
     def post_process(self, logfile = True):
@@ -302,7 +319,9 @@ class command_line:
         # Default prefix
         prefix = self.parse_args('--prefix')
         if prefix is None:
-            value = path.join(self.defaults['_prefix'], 'rtems', str(version.version()))
+            value = path.join(self.defaults['_prefix'],
+                              'rtems',
+                              str(self.defaults['rtems_version']))
             self.opts['prefix'] = value
             self.defaults['_prefix'] = value
         # Manage the regression option
@@ -326,7 +345,8 @@ class command_line:
         if um:
             checked = path.exists(um)
             if False in checked:
-                raise error.general('macro file not found: %s' % (um[checked.index(False)]))
+                raise error.general('macro file not found: %s' % \
+                                    (um[checked.index(False)]))
             for m in um:
                 self.defaults.load(m)
         # Check if the user has a private set of macros to load
@@ -341,7 +361,7 @@ class command_line:
     def sb_released(self):
         if version.released():
             self.defaults['rsb_released'] = '1'
-        self.defaults['rsb_version'] = version.str()
+        self.defaults['rsb_version'] = version.string()
 
     def sb_git(self):
         repo = git.repo(self.defaults.expand('%{_sbdir}'), self)
@@ -409,10 +429,13 @@ class command_line:
         _host = self.defaults.expand('%{_host}')
         _build = self.defaults.expand('%{_build}')
         _target = self.defaults.expand('%{_target}')
-        if len(_target):
-            return len(_host) and len(_build) and (_target) and \
-                _host != _build and _host != _target
-        return len(_host) and len(_build) and _host != _build
+        #
+        # The fix that had been removed has been put back. The removal was
+        # probably a work around for another issue that has since been
+        # fixed.
+        #
+        return len(_target) and len(_host) and len(_build) \
+            and _host != _build and _host != _target
 
     def user_macros(self):
         #
@@ -474,7 +497,7 @@ class command_line:
                 lhs = None
                 rhs = None
                 if '=' in self.args[a]:
-                    eqs = self.args[a].split('=')
+                    eqs = self.args[a].split('=', 1)
                     lhs = eqs[0]
                     if len(eqs) > 2:
                         rhs = '='.join(eqs[1:])
@@ -573,6 +596,12 @@ class command_line:
             if self.get_arg('--with-tools') is not None:
                 raise error.general('--rtems-tools and --with-tools cannot be used together')
             self.args.append('--with-tools=%s' % (rtems_tools[1]))
+        rtems_version = self.parse_args('--rtems-version')
+        if rtems_version is None:
+            rtems_version = str(version.version())
+        else:
+            rtems_version = rtems_version[1]
+        self.defaults['rtems_version'] = rtems_version
         rtems_arch_bsp = self.parse_args('--rtems-bsp')
         if rtems_arch_bsp is not None:
             if self.get_arg('--target') is not None:
@@ -580,11 +609,6 @@ class command_line:
             ab = rtems_arch_bsp[1].split('/')
             if len(ab) != 2:
                 raise error.general('invalid --rtems-bsp option')
-            rtems_version = self.parse_args('--rtems-version')
-            if rtems_version is None:
-                rtems_version = version.version()
-            else:
-                rtems_version = rtems_version[1]
             self.args.append('--target=%s-rtems%s' % (ab[0], rtems_version))
             self.args.append('--with-rtems-bsp=%s' % (ab[1]))
 
@@ -602,7 +626,7 @@ def load(args, optargs = None, defaults = '%{_sbdir}/defaults.mc', logfile = Tru
     #
     # The path to this command.
     #
-    command_path = path.dirname(args[0])
+    command_path = path.dirname(path.abspath(args[0]))
     if len(command_path) == 0:
         command_path = '.'
 
@@ -619,7 +643,7 @@ def load(args, optargs = None, defaults = '%{_sbdir}/defaults.mc', logfile = Tru
     overrides = None
     if os.name == 'nt':
         try:
-            import windows
+            from . import windows
             overrides = windows.load()
             host_windows = True
             host_posix = False
@@ -629,26 +653,26 @@ def load(args, optargs = None, defaults = '%{_sbdir}/defaults.mc', logfile = Tru
         uname = os.uname()
         try:
             if uname[0].startswith('MINGW64_NT'):
-                import windows
+                from . import windows
                 overrides = windows.load()
                 host_windows = True
             elif uname[0].startswith('CYGWIN_NT'):
-                import windows
+                from . import windows
                 overrides = windows.load()
             elif uname[0] == 'Darwin':
-                import darwin
+                from . import darwin
                 overrides = darwin.load()
             elif uname[0] == 'FreeBSD':
-                import freebsd
+                from . import freebsd
                 overrides = freebsd.load()
             elif uname[0] == 'NetBSD':
-                import netbsd
+                from . import netbsd
                 overrides = netbsd.load()
             elif uname[0] == 'Linux':
-                import linux
+                from . import linux
                 overrides = linux.load()
             elif uname[0] == 'SunOS':
-                import solaris
+                from . import solaris
                 overrides = solaris.load()
         except error.general as ge:
             raise error.general('failed to load %s host support: %s' % (uname[0], ge))
@@ -671,14 +695,30 @@ def load(args, optargs = None, defaults = '%{_sbdir}/defaults.mc', logfile = Tru
     #
     # Load the release settings
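+    # These can provide hashes for the release sources and a comma
+    # separated list of release paths used when downloading, see the
+    # VERSION file.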
     #
-    version.load_release_settings(o.defaults)
-
+    def setting_error(msg):
+        raise error.general(msg)
+    hashes = version.load_release_settings('hashes')
+    for hash in hashes:
+        hs = hash[1].split()
+        if len(hs) != 2:
+            raise error.general('invalid release hash in VERSION')
+        sources.hash((hs[0], hash[0], hs[1]), o.defaults, setting_error)
+    release_path = version.load_release_setting('version', 'release_path',
+                                                raw = True)
+    if release_path is not None:
+        try:
+            release_path = ','.join([rp.strip() for rp in release_path.split(',')])
+        except:
+            raise error.general('invalid release path in VERSION')
+        download.set_release_path(release_path, o.defaults)
     return o
 
 def run(args):
     try:
-        _opts = load(args = args, defaults = 'defaults.mc')
-        log.notice('RTEMS Source Builder - Defaults, %s' % (version.str()))
+        dpath = path.dirname(args[0])
+        _opts = load(args = args,
+                     defaults = path.join(dpath, 'defaults.mc'))
+        log.notice('RTEMS Source Builder - Defaults, %s' % (version.string()))
         _opts.log_info()
         log.notice('Options:')
         log.notice(str(_opts))
diff --git a/source-builder/sb/path.py b/source-builder/sb/path.py
index b86df0d..b27cf14 100644
--- a/source-builder/sb/path.py
+++ b/source-builder/sb/path.py
@@ -1,6 +1,6 @@
 #
 # RTEMS Tools Project (http://www.rtems.org/)
-# Copyright 2010-2016 Chris Johns (chrisj at rtems.org)
+# Copyright 2010-2018 Chris Johns (chrisj at rtems.org)
 # All rights reserved.
 #
 # This file is part of the RTEMS Tools package in 'rtems-tools'.
@@ -25,15 +25,18 @@
 
 from __future__ import print_function
 
-import log
 import os
 import shutil
 import stat
 import string
+import sys
 
-import error
+from . import error
+from . import log
 
+windows_posix = sys.platform == 'msys'
 windows = os.name == 'nt'
+
 win_maxpath = 254
 
 def host(path):
@@ -53,19 +56,16 @@ def host(path):
                 path = u'\\'.join([u'\\\\?', path])
     return path
 
-def is_abspath(path):
-    if path is not None:
-        return '/' == path[0]
-    return False
-
 def shell(path):
+    if isinstance(path, bytes):
+        path = path.decode('ascii')
     if path is not None:
-        if windows:
-            path = path.encode('ascii', 'ignore')
+        if windows or windows_posix:
+            path = path.encode('ascii', 'ignore').decode('ascii')
             if path.startswith('\\\\?\\'):
                 path = path[4:]
             if len(path) > 1 and path[1] == ':':
-                path = '/%s%s' % (path[0], path[2:])
+                path = '/%s%s' % (path[0].lower(), path[2:])
             path = path.replace('\\', '/')
         while '//' in path:
             path = path.replace('//', '/')
@@ -79,6 +79,11 @@ def dirname(path):
     path = shell(path)
     return shell(os.path.dirname(path))
 
+def is_abspath(path):
+    if path is not None and len(path) > 0:
+        return '/' == path[0]
+    return False
+
 def join(path, *args):
     path = shell(path)
     for arg in args:
@@ -218,7 +223,6 @@ def removeall(path):
         _remove_node(path)
 
 def expand(name, paths):
-    path = shell(path)
     l = []
     for p in paths:
         l += [join(shell(p), name)]
@@ -305,6 +309,58 @@ def copy_tree(src, dst):
         else:
             raise error.general('copying tree (4): %s -> %s: %s' % (hsrc, hdst, str(why)))
 
+def get_size(path, depth = -1):
+    #
+    # Get the size the directory tree manually to the required depth.
+    # This makes sure on Windows the files are correctly encoded to avoid
+    # the file name size limit. On Windows the os.walk fails once we
+    # get to the max path length on Windows.
+    #
+    def _isdir(path):
+        hpath = host(path)
+        return os.path.isdir(hpath) and not os.path.islink(hpath)
+
+    def _node_size(path):
+        hpath = host(path)
+        size = 0
+        if not os.path.islink(hpath):
+            size = os.path.getsize(hpath)
+        return size
+
+    def _get_size(path, depth, level = 0):
+        level += 1
+        dirs = []
+        size = 0
+        for name in listdir(path):
+            path_ = join(path, shell(name))
+            hname = host(path_)
+            if _isdir(path_):
+                dirs += [shell(name)]
+            else:
+                size += _node_size(path_)
+        if depth < 0 or level < depth:
+            for name in dirs:
+                dir = join(path, name)
+                size += _get_size(dir, depth, level)
+        return size
+
+    path = shell(path)
+    hpath = host(path)
+    size = 0
+
+    if os.path.exists(hpath):
+        size = _get_size(path, depth)
+
+    return size
+
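+#
+# Format the size of a directory tree as a human readable string using
+# powers of 1024.
+#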
+def get_humanize_size(path, depth = -1):
+    size = get_size(path, depth)
+    for unit in ['','K','M','G','T','P','E','Z']:
+        if abs(size) < 1024.0:
+            return "%5.3f%sB" % (size, unit)
+        size /= 1024.0
+    return "%.3f%sB" % (size, 'Y')
+
 if __name__ == '__main__':
     print(host('/a/b/c/d-e-f'))
     print(host('//a/b//c/d-e-f'))
@@ -312,6 +368,10 @@ if __name__ == '__main__':
     print(basename('/as/sd/df/fg/me.txt'))
     print(dirname('/as/sd/df/fg/me.txt'))
     print(join('/d', 'g', '/tyty/fgfg'))
+    print('size of . depth all: ', get_size('.'))
+    print('size of . depth   1: ', get_size('.', 1))
+    print('size of . depth   2: ', get_size('.', 2))
+    print('size of . as human : ', get_humanize_size('.'))
     windows = True
     print(host('/a/b/c/d-e-f'))
     print(host('//a/b//c/d-e-f'))
diff --git a/source-builder/sb/pkgconfig.py b/source-builder/sb/pkgconfig.py
index 519e858..198ec80 100755
--- a/source-builder/sb/pkgconfig.py
+++ b/source-builder/sb/pkgconfig.py
@@ -43,7 +43,7 @@ import re
 import shlex
 import sys
 
-import path
+from . import path
 
 def default_prefix(common = True):
     paths = []
@@ -91,6 +91,7 @@ class package(object):
     no_dup_flags = ['-I', '-l', '-L']
     dual_opts = ['-D', '-U', '-I', '-l', '-L']
     lib_list_splitter = re.compile('[\s,]+')
+    loaded_prefixes = None
     loaded = {}
 
     @staticmethod
@@ -101,6 +102,22 @@ class package(object):
         dst.fields = copy.copy(src.fields)
         dst.nodes = copy.copy(src.nodes)
 
+    @staticmethod
+    def _is_string(us):
+        if type(us) == str:
+            return True
+        try:
+            if type(us) == unicode:
+                return True
+        except:
+            pass
+        try:
+            if type(us) == bytes:
+                return True
+        except:
+            pass
+        return False
+
     @staticmethod
     def is_version(v):
         for n in v.split('.'):
@@ -213,7 +230,8 @@ class package(object):
             prefix = default_prefix()
         if prefix:
             self._log('prefix: %s' % (prefix))
-            if type(prefix) is str:
+            if self._is_string(prefix):
+                prefix = str(prefix)
                 self.prefix = []
                 for p in prefix.split(os.pathsep):
                     self.prefix += [path.shell(p)]
@@ -416,15 +434,15 @@ class package(object):
         return ok
 
     def load(self, name):
-        if name in package.loaded:
-            package._copy(package.loaded[name], self)
-            return
         self._log('loading: %s' % (name))
         if self.name_:
             self._clean()
         self.name_ = name
         file = self._find_package(name)
         if file:
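+            #
+            # Loaded packages are cached by the full file name of the
+            # package file.
+            #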
+            if file in package.loaded:
+                package._copy(package.loaded[file], self)
+                return
             self._log('load: %s (%s)' % (name, file))
             if self.src:
                 self.src('==%s%s' % ('=' * 80, os.linesep))
@@ -474,8 +492,9 @@ class package(object):
             if requires:
                 for r in package.splitter(requires):
                     if r[0] not in self.nodes[nt]:
-                        if r[0] in package.loaded:
-                            pkg = package.loaded[r[0]]
+                        file = self._find_package(r[0])
+                        if file in package.loaded:
+                            pkg = package.loaded[file]
                         else:
                             pkg = package(r[0], self.prefix, self.output)
                         ver = pkg.get('version')
@@ -486,8 +505,8 @@ class package(object):
                             self._log('failed: %s (%s %s %s)' % (r[0], ver, r[1], r[2]))
                             self.nodes['failed'][r[0]] = pkg
         if self.exists():
-            self._log('load: exists')
-            package.loaded[name] = self
+            self._log('load: exists and loaded; cache as loaded')
+            package.loaded[self.file_] = self
 
     def get(self, label, private = True):
         self._log('get: %s (%s)' % (label, ','.join(self.fields)))
diff --git a/source-builder/sb/reports.py b/source-builder/sb/reports.py
index 9d3a342..a20b29e 100644
--- a/source-builder/sb/reports.py
+++ b/source-builder/sb/reports.py
@@ -30,27 +30,22 @@ import datetime
 import os
 import sys
 
-import pprint
-pp = pprint.PrettyPrinter(indent = 2)
-
 try:
-    import build
-    import check
-    import config
-    import error
-    import git
-    import log
-    import options
-    import path
-    import setbuilder
-    import sources
-    import version
+    from . import build
+    from . import check
+    from . import config
+    from . import error
+    from . import git
+    from . import log
+    from . import options
+    from . import path
+    from . import sources
+    from . import version
 except KeyboardInterrupt:
     print('user terminated', file = sys.stderr)
     sys.exit(1)
 except:
-    print('error: unknown application load error', file = sys.stderr)
-    sys.exit(1)
+    raise
 
 _line_len = 78
 
@@ -241,13 +236,16 @@ class markdown_formatter(formatter):
             self.line(self._strong('Remotes:'))
             self.line('')
             rc = 1
-            for r in remotes:
-                if 'url' in remotes[r]:
-                    text = remotes[r]['url']
-                else:
-                    text = 'no URL found'
-                self.line('%d. %s: %s' % (rc, r, text))
-                rc += 1
+            if not remotes:
+                self.line('[ remotes removed, contact sender for details ]')
+            else:
+                for r in remotes:
+                    if 'url' in remotes[r]:
+                        text = remotes[r]['url']
+                    else:
+                        text = 'no URL found'
+                    self.line('%d. %s: %s' % (rc, r, text))
+                    rc += 1
             self.line('')
             self.line(self._strong('Status:'))
             self.line('')
@@ -427,14 +425,17 @@ class text_formatter(formatter):
         if valid:
             self.line('%s Remotes:' % (self.cini))
             rc = 0
-            for r in remotes:
-                rc += 1
-                if 'url' in remotes[r]:
-                    text = remotes[r]['url']
-                else:
-                    text = 'no URL found'
-                text = '%s: %s' % (r, text)
-                self.line('%s  %2d: %s' % (self.cini, rc, text))
+            if not remotes:
+                self.line('[ remotes removed, contact sender for details ]')
+            else:
+                for r in remotes:
+                    rc += 1
+                    if 'url' in remotes[r]:
+                        text = remotes[r]['url']
+                    else:
+                        text = 'no URL found'
+                    text = '%s: %s' % (r, text)
+                    self.line('%s  %2d: %s' % (self.cini, rc, text))
             self.line('%s Status:' % (self.cini))
             if dirty:
                 self.line('%s  Repository is dirty' % (self.cini))
@@ -603,7 +604,7 @@ def _merge(_dict, new):
 class report:
     """Report the build details about a package given a config file."""
 
-    def __init__(self, formatter, _configs, opts, macros = None):
+    def __init__(self, formatter, sanitize, _configs, opts, macros = None):
         if type(formatter) == str:
             if formatter == 'text':
                 self.formatter = text_formatter()
@@ -621,6 +622,7 @@ class report:
             self.formatter = formatter
         self.configs = _configs
         self.opts = opts
+        self.sanitize = sanitize
         if macros is None:
             self.macros = opts.defaults
         else:
@@ -645,11 +647,14 @@ class report:
         pass
 
     def release_status(self):
-        self.formatter.release_status(version.str())
+        self.formatter.release_status(version.string())
 
     def git_status(self):
         r = git.repo('.', self.opts, self.macros)
-        self.formatter.git_status(r.valid(), r.dirty(), r.head(), r.remotes())
+        if self.sanitize:
+            self.formatter.git_status(r.valid(), r.dirty(), r.head(), None)
+        else:
+            self.formatter.git_status(r.valid(), r.dirty(), r.head(), r.remotes())
 
     def introduction(self, name, intro_text = None):
         now = datetime.datetime.now().ctime()
@@ -855,6 +860,7 @@ class report:
                 raise error.general('writing output file: %s: %s' % (name, err))
 
     def generate(self, name, tree = None, opts = None, macros = None):
+        from . import setbuilder
         self.buildset_start(name)
         if tree is None:
             tree = self.tree
@@ -889,14 +895,16 @@ class report:
 
 def run(args):
     try:
+        from . import setbuilder
         optargs = { '--list-bsets':   'List available build sets',
                     '--list-configs': 'List available configurations',
                     '--format':       'Output format (text, html, markdown, ini, xml)',
-                    '--output':       'File name to output the report' }
+                    '--output':       'File name to output the report',
+                    '--sanitize':     'Remove Remotes information from report'}
         opts = options.load(args, optargs, logfile = False)
         if opts.get_arg('--output') and len(opts.params()) > 1:
             raise error.general('--output can only be used with a single config')
-        print('RTEMS Source Builder, Reporter, %s' % (version.str()))
+        print('RTEMS Source Builder, Reporter, %s' % (version.string()))
         opts.log_info()
         if not check.host_setup(opts):
             log.warning('forcing build with known host setup problems')
@@ -922,7 +930,10 @@ def run(args):
                     formatter = xml_formatter()
                 else:
                     raise error.general('invalid format: %s' % (format_opt[1]))
-            r = report(formatter, configs, opts)
+            sanitize = False
+            if opts.get_arg('--sanitize'):
+                sanitize = True
+            r = report(formatter, sanitize, configs, opts)
             for _config in opts.params():
                 if output is None:
                     outname = path.splitext(_config)[0] + formatter.ext()
@@ -934,8 +945,6 @@ def run(args):
                     raise error.general('config file not found: %s' % (_config))
                 r.create(config, outname)
             del r
-        else:
-            raise error.general('invalid config type: %s' % (config))
     except error.general as gerr:
         print(gerr)
         sys.exit(1)
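
The new --sanitize option only changes what git_status() hands to the formatter: when it is set the remotes are passed as None and the formatters above print the placeholder line instead of remote URLs. A minimal sketch of that gating, using a plain dict as a stand-in for the RSB repo and formatter objects (none of the names below are RSB APIs):

# Minimal sketch of the --sanitize gating: when sanitizing, the remotes are
# passed as None so URLs never reach the report. The dict layout and the
# function names are illustrative stand-ins, not RSB APIs.

def format_remotes(remotes):
    if not remotes:
        return ['[ remotes removed, contact sender for details ]']
    return ['%d. %s: %s' % (n, name, info.get('url', 'no URL found'))
            for n, (name, info) in enumerate(remotes.items(), start=1)]

def git_status(repo, sanitize):
    remotes = None if sanitize else repo['remotes']
    lines = ['Head: %s' % repo['head'],
             'Status: %s' % ('dirty' if repo['dirty'] else 'clean')]
    return lines + ['Remotes:'] + format_remotes(remotes)

if __name__ == '__main__':
    repo = {'head': 'abc1234', 'dirty': False,
            'remotes': {'origin': {'url': 'git://git.rtems.org/rtems-source-builder.git'}}}
    print('\n'.join(git_status(repo, sanitize=True)))
    print('\n'.join(git_status(repo, sanitize=False)))
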
diff --git a/source-builder/sb/rtems-build-dep b/source-builder/sb/rtems-build-dep
new file mode 100755
index 0000000..48a0823
--- /dev/null
+++ b/source-builder/sb/rtems-build-dep
@@ -0,0 +1,169 @@
+#! /bin/sh
+#
+# RTEMS Tools Project (http://www.rtems.org/)
+# Copyright 2018 Chris Johns (chrisj at rtems.org)
+# All rights reserved.
+#
+# This file is part of the RTEMS Tools package in 'rtems-tools'.
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+#
+# Host Build Dependence
+#
+# This script finds a file that is part of the compiler's default
+# build environment. The file can be a header or a library.
+#
+# Header files:
+#  - Get the list of include directories from the compiler.
+#  - Search the include paths for the header file.
+#
+# Library:
+#  - Ask the compiler to print the library paths, add on any user
+#    paths and search with a wildcard.
+#
+
+set -e
+
+op=
+name=
+includes=
+libraries=
+compile=
+verbose=no
+debug=no
+
+if [ $# -eq 0 ]; then
+    echo 'Usage: rtems-build-dep [-c compiler] [-H header] [-I header-paths]
+                       [-l library] [-L library-paths] [-v] [-d]'
+    exit 2
+fi
+while [ $# -gt 0 ]
+do
+    case "$1"
+    in
+	-c)
+	    if [ $# -eq 1 ]; then
+		echo 'error: no compiler (-c) provided'
+		exit 2
+	    fi
+	    compiler="$2"; shift;
+            shift;;
+	-H)
+	    if [ $# -eq 1 ]; then
+		echo 'error: no header (-H) provided'
+		exit 2
+	    fi
+	    op="header"
+            name="$2"; shift;
+            shift;;
+	-I)
+	    if [ $# -eq 1 ]; then
+		echo 'error: no header path (-I) provided'
+		exit 2
+	    fi
+            includes="${includes} $2"; shift;
+            shift;;
+	-l)
+	    if [ $# -eq 1 ]; then
+		echo 'error: no library (-l) provided'
+		exit 2
+	    fi
+	    op="library"
+            name="$2"; shift;
+            shift;;
+	-L)
+	    if [ $# -eq 1 ]; then
+		echo 'error: no library path (-L) provided'
+		exit 2
+	    fi
+            libraries="$2"; shift;
+            shift;;
+	-v)
+	    verbose=yes
+            shift;;
+	-d)
+	    debug=yes
+            shift;;
+	*)
+	    break;
+    esac
+done
+
+if [ ${debug} = yes ]; then
+    set -x
+fi
+
+if [ -z "${op}" ]; then
+    echo "error: no header or library file to find found."
+    exit 2
+fi
+if [ -z "${compiler}" ]; then
+    echo "error: no compiler provided."
+    exit 2
+fi
+if [ -z "${name}" ]; then
+    echo "error: no name found."
+    exit 2
+fi
+
+#
+# Header file.
+#
+if [ ${op} = "header" ]; then
+    inc_paths=$(echo | LC_ALL=C ${compiler} ${includes} -xc -E -v - 2>&1 | tr -d '\r' | \
+	       awk 'BEGIN {flag=0;} /starts here/{flag=1;next}/End/{flag=0}flag')
+    for p in ${inc_paths}
+    do
+	if [ ${verbose} = yes ]; then
+	    echo "Include: ${p}"
+	fi
+	if [ -f "${p}/${name}" ]; then
+	    echo "found"
+	    exit 0
+	fi
+    done
+    echo "not-found"
+    exit 0
+fi
+
+#
+# Library file
+#
+if [ ${op} = "library" ]; then
+    if [ "${OS}" = "Windows_NT" -a "${OSTYPE}" != "cygwin" ]; then
+	sep=';'
+    else
+	sep=':'
+    fi
+    lib_paths_1=$(LC_ALL=C ${compiler} -print-search-dirs 2>&1 | tr -d '\r' | \
+		      grep libraries | \
+		      sed -e 's/libraries:.*=//' | \
+		      awk 'BEGIN {FS="'${sep}'"} {for (i=0;++i<=NF;) print $i;}')
+    lib_paths_2=$(echo ${libraries} | \
+		      awk 'BEGIN {FS="-L"} {for (i=0;++i<=NF;) if (length($i) > 0) print $i;}')
+    for p in ${lib_paths_1} ${lib_paths_2}
+    do
+	if [ ${verbose} = yes ]; then
+	    echo "Library: ${p}/${name}"
+	fi
+	if ls ${p}/${name} 1> /dev/null 2>&1; then
+	    echo "found"
+	    exit 0
+	fi
+    done
+    echo "not-found"
+    exit 0
+fi
+
+exit 1
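
rtems-build-dep asks the compiler itself where it searches rather than hard coding distribution paths. The header case can be sketched in Python as below; it assumes a 'cc' driver on the PATH and mirrors the filter between the 'search starts here' and 'End of search list' markers, but it is only an illustration, not part of the RSB.

# Sketch: find a header in the compiler's default include paths, the same
# technique rtems-build-dep uses (assumes a 'cc' driver is on the PATH).
import os
import subprocess
import sys

def compiler_include_paths(compiler='cc'):
    # 'cc -xc -E -v -' prints the include search list on stderr between the
    # 'search starts here' and 'End of search list' markers.
    proc = subprocess.run([compiler, '-xc', '-E', '-v', '-'],
                          input='', capture_output=True, text=True)
    paths, collecting = [], False
    for line in proc.stderr.splitlines():
        if 'search starts here' in line:
            collecting = True
        elif line.startswith('End of search list'):
            collecting = False
        elif collecting:
            paths.append(line.strip())
    return paths

def have_header(name, compiler='cc'):
    return any(os.path.isfile(os.path.join(p, name))
               for p in compiler_include_paths(compiler))

if __name__ == '__main__':
    header = sys.argv[1] if len(sys.argv) > 1 else 'zlib.h'
    print('found' if have_header(header) else 'not-found')
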
diff --git a/source-builder/sb/rtems-check-command b/source-builder/sb/rtems-check-command
new file mode 100755
index 0000000..1cbe5c9
--- /dev/null
+++ b/source-builder/sb/rtems-check-command
@@ -0,0 +1,26 @@
+#! /bin/sh
+#
+# RTEMS Tools Project (http://www.rtems.org/)
+# Copyright 2019 Chris Johns (chrisj at rtems.org)
+#
+# This file is part of the RTEMS Tools package in 'rtems-tools'.
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+#
+# Check if a command exists
+#
+
+set +e
+command -v $*
+exit 0
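
rtems-check-command wraps 'command -v' and always exits 0, so a caller presumably inspects the captured output rather than the exit status. A rough Python counterpart for comparison, using the standard library's shutil.which:

# Rough Python counterpart of rtems-check-command: print the resolved path
# when the command exists, print nothing otherwise, always exit 0.
import shutil
import sys

def check_command(name):
    resolved = shutil.which(name)
    return resolved if resolved else ''

if __name__ == '__main__':
    for cmd in sys.argv[1:]:
        found = check_command(cmd)
        if found:
            print(found)
    sys.exit(0)
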
diff --git a/source-builder/sb/rtemsconfig.py b/source-builder/sb/rtemsconfig.py
index 6ac894e..0ad6c5c 100644
--- a/source-builder/sb/rtemsconfig.py
+++ b/source-builder/sb/rtemsconfig.py
@@ -28,11 +28,11 @@ import sys
 import threading
 import time
 
-import error
-import log
-import options
-import path
-import version
+from . import error
+from . import log
+from . import options
+from . import path
+from . import version
 
 def _collect(path_, file):
     confs = []
@@ -193,7 +193,7 @@ def run(args):
         bsp = bsp_config(opts, prefix, opts.get_arg('--rtems-bsp')[1])
 
         if opts.get_arg('--list'):
-            log.notice('RTEMS Source Builder - RTEMS Configuration, %s' % (version.str()))
+            log.notice('RTEMS Source Builder - RTEMS Configuration, %s' % (version.string()))
             opts.log_info()
             configs = list(bsp.keys())
             for c in sorted(configs.keys()):
diff --git a/source-builder/sb/setbuilder.py b/source-builder/sb/setbuilder.py
index 17b781a..b0e2b23 100644
--- a/source-builder/sb/setbuilder.py
+++ b/source-builder/sb/setbuilder.py
@@ -1,6 +1,6 @@
 #
 # RTEMS Tools Project (http://www.rtems.org/)
-# Copyright 2010-2016 Chris Johns (chrisj at rtems.org)
+# Copyright 2010-2018 Chris Johns (chrisj at rtems.org)
 # All rights reserved.
 #
 # This file is part of the RTEMS Tools package in 'rtems-tools'.
@@ -33,22 +33,30 @@ import sys
 import textwrap
 
 try:
-    import build
-    import check
-    import error
-    import log
-    import mailer
-    import options
-    import path
-    import reports
-    import sources
-    import version
+    from . import build
+    from . import check
+    from . import error
+    from . import log
+    from . import mailer
+    from . import options
+    from . import path
+    from . import reports
+    from . import shell
+    from . import sources
+    from . import version
 except KeyboardInterrupt:
     print('abort: user terminated', file = sys.stderr)
     sys.exit(1)
 except:
-    print('error: unknown application load error', file = sys.stderr)
-    sys.exit(1)
+    raise
+
+def macro_expand(macros, _str):
+    cstr = None
+    while cstr != _str:
+        cstr = _str
+        _str = macros.expand(_str)
+        _str = shell.expand(macros, _str)
+    return _str
 
 class log_capture(object):
     def __init__(self):
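
macro_expand() keeps expanding until a pass makes no change, so macros that expand to further %{...} or %(...) references are fully resolved. A minimal sketch of the fixed-point loop, with a dictionary and a regular expression standing in for the RSB macros object and shell expansion:

# Fixed-point expansion sketch: the dict and regex stand in for the RSB
# macros object and shell expansion; only the loop shape follows macro_expand().
import re

def expand_once(macros, text):
    return re.sub(r'%\{(\w+)\}', lambda m: macros.get(m.group(1), m.group(0)), text)

def macro_expand(macros, text):
    previous = None
    while previous != text:      # stop once a pass changes nothing
        previous = text
        text = expand_once(macros, text)
    return text

if __name__ == '__main__':
    macros = {'_target': '%{_arch}-rtems5', '_arch': 'sparc'}
    print(macro_expand(macros, '%{_target}'))   # sparc-rtems5
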
@@ -71,21 +79,21 @@ class buildset:
     """Build a set builds a set of packages."""
 
     def __init__(self, bset, _configs, opts, macros = None):
-        log.trace('_bset: %s: init' % (bset))
+        log.trace('_bset:   : %s: init' % (bset))
         self.configs = _configs
         self.opts = opts
         if macros is None:
             self.macros = copy.copy(opts.defaults)
         else:
             self.macros = copy.copy(macros)
-        log.trace('_bset: %s: macro defaults' % (bset))
+        log.trace('_bset:   : %s: macro defaults' % (bset))
         log.trace(str(self.macros))
         self.bset = bset
-        _target = self.macros.expand('%{_target}')
+        _target = macro_expand(self.macros, '%{_target}')
         if len(_target):
             pkg_prefix = _target
         else:
-            pkg_prefix = self.macros.expand('%{_host}')
+            pkg_prefix = macro_expand(self.macros, '%{_host}')
         self.bset_pkg = '%s-%s-set' % (pkg_prefix, self.bset)
         self.mail_header = ''
         self.mail_report = ''
@@ -120,6 +128,25 @@ class buildset:
     def get_mail_report(self):
         return self.mail_report
 
+    def mail_single_report(self):
+        return self.macros.get('%{mail_single_report}') != 0
+
+    def mail_active(self, mail, nesting_count = 1):
+        return mail is not None and not (self.mail_single_report() and nesting_count > 1)
+
+    def mail_send(self, mail):
+        if True: #not self.opts.dry_run():
+            mail_subject = '%s on %s' % (self.bset, self.macros.expand('%{_host}'))
+            if mail['failure'] is not None:
+                mail_subject = 'FAILED %s (%s)' % (mail_subject, mail['failure'])
+            else:
+                mail_subject = 'PASSED %s' % (mail_subject)
+            mail_subject = 'Build %s: %s' % (reports.platform(mode = 'system'),
+                                             mail_subject)
+            body = mail['log']
+            body += (os.linesep * 2).join(mail['reports'])
+            mail['mail'].send(mail['to'], mail_subject, body)
+
     def copy(self, src, dst):
         log.output('copy: %s => %s' % (path.host(src), path.host(dst)))
         if not self.opts.dry_run():
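
mail_send() builds one subject from the overall pass/fail state and joins the collected per-build reports into a single body. A small sketch of that assembly; the dict keys follow the ones used above ('log', 'reports', 'failure'), the platform prefix is left out and the send step itself is omitted:

# Sketch of the single-report mail assembly; the mail dict layout mirrors the
# keys used above but the actual send is not shown.
import os

def build_mail(bset, host, mail):
    subject = '%s on %s' % (bset, host)
    if mail['failure'] is not None:
        subject = 'FAILED %s (%s)' % (subject, mail['failure'])
    else:
        subject = 'PASSED %s' % (subject)
    body = mail['log'] + (os.linesep * 2).join(mail['reports'])
    return subject, body

if __name__ == '__main__':
    mail = {'log': 'Build Time: 0:10:00' + os.linesep,
            'reports': ['report for gdb', 'report for gcc'],
            'failure': None}
    subject, body = build_mail('4.11/rtems-sparc', 'x86_64-linux-gnu', mail)
    print(subject)
    print(body)
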
@@ -152,7 +179,7 @@ class buildset:
             else:
                 raise error.general('invalid report format: %s' % (format))
             buildroot = _build.config.abspath('%{buildroot}')
-            prefix = _build.macros.expand('%{_prefix}')
+            prefix = macro_expand(_build.macros, '%{_prefix}')
             name = _build.main_package().name() + ext
             log.notice('reporting: %s -> %s' % (_config, name))
             if not _build.opts.get_arg('--no-report'):
@@ -161,7 +188,7 @@ class buildset:
                     outname = path.host(path.join(outpath, name))
                 else:
                     outname = None
-                r = reports.report(format, self.configs,
+                r = reports.report(format, False, self.configs,
                                    copy.copy(opts), copy.copy(macros))
                 r.introduction(_build.config.file_name())
                 r.generate(_build.config.file_name())
@@ -171,7 +198,7 @@ class buildset:
                     r.write(outname)
                 del r
             if mail:
-                r = reports.report('text', self.configs,
+                r = reports.report('text', True, self.configs,
                                    copy.copy(opts), copy.copy(macros))
                 r.introduction(_build.config.file_name())
                 r.generate(_build.config.file_name())
@@ -182,17 +209,25 @@ class buildset:
     def root_copy(self, src, dst):
         what = '%s -> %s' % \
             (os.path.relpath(path.host(src)), os.path.relpath(path.host(dst)))
-        log.trace('_bset: %s: collecting: %s' % (self.bset, what))
+        log.trace('_bset:   : %s: collecting: %s' % (self.bset, what))
         self.copy(src, dst)
 
-    def install(self, name, buildroot, prefix):
-        dst = prefix
-        src = path.join(buildroot, prefix)
-        log.notice('installing: %s -> %s' % (name, path.host(dst)))
+    def install(self, mode, name, src, dst):
+        log.trace('_bset:   : %s: copy %s -> %s' % (mode, src, dst))
+        log.notice('%s: %s -> %s' % (mode, name, path.host(dst)))
         self.copy(src, dst)
 
+    def install_mode(self):
+        return macro_expand(self.macros, '%{install_mode}')
+
+    def installing(self):
+        return self.install_mode() == 'installing'
+
+    def staging(self):
+        return not self.installing()
+
     def canadian_cross(self, _build):
-        log.trace('_bset: Cxc for build machine: _build => _host')
+        log.trace('_bset:   : Cxc for build machine: _build => _host')
         macros_to_copy = [('%{_host}',        '%{_build}'),
                           ('%{_host_alias}',  '%{_build_alias}'),
                           ('%{_host_arch}',   '%{_build_arch}'),
@@ -204,7 +239,7 @@ class buildset:
                           ('%{_builddir}',    '%{_buildcxcdir}')]
         cxc_macros = _build.copy_init_macros()
         for m in macros_to_copy:
-            log.trace('_bset: Cxc: %s <= %s' % (m[0], cxc_macros[m[1]]))
+            log.trace('_bset:   : Cxc: %s <= %s' % (m[0], cxc_macros[m[1]]))
             cxc_macros[m[0]] = cxc_macros[m[1]]
         _build.set_macros(cxc_macros)
         _build.reload()
@@ -229,12 +264,15 @@ class buildset:
         if (self.opts.get_arg('--bset-tar-file') or self.opts.canadian_cross()) \
            and not _build.macros.get('%{_disable_packaging}'):
             path.mkdir(tardir)
-            tar = path.join(tardir, _build.config.expand('%s.tar.bz2' % (_build.main_package().name())))
+            tar = path.join(tardir,
+                            _build.config.expand('%s.tar.bz2' % \
+                                                 (_build.main_package().name())))
             log.notice('tarball: %s' % (os.path.relpath(path.host(tar))))
             if not self.opts.dry_run():
                 tmproot = _build.config.expand('%{_tmproot}')
                 cmd = _build.config.expand('"cd ' + tmproot + \
-                                               ' && %{__tar} -cf - . | %{__bzip2} > ' + tar + '"')
+                                           ' && %{__tar} -cf - . | %{__bzip2} > ' + \
+                                           tar + '"')
                 _build.run(cmd, shell_opts = '-c', cwd = tmproot)
 
     def parse(self, bset):
@@ -249,7 +287,7 @@ class buildset:
         bsetname = bset
 
         if not path.exists(bsetname):
-            for cp in self.macros.expand('%{_configdir}').split(':'):
+            for cp in macro_expand(self.macros, '%{_configdir}').split(':'):
                 configdir = path.abspath(cp)
                 bsetname = path.join(configdir, bset)
                 if path.exists(bsetname):
@@ -258,7 +296,7 @@ class buildset:
             if bsetname is None:
                 raise error.general('no build set file found: %s' % (bset))
         try:
-            log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
+            log.trace('_bset:   : %s: open: %s' % (self.bset, bsetname))
             bset = open(path.host(bsetname), 'r')
         except IOError as err:
             raise error.general('error opening bset file: %s' % (bsetname))
@@ -272,7 +310,7 @@ class buildset:
                 l = _clean(l)
                 if len(l) == 0:
                     continue
-                log.trace('_bset: %s: %03d: %s' % (self.bset, lc, l))
+                log.trace('_bset:   : %s: %03d: %s' % (self.bset, lc, l))
                 ls = l.split()
                 if ls[0][-1] == ':' and ls[0][:-1] == 'package':
                     self.bset_pkg = ls[1].strip()
@@ -288,8 +326,8 @@ class buildset:
                             self.macros.define(ls[1].strip())
                     elif ls[0] == '%undefine':
                         if len(ls) > 2:
-                            raise error.general('%s:%d: %undefine requires just the name' % \
-                                                    (self.bset, lc))
+                            raise error.general('%s:%d: %%undefine requires ' \
+                                                'just the name' % (self.bset, lc))
                         self.macros.undefine(ls[1].strip())
                     elif ls[0] == '%include':
                         configs += self.parse(ls[1].strip())
@@ -301,7 +339,8 @@ class buildset:
                     l = l.strip()
                     c = build.find_config(l, self.configs)
                     if c is None:
-                        raise error.general('%s:%d: cannot find file: %s' % (self.bset, lc, l))
+                        raise error.general('%s:%d: cannot find file: %s' % (self.bset,
+                                                                             lc, l))
                     configs += [c]
         except:
             bset.close()
@@ -320,8 +359,12 @@ class buildset:
         if self.bset.endswith('.cfg'):
             configs = [self.bset]
         else:
-            exbset = self.macros.expand(self.bset)
+            exbset = macro_expand(self.macros, self.bset)
             self.macros['_bset'] = exbset
+            bset_tmp = build.short_name(exbset)
+            if bset_tmp.endswith('.bset'):
+                bset_tmp = bset_tmp[:-5]
+            self.macros['_bset_tmp'] = bset_tmp
             root, ext = path.splitext(exbset)
             if exbset.endswith('.bset'):
                 bset = exbset
@@ -336,30 +379,59 @@ class buildset:
 
         nesting_count += 1
 
-        if mail:
+        if self.mail_active(mail, nesting_count):
             mail['output'].clear()
+            mail['log'] = ''
+            mail['reports'] = []
+            mail['failure'] = None
 
-        log.trace('_bset: %s: make' % (self.bset))
+        log.trace('_bset: %2d: %s: make' % (nesting_count, self.bset))
         log.notice('Build Set: %s' % (self.bset))
 
-        mail_subject = '%s on %s' % (self.bset,
-                                     self.macros.expand('%{_host}'))
-
         current_path = os.environ['PATH']
 
         start = datetime.datetime.now()
 
         mail_report = False
         have_errors = False
+        interrupted = False
 
-        if mail:
-            mail['output'].clear()
+        #
+        # If this is the outermost build set its files are installed. Nested
+        # build sets stage their installed files. The staged files are installed
+        # when the outermost build finishes.
+        #
+        if nesting_count != 1:
+            if self.installing():
+                self.macros['install_mode'] = 'staging'
+
+        #
+        # Only the outer build set can have staging to install. Get the staging
+        # root via the config because it could require a valid config.
+        #
+        have_staging = False
 
         try:
             configs = self.load()
 
-            log.trace('_bset: %s: configs: %s'  % (self.bset, ','.join(configs)))
+            log.trace('_bset: %2d: %s: configs: %s'  % (nesting_count,
+                                                        self.bset, ', '.join(configs)))
+
+            if nesting_count == 1 and len(configs) > 1:
+                #
+                # Prepend staging areas, bin directory to the
+                # path. Lets the later package depend on the earlier
+                # ones.
+                #
+                pathprepend = ['%{stagingroot}/bin'] + \
+                    macro_expand(self.macros, '%{_pathprepend}').split(':')
+                pathprepend = [pp for pp in pathprepend if len(pp)]
+                if len(pathprepend) == 1:
+                    self.macros['_pathprepend'] = pathprepend[0]
+                else:
+                    self.macros['_pathprepend'] = ':'.join(pathprepend)
 
+            sizes_valid = False
             builds = []
             for s in range(0, len(configs)):
                 b = None
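
For the outermost build set the hunk above prepends %{stagingroot}/bin to %{_pathprepend} so packages built later in the set can find tools staged by earlier ones. A sketch of just that macro assembly, with plain strings standing in for the macro table:

# Sketch of the %{_pathprepend} assembly: staging bin directory first, any
# existing entries after it, empty entries dropped.
def staging_pathprepend(stagingroot, existing_prepend):
    entries = [stagingroot + '/bin'] + existing_prepend.split(':')
    entries = [e for e in entries if len(e)]
    if len(entries) == 1:
        return entries[0]
    return ':'.join(entries)

if __name__ == '__main__':
    print(staging_pathprepend('/tmp/sb/staging', ''))            # /tmp/sb/staging/bin
    print(staging_pathprepend('/tmp/sb/staging', '/opt/x/bin'))  # staging bin first
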
@@ -372,14 +444,20 @@ class buildset:
                     opts = copy.copy(self.opts)
                     macros = copy.copy(self.macros)
                     if configs[s].endswith('.bset'):
-                        log.trace('_bset: == %2d %s' % (nesting_count + 1, '=' * 75))
+                        log.trace('_bset: %2d: %s %s' % (nesting_count,
+                                                         configs[s],
+                                                         '=' * (74 - len(configs[s]))))
                         bs = buildset(configs[s], self.configs, opts, macros)
                         bs.build(deps, nesting_count, mail)
+                        if self.installing():
+                            have_staging = True
                         del bs
                     elif configs[s].endswith('.cfg'):
                         if mail:
                             mail_report = True
-                        log.trace('_bset: -- %2d %s' % (nesting_count + 1, '-' * 75))
+                        log.trace('_bset: %2d: %s %s' % (nesting_count,
+                                                         configs[s],
+                                                         '=' * (74 - len(configs[s]))))
                         try:
                             b = build.build(configs[s],
                                             self.opts.get_arg('--pkg-tar-files'),
@@ -410,8 +488,8 @@ class buildset:
                         #
                         # Dump post build macros.
                         #
-                        log.trace('_bset: macros post-build')
-                        log.trace(str(macros))
+                        log.trace('_bset:   : macros post-build')
+                        log.trace(str(b.macros))
                     else:
                         raise error.general('invalid config type: %s' % (configs[s]))
                 except error.general as gerr:
@@ -434,65 +512,151 @@ class buildset:
                     else:
                         raise
             #
-            # Installing ...
+            # Installing or staging ...
             #
-            log.trace('_bset: installing: deps:%r no-install:%r' % \
-                      (deps is None, self.opts.no_install()))
-            if deps is None \
-               and not self.opts.no_install() \
-               and not have_errors:
+            log.trace('_bset: %2d: %s: deps:%r no-install:%r' % \
+                      (nesting_count, self.install_mode(),
+                       deps is None, self.opts.no_install()))
+            log.trace('_bset: %2d: %s: builds: %s' % \
+                      (nesting_count, self.install_mode(),
+                       ', '.join([b.name() for b in builds])))
+            if deps is None and not self.opts.no_install() and not have_errors:
                 for b in builds:
-                    log.trace('_bset: installing: %r' % b.installable())
+                    log.trace('_bset:   : %s: %r' % (self.install_mode(),
+                                                     b.installable()))
                     if b.installable():
-                        self.install(b.name(),
-                                     b.config.expand('%{buildroot}'),
-                                     b.config.expand('%{_prefix}'))
+                        prefix = b.config.expand('%{_prefix}')
+                        buildroot = path.join(b.config.expand('%{buildroot}'), prefix)
+                        if self.staging():
+                            prefix = b.config.expand('%{stagingroot}')
+                        self.install(self.install_mode(), b.name(), buildroot, prefix)
 
+            #
+            # Sizes ...
+            #
+            if len(builds) > 1:
+                size_build = 0
+                size_installed = 0
+                size_build_max = 0
+                for b in builds:
+                    s = b.get_build_size()
+                    size_build += s
+                    if s > size_build_max:
+                        size_build_max = s
+                    size_installed += b.get_installed_size()
+                size_sources = 0
+                for p in builds[0].config.expand('%{_sourcedir}').split(':'):
+                    size_sources += path.get_size(p)
+                size_patches = 0
+                for p in builds[0].config.expand('%{_patchdir}').split(':'):
+                    size_patches += path.get_size(p)
+                size_total = size_sources + size_patches + size_installed
+                build_max_size_human = build.humanize_number(size_build_max +
+                                                             size_installed, 'B')
+                build_total_size_human = build.humanize_number(size_total, 'B')
+                build_sources_size_human = build.humanize_number(size_sources, 'B')
+                build_patches_size_human = build.humanize_number(size_patches, 'B')
+                build_installed_size_human = build.humanize_number(size_installed, 'B')
+                build_size = 'usage: %s' % (build_max_size_human)
+                build_size += ' total: %s' % (build_total_size_human)
+                build_size += ' (sources: %s' % (build_sources_size_human)
+                build_size += ', patches: %s' % (build_patches_size_human)
+                build_size += ', installed %s)' % (build_installed_size_human)
+                sizes_valid = True
+            #
+            # Cleaning ...
+            #
             if deps is None and \
                     (not self.opts.no_clean() or self.opts.always_clean()):
                 for b in builds:
                     if not b.disabled():
                         log.notice('cleaning: %s' % (b.name()))
                         b.cleanup()
+            #
+            # Log the build size message
+            #
+            if len(builds) > 1:
+                log.notice('Build Sizes: %s' % (build_size))
+            #
+            # Clear out the builds ...
+            #
             for b in builds:
                 del b
+
+            #
+            # If builds have been staged, install them into the final prefix.
+            #
+            if have_staging and not self.opts.no_install() and not have_errors:
+                stagingroot = macro_expand(self.macros, '%{stagingroot}')
+                have_stagingroot = path.exists(stagingroot)
+                log.trace('_bset: %2d: install staging, present: %s' % \
+                          (nesting_count, have_stagingroot))
+                if have_stagingroot:
+                    prefix = macro_expand(self.macros, '%{_prefix}')
+                    self.install(self.install_mode(), self.bset, stagingroot, prefix)
+                    staging_size = path.get_size(stagingroot)
+                    if not self.opts.no_clean() or self.opts.always_clean():
+                        log.notice('clean staging: %s' % (self.bset))
+                        log.trace('removing: %s' % (stagingroot))
+                        if not self.opts.dry_run():
+                            if path.exists(stagingroot):
+                                path.removeall(stagingroot)
+                    log.notice('Staging Size: %s' % \
+                               (build.humanize_number(staging_size, 'B')))
         except error.general as gerr:
             if not build_error:
                 log.stderr(str(gerr))
             raise
         except KeyboardInterrupt:
-            mail_report = False
+            interrupted = True
             raise
         except:
             self.build_failure = 'RSB general failure'
+            interrupted = True
             raise
         finally:
             end = datetime.datetime.now()
             os.environ['PATH'] = current_path
             build_time = str(end - start)
-            if mail_report and not self.macros.defined('mail_disable'):
-                self.write_mail_header('Build Time: %s' % (build_time), True)
-                self.write_mail_header('', True)
+            if self.mail_single_report() and nesting_count == 1:
+                mail_report = True
+            if interrupted or self.macros.defined('mail_disable'):
+                mail_report = False
+            if mail_report and mail is not None:
+                if self.installing():
+                    self.write_mail_header('Build Time: %s' % (build_time), True)
+                    self.write_mail_header('', True)
+                    self.write_mail_header(mail['header'], True)
+                    self.write_mail_header('')
+                    log.notice('Mailing report: %s' % (mail['to']))
+                    mail['log'] += self.get_mail_header()
+                    if sizes_valid:
+                        mail['log'] += 'Sizes' + os.linesep
+                        mail['log'] += '=====' + os.linesep + os.linesep
+                        mail['log'] += \
+                            'Maximum build usage: ' + build_max_size_human + os.linesep
+                        mail['log'] += \
+                            'Total size: ' + build_total_size_human + os.linesep
+                        mail['log'] += \
+                            'Installed : ' + build_installed_size_human + os.linesep
+                        mail['log'] += 'Sources: ' + build_sources_size_human + os.linesep
+                        mail['log'] += 'Patches: ' + build_patches_size_human + os.linesep
+                    mail['log'] += os.linesep
+                    mail['log'] += 'Output' + os.linesep
+                    mail['log'] += '======' + os.linesep + os.linesep
+                    mail['log'] += os.linesep.join(mail['output'].get())
+                    mail['log'] += os.linesep + os.linesep
+                    mail['log'] += 'Report' + os.linesep
+                    mail['log'] += '======' + os.linesep + os.linesep
+                mail['reports'] += [self.get_mail_report()]
                 if self.build_failure is not None:
-                    mail_subject = 'FAILED %s (%s)' % \
-                        (mail_subject, self.build_failure)
-                else:
-                    mail_subject = 'PASSED %s' % (mail_subject)
-                mail_subject = 'Build %s: %s' % (reports.platform(mode = 'system'),
-                                                 mail_subject)
-                self.write_mail_header(mail['header'], True)
-                self.write_mail_header('')
-                log.notice('Mailing report: %s' % (mail['to']))
-                body = self.get_mail_header()
-                body += 'Output' + os.linesep
-                body += '======' + os.linesep + os.linesep
-                body += os.linesep.join(mail['output'].get())
-                body += os.linesep + os.linesep
-                body += 'Report' + os.linesep
-                body += '======' + os.linesep + os.linesep
-                body += self.get_mail_report()
-                if not opts.dry_run():
-                    mail['mail'].send(mail['to'], mail_subject, body)
+                    mail['failure'] = self.build_failure
+                if self.mail_active(mail, nesting_count):
+                    try:
+                        self.mail_send(mail)
+                    except error.general as gerr:
+                        log.notice('Mail Send Failure: %s' % (gerr))
+
             log.notice('Build Set: Time %s' % (build_time))
 
 def list_bset_cfg_files(opts, configs):
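
The size summary added in the hunk above totals the per-build usage, the installed tree and the source and patch directories, then prints them through build.humanize_number(). The aggregation can be sketched as below; tree_size() and humanize_number() here are illustrative stand-ins, not the RSB routines:

# Sketch of the build-size summary: sum directory sizes and print them in a
# human readable form. Both helpers are illustrative stand-ins.
import os

def tree_size(root):
    total = 0
    for dirpath, _dirnames, filenames in os.walk(root):
        for f in filenames:
            try:
                total += os.path.getsize(os.path.join(dirpath, f))
            except OSError:
                pass
    return total

def humanize_number(num, suffix='B'):
    for unit in ['', 'K', 'M', 'G', 'T']:
        if abs(num) < 1024.0:
            return '%3.1f%s%s' % (num, unit, suffix)
        num /= 1024.0
    return '%.1fP%s' % (num, suffix)

if __name__ == '__main__':
    sources = tree_size('sources')
    patches = tree_size('patches')
    installed = tree_size('staging')
    total = sources + patches + installed
    print('total: %s (sources: %s, patches: %s, installed: %s)' %
          (humanize_number(total), humanize_number(sources),
           humanize_number(patches), humanize_number(installed)))
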
@@ -517,6 +681,7 @@ def run():
     try:
         optargs = { '--list-configs':  'List available configurations',
                     '--list-bsets':    'List available build sets',
+                    '--list-configs':  'List available configuration files.',
                     '--list-deps':     'List the dependent files.',
                     '--bset-tar-file': 'Create a build set tar file',
                     '--pkg-tar-files': 'Create package tar files',
@@ -525,15 +690,18 @@ def run():
         mailer.append_options(optargs)
         opts = options.load(sys.argv, optargs)
         if opts.get_arg('--mail'):
-            mail = { 'mail'  : mailer.mail(opts),
-                     'output': log_capture() }
+            mail = { 'mail'   : mailer.mail(opts),
+                     'output' : log_capture(),
+                     'log'    : '',
+                     'reports': [],
+                     'failure': None }
             to_addr = opts.get_arg('--mail-to')
             if to_addr is not None:
                 mail['to'] = to_addr[1]
             else:
-                mail['to'] = opts.defaults.expand('%{_mail_tools_to}')
+                mail['to'] = macro_expand(opts.defaults, '%{_mail_tools_to}')
             mail['from'] = mail['mail'].from_address()
-        log.notice('RTEMS Source Builder - Set Builder, %s' % (version.str()))
+        log.notice('RTEMS Source Builder - Set Builder, %s' % (version.string()))
         opts.log_info()
         if not check.host_setup(opts):
             raise error.general('host build environment is not set up correctly')
@@ -551,7 +719,7 @@ def run():
         else:
             deps = None
         if not list_bset_cfg_files(opts, configs):
-            prefix = opts.defaults.expand('%{_prefix}')
+            prefix = macro_expand(opts.defaults, '%{_prefix}')
             if opts.canadian_cross():
                 opts.disable_install()
 
diff --git a/source-builder/sb/shell.py b/source-builder/sb/shell.py
new file mode 100644
index 0000000..ecb8188
--- /dev/null
+++ b/source-builder/sb/shell.py
@@ -0,0 +1,74 @@
+#
+# RTEMS Tools Project (http://www.rtems.org/)
+# Copyright 2019 Chris Johns (chrisj at rtems.org)
+# All rights reserved.
+#
+# This file is part of the RTEMS Tools package in 'rtems-tools'.
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+#
+# This code builds a package compiler tool suite given a tool set. A tool
+# set lists the various tools. These are specific tool configurations.
+#
+
+from __future__ import print_function
+
+import sys
+
+try:
+    from . import error
+    from . import execute
+    from . import log
+    from . import options
+except KeyboardInterrupt:
+    print('abort: user terminated', file = sys.stderr)
+    sys.exit(1)
+except:
+    raise
+
+def expand(macros, line):
+    #
+    # Parse the line and handle nesting '()' pairs.
+    #
+    def _exec(shell_macro):
+        output = ''
+        if len(shell_macro) > 3:
+            e = execute.capture_execution()
+            if options.host_windows:
+                cmd = '%s -c "%s"' % (macros.expand('%{__sh}'), shell_macro[2:-1])
+            else:
+                cmd = shell_macro[2:-1]
+            exit_code, proc, output = e.shell(cmd)
+            log.trace('shell-output: %d %s' % (exit_code, output))
+            if exit_code != 0:
+                raise error.general('shell macro failed: %s: %d: %s' % (cmd,
+                                                                        exit_code,
+                                                                        output))
+        return output
+
+    updating = True
+    while updating:
+        updating = False
+        pos = line.find('%(')
+        if pos >= 0:
+            braces = 0
+            for p in range(pos + 2, len(line)):
+                if line[p] == '(':
+                    braces += 1
+                elif line[p] == ')':
+                    if braces > 0:
+                        braces -= 1
+                    else:
+                        line = line[:pos] + _exec(line[pos:p + 1]) + line[p + 1:]
+                        updating = True
+                        break
+    return line
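
shell.expand() scans for %( ... ), balances nested parentheses to find the closing one, runs the enclosed command and splices its output back into the line. A minimal sketch of the same scan with subprocess standing in for the RSB execute module:

# Sketch of %(...) expansion: the same nested-parenthesis scan as
# shell.expand(), with subprocess in place of the RSB execute module.
import subprocess

def expand(line):
    updating = True
    while updating:
        updating = False
        pos = line.find('%(')
        if pos >= 0:
            depth = 0
            for p in range(pos + 2, len(line)):
                if line[p] == '(':
                    depth += 1
                elif line[p] == ')':
                    if depth > 0:
                        depth -= 1
                    else:
                        cmd = line[pos + 2:p]
                        out = subprocess.run(cmd, shell=True, capture_output=True,
                                             text=True).stdout.strip()
                        line = line[:pos] + out + line[p + 1:]
                        updating = True
                        break
    return line

if __name__ == '__main__':
    print(expand('version: %(echo 1.2.3)'))   # version: 1.2.3
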
diff --git a/source-builder/sb/simhost.py b/source-builder/sb/simhost.py
new file mode 100644
index 0000000..1ff98e8
--- /dev/null
+++ b/source-builder/sb/simhost.py
@@ -0,0 +1,656 @@
+#
+# RTEMS Tools Project (http://www.rtems.org/)
+# Copyright 2010-2020 Chris Johns (chrisj at rtems.org)
+# All rights reserved.
+#
+# This file is part of the RTEMS Tools package in 'rtems-tools'.
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+#
+# This code builds a package compiler tool suite given a tool set. A tool
+# set lists the various tools. These are specific tool configurations.
+#
+
+from __future__ import print_function
+
+import copy
+import datetime
+import os
+import sys
+
+try:
+    from . import build
+    from . import check
+    from . import error
+    from . import git
+    from . import log
+    from . import macros
+    from . import path
+    from . import sources
+    from . import version
+except KeyboardInterrupt:
+    print('abort: user terminated', file = sys.stderr)
+    sys.exit(1)
+except:
+    raise
+
+#
+# Define host profiles so a host can be simulated on another host.
+#
+profiles = {
+    'darwin':  { '_os':              ('none',    'none',     'darwin'),
+                 '_host':            ('triplet', 'required', 'x86_64-apple-darwin18.5.0'),
+                 '_host_vendor':     ('none',    'none',     'apple'),
+                 '_host_os':         ('none',    'none',     'darwin'),
+                 '_host_os_version': ('none',    'none',     '18.5.0'),
+                 '_host_cpu':        ('none',    'none',     'x86_64'),
+                 '_host_alias':      ('none',    'none',     '%{nil}'),
+                 '_host_arch':       ('none',    'none',     'x86_64'),
+                 '_usr':             ('dir',     'optional', '/usr/local'),
+                 '_var':             ('dir',     'optional', '/usr/local/var') },
+    'freebsd': { '_os':              ('none',    'none',     'freebsd'),
+                 '_host':            ('triplet', 'required', 'x86_64-freebsd12.0-RELEASE-p3'),
+                 '_host_vendor':     ('none',    'none',     'pc'),
+                 '_host_os':         ('none',    'none',     'freebsd'),
+                 '_host_os_version': ('none',    'none',     '12.0-RELEASE-p3'),
+                 '_host_cpu':        ('none',    'none',     'x86_64'),
+                 '_host_alias':      ('none',    'none',     '%{nil}'),
+                 '_host_arch':       ('none',    'none',     'x86_64'),
+                 '_usr':             ('dir',     'optional', '/usr/local'),
+                 '_var':             ('dir',     'optional', '/usr/local/var') },
+    'linux':   { '_os':              ('none',    'none',     'linux'),
+                 '_host':            ('triplet', 'required', 'x86_64-linux-gnu'),
+                 '_host_vendor':     ('none',    'none',     'gnu'),
+                 '_host_os':         ('none',    'none',     'linux'),
+                 '_host_os_version': ('none',    'none',     '4.18.0-16'),
+                 '_host_cpu':        ('none',    'none',     'x86_64'),
+                 '_host_alias':      ('none',    'none',     '%{nil}'),
+                 '_host_arch':       ('none',    'none',     'x86_64'),
+                 '_usr':             ('dir',     'optional', '/usr/local'),
+                 '_var':             ('dir',     'optional', '/usr/local/var') },
+    'netbsd':  { '_os':              ('none',    'none',     'netbsd'),
+                 '_host':            ('triplet', 'required', 'x86_64-netbsd8.0'),
+                 '_host_vendor':     ('none',    'none',     'pc'),
+                 '_host_os':         ('none',    'none',     'netbsd'),
+                 '_host_os_version': ('none',    'none',     '8.0'),
+                 '_host_cpu':        ('none',    'none',     'x86_64'),
+                 '_host_alias':      ('none',    'none',     '%{nil}'),
+                 '_host_arch':       ('none',    'none',     'x86_64'),
+                 '_usr':             ('dir',     'optional', '/usr/local'),
+                 '_var':             ('dir',     'optional', '/usr/local/var') },
+    'solaris': { '_os':              ('none',    'none',     'solaris'),
+                 '_host':            ('triplet', 'required', 'x86_64-pc-solaris2'),
+                 '_host_vendor':     ('none',    'none',     'pc'),
+                 '_host_os':         ('none',    'none',     'solaris'),
+                 '_host_os_version': ('none',    'none',     '2'),
+                 '_host_cpu':        ('none',    'none',     'x86_64'),
+                 '_host_alias':      ('none',    'none',     '%{nil}'),
+                 '_host_arch':       ('none',    'none',     'x86_64'),
+                 '_usr':             ('dir',     'optional', '/usr/local'),
+                 '_var':             ('dir',     'optional', '/usr/local/var') },
+    'win32':   { '_os':              ('none',    'none',     'win32'),
+                 '_windows_os':      ('none',    'none',     'mingw32'),
+                 '_host':            ('triplet', 'required', 'x86_64-w64-mingw32'),
+                 '_host_vendor':     ('none',    'none',     'pc'),
+                 '_host_os':         ('none',    'none',     'win32'),
+                 '_host_os_version': ('none',    'none',     '10'),
+                 '_host_cpu':        ('none',    'none',     'x86_64'),
+                 '_host_alias':      ('none',    'none',     '%{nil}'),
+                 '_host_arch':       ('none',    'none',     'x86_64'),
+                 '_usr':             ('dir',     'optional', '/usr/local'),
+                 '_var':             ('dir',     'optional', '/usr/local/var') },
+    'cygwin':  { '_os':              ('none',    'none',     'win32'),
+                 '_windows_os':      ('none',    'none',     'cygwin'),
+                 '_host':            ('triplet', 'required', 'x86_64-w64-cygwin'),
+                 '_host_vendor':     ('none',    'none',     'microsoft'),
+                 '_host_os':         ('none',    'none',     'win32'),
+                 '_host_os_version': ('none',    'none',     '10'),
+                 '_host_cpu':        ('none',    'none',     'x86_64'),
+                 '_host_alias':      ('none',    'none',     '%{nil}'),
+                 '_host_arch':       ('none',    'none',     'x86_64'),
+                 '_usr':             ('dir',     'optional', '/usr/local'),
+                 '_var':             ('dir',     'optional', '/usr/local/var') },
+}
+
+
+class log_capture(object):
+    def __init__(self):
+        self.log = []
+        log.capture = self.capture
+
+    def __str__(self):
+        return os.linesep.join(self.log)
+
+    def capture(self, text):
+        self.log += [l for l in text.replace(chr(13), '').splitlines()]
+
+    def get(self):
+        return self.log
+
+    def clear(self):
+        self.log = []
+
+def find_bset_config(bset_config, macros):
+    '''Find the build set or config file using the macro config defined path.'''
+    name = bset_config
+    if not path.exists(name):
+        for cp in macros.expand('%{_configdir}').split(':'):
+            configdir = path.abspath(cp)
+            name = path.join(configdir, bset_config)
+            if path.exists(name):
+                break
+            name = None
+        if name is None:
+            raise error.general('no build set file found: %s' % (bset_config))
+    return name
+
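
find_bset_config() walks the colon-separated %{_configdir} path and returns the first existing match. A small usage sketch with an explicit search-path list (the directories shown are illustrative):

# Sketch of the %{_configdir} search: the first existing match wins.
import os

def find_bset_config(bset_config, configdirs):
    if os.path.exists(bset_config):
        return bset_config
    for cp in configdirs:
        candidate = os.path.join(os.path.abspath(cp), bset_config)
        if os.path.exists(candidate):
            return candidate
    raise FileNotFoundError('no build set file found: %s' % bset_config)

if __name__ == '__main__':
    try:
        print(find_bset_config('4.11/rtems-sparc.bset',
                               ['rtems/config', 'source-builder/config']))
    except FileNotFoundError as e:
        print(e)
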
+#
+# A skinny options command line class to get the configs to load.
+#
+class options(object):
+    def __init__(self, argv, argopts, defaults, extras):
+        command_path = path.dirname(path.abspath(argv[0]))
+        if len(command_path) == 0:
+            command_path = '.'
+        self.command_path = command_path
+        self.command_name = path.basename(argv[0])
+        extras += ['--dry-run',
+                   '--quiet',
+                   '--without-log',
+                   '--without-error-report',
+                   '--without-release-url']
+        self.argv = argv
+        self.args = argv[1:] + extras
+        self.defaults = macros.macros(name = defaults,
+                                      sbdir = command_path)
+        self.load_overrides()
+        self.opts = { 'params' :  extras }
+        self.sb_git()
+        self.rtems_bsp()
+        if 'download_dir' in argopts and argopts.download_dir is not None:
+            self.defaults['_sourcedir'] = ('dir',
+                                           'optional',
+                                           path.abspath(argopts.download_dir))
+            self.defaults['_patchdir'] = ('dir',
+                                          'optional',
+                                          path.abspath(argopts.download_dir))
+
+    def load_overrides(self):
+        overrides = None
+        if os.name == 'nt':
+            try:
+                from . import windows
+                overrides = windows.load()
+                host_windows = True
+                host_posix = False
+            except:
+                raise error.general('failed to load Windows host support')
+        elif os.name == 'posix':
+            uname = os.uname()
+            try:
+                if uname[0].startswith('MINGW64_NT'):
+                    from . import windows
+                    overrides = windows.load()
+                    host_windows = True
+                elif uname[0].startswith('CYGWIN_NT'):
+                    from . import windows
+                    overrides = windows.load()
+                elif uname[0] == 'Darwin':
+                    from . import darwin
+                    overrides = darwin.load()
+                elif uname[0] == 'FreeBSD':
+                    from . import freebsd
+                    overrides = freebsd.load()
+                elif uname[0] == 'NetBSD':
+                    from . import netbsd
+                    overrides = netbsd.load()
+                elif uname[0] == 'Linux':
+                    from . import linux
+                    overrides = linux.load()
+                elif uname[0] == 'SunOS':
+                    from . import solaris
+                    overrides = solaris.load()
+            except error.general as ge:
+                raise error.general('failed to load %s host support: %s' % (uname[0], ge))
+            except:
+                raise error.general('failed to load %s host support' % (uname[0]))
+        else:
+            raise error.general('unsupported host type; please add')
+        if overrides is None:
+            raise error.general('no hosts defaults found; please add')
+        for k in overrides:
+            self.defaults[k] = overrides[k]
+
+    def parse_args(self, arg, error = True, extra = True):
+        for a in range(0, len(self.args)):
+            if self.args[a].startswith(arg):
+                lhs = None
+                rhs = None
+                if '=' in self.args[a]:
+                    eqs = self.args[a].split('=')
+                    lhs = eqs[0]
+                    if len(eqs) > 2:
+                        rhs = '='.join(eqs[1:])
+                    else:
+                        rhs = eqs[1]
+                elif extra:
+                    lhs = self.args[a]
+                    a += 1
+                    if a < len(self.args):
+                        rhs = self.args[a]
+                return [lhs, rhs]
+            a += 1
+        return None
+
+    def rtems_bsp(self):
+        self.defaults['rtems_version'] = str(version.version())
+        self.defaults['_target'] = 'arch-rtems'
+        self.defaults['rtems_host'] = 'rtems-arch'
+        self.defaults['with_rtems_bsp'] = 'rtems-bsp'
+
+    def sb_git(self):
+        repo = git.repo(self.defaults.expand('%{_sbdir}'), self)
+        repo_mail = None
+        if repo.valid():
+            repo_valid = '1'
+            repo_head = repo.head()
+            repo_clean = not repo.dirty()
+            repo_remotes = '%{nil}'
+            remotes = repo.remotes()
+            if 'origin' in remotes:
+                repo_remotes = '%s/origin' % (remotes['origin']['url'])
+                repo_id = repo_head
+            if not repo_clean:
+                repo_id += '-modified'
+                repo_mail = repo.email()
+        else:
+            repo_valid = '0'
+            repo_head = '%{nil}'
+            repo_clean = '%{nil}'
+            repo_remotes = '%{nil}'
+            repo_id = 'no-repo'
+        self.defaults['_sbgit_valid'] = repo_valid
+        self.defaults['_sbgit_head']  = repo_head
+        self.defaults['_sbgit_clean'] = str(repo_clean)
+        self.defaults['_sbgit_remotes'] = str(repo_remotes)
+        self.defaults['_sbgit_id']    = repo_id
+        if repo_mail is not None:
+            self.defaults['_sbgit_mail'] = repo_mail
+
+    def get_arg(self, arg):
+        if self.optargs is None or arg not in self.optargs:
+            return None
+        return self.parse_args(arg)
+
+    def with_arg(self, label, default = 'not-found'):
+        # the default if there is no option for without.
+        result = default
+        for pre in ['with', 'without']:
+            arg_str = '--%s-%s' % (pre, label)
+            arg_label = '%s_%s' % (pre, label)
+            arg = self.parse_args(arg_str, error = False, extra = False)
+            if arg is not None:
+                if arg[1] is None:
+                    result = 'yes'
+                else:
+                    result = arg[1]
+                break
+        return [arg_label, result]
+
+    def dry_run(self):
+        return True
+
+    def keep_going(self):
+        return False
+
+    def quiet(self):
+        return True
+
+    def no_clean(self):
+        return True
+
+    def always_clean(self):
+        return False
+
+    def no_install(self):
+        return True
+
+    def download_disabled(self):
+        return False
+
+    def disable_install(self):
+        return True
+
+    def urls(self):
+        return None
+
+    def info(self):
+        s = ' Command Line: %s%s' % (' '.join(self.argv), os.linesep)
+        s += ' Python: %s' % (sys.version.replace('\n', ''))
+        return s
+
+class buildset:
+    """Build a set builds a set of packages."""
+
+    def __init__(self, bset, _configs, opts, macros = None):
+        log.trace('_bset: %s: init' % (bset))
+        self.parent = 'root'
+        self._includes = []
+        self._errors = []
+        self.configs = _configs
+        self.opts = opts
+        if macros is None:
+            self.macros = copy.copy(opts.defaults)
+        else:
+            self.macros = copy.copy(macros)
+        self.macros.define('_rsb_getting_source')
+        log.trace('_bset: %s: macro defaults' % (bset))
+        log.trace(str(self.macros))
+        self.bset = bset
+        _target = self.macros.expand('%{_target}')
+        if len(_target):
+            pkg_prefix = _target
+        else:
+            pkg_prefix = self.macros.expand('%{_host}')
+        self.bset_pkg = '%s-%s-set' % (pkg_prefix, self.bset)
+        self.build_failure = None
+
+    def _add_includes(self, includes, parent = None):
+        if parent is None:
+            parent = self.parent
+        if not isinstance(includes, list):
+            includes = [includes]
+        self._includes += [i + ':' + parent for i in includes]
+
+    def _rebase_includes(self, includes, parent):
+        if not isinstance(includes, list):
+            includes = [includes]
+        rebased = []
+        for i in includes:
+            if i.split(':', 2)[1] == 'root':
+                rebased += [i.split(':', 2)[0] + ':' + parent]
+            else:
+                rebased += [i]
+        return rebased
+
+    def includes(self):
+        return sorted(list(set(self._includes)))
+
+    def errors(self):
+        return sorted(list(set(self._errors)))
+
+    def build_package(self, _config, _build):
+        if not _build.disabled():
+            _build.make()
+
+    def parse(self, bset):
+
+        #
+        # Ouch, this is a copy of the setbuilder.py code.
+        #
+
+        def _clean(line):
+            line = line[0:-1]
+            b = line.find('#')
+            if b >= 0:
+                line = line[0:b]
+            return line.strip()
+
+        bsetname = find_bset_config(bset, self.macros)
+
+        try:
+            log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
+            bsetf = open(path.host(bsetname), 'r')
+        except IOError as err:
+            raise error.general('error opening bset file: %s' % (bsetname))
+
+        self._add_includes(bsetname)
+        parent = self.parent
+        self.parent = bsetname
+
+        configs = []
+
+        try:
+            lc = 0
+            for l in bsetf:
+                lc += 1
+                l = _clean(l)
+                if len(l) == 0:
+                    continue
+                log.trace('_bset: %s: %03d: %s' % (self.bset, lc, l))
+                ls = l.split()
+                if ls[0][-1] == ':' and ls[0][:-1] == 'package':
+                    self.bset_pkg = ls[1].strip()
+                    self.macros['package'] = self.bset_pkg
+                elif ls[0][0] == '%':
+                    def err(msg):
+                        raise error.general('%s:%d: %s' % (self.bset, lc, msg))
+                    if ls[0] == '%define':
+                        if len(ls) > 2:
+                            self.macros.define(ls[1].strip(),
+                                               ' '.join([f.strip() for f in ls[2:]]))
+                        else:
+                            self.macros.define(ls[1].strip())
+                    elif ls[0] == '%undefine':
+                        if len(ls) > 2:
+                            raise error.general('%s:%d: %%undefine requires just the name' \
+                                                % (self.bset, lc))
+                        self.macros.undefine(ls[1].strip())
+                    elif ls[0] == '%include':
+                        configs += self.parse(ls[1].strip())
+                    elif ls[0] in ['%patch', '%source']:
+                        sources.process(ls[0][1:], ls[1:], self.macros, err)
+                    elif ls[0] == '%hash':
+                        sources.hash(ls[1:], self.macros, err)
+                else:
+                    l = l.strip()
+                    c = build.find_config(l, self.configs)
+                    if c is None:
+                        raise error.general('%s:%d: cannot find file: %s'
+                                            % (self.bset, lc, l))
+                    configs += [c + ':' + self.parent]
+        finally:
+            bsetf.close()
+            self.parent = parent
+
+        return configs
+
+    def load(self):
+        #
+        # If the build set file ends with .cfg the user has passed a
+        # configuration file to the build set builder, so we just return it.
+        #
+        if self.bset.endswith('.cfg'):
+            self._add_includes(self.bset)
+            configs = [self.bset]
+        else:
+            exbset = self.macros.expand(self.bset)
+            self.macros['_bset'] = exbset
+            self.macros['_bset_tmp'] = build.short_name(exbset)
+            root, ext = path.splitext(exbset)
+            if exbset.endswith('.bset'):
+                bset = exbset
+            else:
+                bset = '%s.bset' % (exbset)
+            configs = self.parse(bset)
+        return configs
+
+    def set_host_details(self, host, opts, macros):
+        if host not in profiles:
+            raise error.general('invalid host: ' + host)
+        for m in profiles[host]:
+            opts.defaults[m] = profiles[host][m]
+            macros[m] = profiles[host][m]
+        macros_to_copy = [('%{_build}',        '%{_host}'),
+                          ('%{_build_alias}',  '%{_host_alias}'),
+                          ('%{_build_arch}',   '%{_host_arch}'),
+                          ('%{_build_cpu}',    '%{_host_cpu}'),
+                          ('%{_build_os}',     '%{_host_os}'),
+                          ('%{_build_vendor}', '%{_host_vendor}')]
+        for m in macros_to_copy:
+            opts.defaults[m[0]] = opts.defaults[m[1]]
+            macros[m[0]] = macros[m[1]]
+        #
+        # Look for a valid cc and cxx.
+        #
+        for cc in ['/usr/bin/cc', '/usr/bin/clang', '/usr/bin/gcc']:
+            if check.check_exe(cc, cc):
+                opts.defaults['__cc'] = cc
+                macros['__cc'] = cc
+                break
+        if not macros.defined('__cc'):
+            raise error.general('no valid cc found')
+        for cxx in ['/usr/bin/c++', '/usr/bin/clang++', '/usr/bin/g++']:
+            if check.check_exe(cxx, cxx):
+                opts.defaults['__cxx'] = cxx
+                macros['__cxx'] = cxx
+        if not macros.defined('__cxx'):
+            raise error.general('no valid c++ found')
+
+    def build(self, host, nesting_count = 0):
+
+        build_error = False
+
+        nesting_count += 1
+
+        log.trace('_bset: %s for %s: make' % (self.bset, host))
+        log.notice('Build Set: %s for %s' % (self.bset, host))
+
+        mail_subject = '%s on %s' % (self.bset,
+                                     self.macros.expand('%{_host}'))
+
+        current_path = os.environ['PATH']
+
+        start = datetime.datetime.now()
+
+        have_errors = False
+
+        try:
+            configs = self.load()
+
+            log.trace('_bset: %s: configs: %s'  % (self.bset, ','.join(configs)))
+
+            sizes_valid = False
+            builds = []
+            for s in range(0, len(configs)):
+                bs = None
+                b = None
+                try:
+                    #
+                    # Each section of the build set gets a separate set of
+                    # macros so we do not contaminate one configuration with
+                    # another.
+                    #
+                    opts = copy.copy(self.opts)
+                    macros = copy.copy(self.macros)
+                    self.set_host_details(host, opts, macros)
+                    config, parent = configs[s].split(':', 2)
+                    if config.endswith('.bset'):
+                        log.trace('_bset: == %2d %s' % (nesting_count + 1, '=' * 75))
+                        bs = buildset(config, self.configs, opts, macros)
+                        bs.build(host, nesting_count)
+                        self._includes += \
+                            self._rebase_includes(bs.includes(), parent)
+                        del bs
+                    elif config.endswith('.cfg'):
+                        log.trace('_bset: -- %2d %s' % (nesting_count + 1, '-' * 75))
+                        try:
+                            b = build.build(config,
+                                            False,
+                                            opts,
+                                            macros)
+                            self._includes += \
+                                self._rebase_includes(b.includes(), parent)
+                        except:
+                            build_error = True
+                            raise
+                        self.build_package(config, b)
+                        builds += [b]
+                        #
+                        # Dump post build macros.
+                        #
+                        log.trace('_bset: macros post-build')
+                        log.trace(str(macros))
+                    else:
+                        raise error.general('invalid config type: %s' % (config))
+                except error.general as gerr:
+                    have_errors = True
+                    if b is not None:
+                        if self.build_failure is None:
+                            self.build_failure = b.name()
+                        self._includes += b.includes()
+                    self._errors += [find_bset_config(config, opts.defaults) + ':' + parent] + self._includes
+                    raise
+            #
+            # Clear out the builds ...
+            #
+            for b in builds:
+                del b
+        except error.general as gerr:
+            if not build_error:
+                log.stderr(str(gerr))
+            raise
+        except KeyboardInterrupt:
+            raise
+        except:
+            self.build_failure = 'RSB general failure'
+            raise
+        finally:
+            end = datetime.datetime.now()
+            os.environ['PATH'] = current_path
+            build_time = str(end - start)
+            log.notice('Build Set: Time %s' % (build_time))
+
+def list_hosts():
+    hosts = sorted(profiles.keys())
+    max_os_len = max(len(h) for h in hosts)
+    max_host_len = max(len(profiles[h]['_host'][2]) for h in hosts)
+    for h in hosts:
+        print('%*s: %-*s %s' % (max_os_len, h, max_host_len,
+                                profiles[h]['_host'][2],
+                                profiles[h]['_host'][2]))
+
+def get_files(configs, ext, localpath):
+    files = []
+    if localpath:
+        for cp in configs['localpaths']:
+            files += [c for c in configs[cp] if c.endswith(ext)]
+    else:
+        files = [c for c in configs['files'] if c.endswith(ext)]
+    return files
+
+def get_config_files(configs, localpath = False):
+    return get_files(configs, '.cfg', localpath)
+
+def get_bset_files(configs, localpath = False):
+    return get_files(configs, '.bset', localpath)
+
+def get_root(configs):
+    return configs['root']
+
+def list_bset_files(opts, configs):
+    for p in configs['paths']:
+        print('Examining: %s' % (os.path.relpath(p)))
+    for b in get_bset_files(configs):
+        print(' %s' % (b[:b.rfind('.')]))
+
+def load_log(logfile):
+    log.default = log.log(streams = [logfile])
+
+def log_default(name):
+    return 'rsb-log-%s-%s.txt' % (name, datetime.datetime.now().strftime('%Y%m%d-%H%M%S'))
+
+def load_options(argv, argopts, defaults = '%{_sbdir}/defaults.mc', extras = []):
+    opts = options(argv, argopts, defaults, extras)
+    opts.defaults['rtems_version'] = str(argopts.rtems_version)
+    return opts
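
The build set parsing in buildset.parse() above is easier to follow with a
small, self-contained sketch of the line classification it performs. The
build set content and the config name below are made up for illustration,
and the macro, %include, %source, %patch and %hash handling of the real
method is left out:

# A simplified stand-in for buildset.parse(): classify the lines of an
# in-memory build set (hypothetical content).
bset_text = '''
# example build set
package: example-tools-set
%define gdb_version 9.1
tools/rtems-gdb-9.1-1  # a configuration reference
'''

def _clean(line):
    # Drop comments and surrounding whitespace; the in-tree version also
    # strips the trailing newline read from the build set file.
    b = line.find('#')
    if b >= 0:
        line = line[:b]
    return line.strip()

for lc, l in enumerate(bset_text.splitlines(), start = 1):
    l = _clean(l)
    if len(l) == 0:
        continue
    ls = l.split()
    if ls[0][-1] == ':' and ls[0][:-1] == 'package':
        print('%3d: package name:  %s' % (lc, ls[1]))
    elif ls[0][0] == '%':
        print('%3d: directive:     %s' % (lc, ' '.join(ls)))
    else:
        print('%3d: configuration: %s' % (lc, l))
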
diff --git a/source-builder/sb/solaris.py b/source-builder/sb/solaris.py
index 8d9498a..231fd34 100644
--- a/source-builder/sb/solaris.py
+++ b/source-builder/sb/solaris.py
@@ -25,9 +25,9 @@
 import pprint
 import os
 
-import check
-import error
-import execute
+from . import check
+from . import error
+from . import execute
 
 def load():
     uname = os.uname()
diff --git a/source-builder/sb/sources.py b/source-builder/sb/sources.py
index fb6d819..1d62cb7 100644
--- a/source-builder/sb/sources.py
+++ b/source-builder/sb/sources.py
@@ -21,7 +21,7 @@
 # Manage sources and patches
 #
 
-import log
+from . import log
 
 def _args(args):
     return [i for s in [ii.split() for ii in args] for i in s]
@@ -34,23 +34,29 @@ def add(label, args, macros, error):
     if len(args) < 2:
         error('%%%s requires at least 2 arguments' % (label))
     _map = '%s-%s' % (label, args[0])
+    _value = ' '.join(args[1:])
     macros.create_map(_map)
     index = 0
     while True:
         key = _make_key(label, index)
         if key not in macros.map_keys(_map):
             break
+        macros.set_read_map(_map)
+        value = macros.get_value(key)
+        macros.unset_read_map(_map)
+        if value == _value:
+            error('%%%s duplicate add: %s' % (label, _value))
         index += 1
     macros.set_write_map(_map)
-    macros.define(key, ' '.join(args[1:]))
+    macros.define(key, _value)
     macros.unset_write_map()
     return None
 
 def set(label, args, macros, error):
     args = _args(args)
     if len(args) < 2:
-        error('%%%s requires at least 2 arguments' % (label))
-        return
+        error('%%%s set requires at least 2 arguments' % (label))
+        return []
     _map = '%s-%s' % (label, args[0])
     macros.create_map(_map)
     key = _make_key(label, 0)
@@ -63,12 +69,26 @@ def set(label, args, macros, error):
 def setup(label, args, macros, error):
     args = _args(args)
     if len(args) < 2:
-        error('%%%s requires at least 2 arguments: %s' % (label, ' '.join(args)))
+        error('%%%s setup requires at least 2 arguments: %s' % (label, ' '.join(args)))
     ss = '%%setup %s %s' % (label, ' '.join(args))
     _map = '%s-%s' % (label, args[0])
     if 'setup' in macros.map_keys(_map):
         error('%%%s already setup source: %s' % (label, ' '.join(args)))
-        return
+        return []
+    macros.set_write_map(_map)
+    macros.define('setup', ss)
+    macros.unset_write_map()
+    return [ss]
+
+def download(label, args, macros, error):
+    args = _args(args)
+    if len(args) != 1:
+        error('%%%s download requires 1 argument: %s' % (label, ' '.join(args)))
+    ss = '%%setup %s %s -g' % (label, ' '.join(args))
+    _map = '%s-%s' % (label, args[0])
+    if 'setup' in macros.map_keys(_map):
+        error('%%%s already setup source: %s' % (label, ' '.join(args)))
+        return []
     macros.set_write_map(_map)
     macros.define('setup', ss)
     macros.unset_write_map()
@@ -79,15 +99,14 @@ def process(label, args, macros, error):
         error('invalid source type: %s' % (label))
     args = _args(args)
     log.trace('sources: %s' % (' '.join(args)))
-    if len(args) < 3:
-        error('%%%s requires at least 3 arguments: %s' % (label, ' '.join(args)))
-        return
     if args[0] == 'set':
         return set(label, args[1:], macros, error)
     elif args[0] == 'add':
         return add(label, args[1:], macros, error)
     elif args[0] == 'setup':
         return setup(label, args[1:], macros, error)
+    elif args[0] == 'download':
+        return download(label, args[1:], macros, error)
     error('invalid %%%s command: %s' % (label, args[0]))
 
 def hash(args, macros, error):
@@ -97,13 +116,17 @@ def hash(args, macros, error):
         return
     _map = 'hashes'
     _file = macros.expand(args[1])
-    if _file in macros.map_keys(_map):
-        error('hash already set: %s' % (args[1]))
-        return
-    macros.create_map(_map)
-    macros.set_write_map(_map)
-    macros.define(_file, '%s %s' % (args[0], args[2]))
-    macros.unset_write_map()
+    new_value = '%s %s' % (args[0], args[2])
+    existing_value = get_hash(_file, macros)
+    if existing_value is not None:
+        if existing_value != new_value:
+            error('conflicting hash definitions for: %s' % (args[1]))
+            return
+    else:
+        macros.create_map(_map)
+        macros.set_write_map(_map)
+        macros.define(_file, new_value)
+        macros.unset_write_map()
     return None
 
 def get(label, name, macros, error):
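
The hash() change above makes a repeated %hash definition acceptable when
the digest matches and reports only a conflicting digest as an error. A
minimal sketch of that behaviour, using a plain dict in place of the RSB
macro maps (the file name and digests are made up):

hashes = {}

def add_hash(algorithm, filename, digest, error):
    new_value = '%s %s' % (algorithm, digest)
    existing_value = hashes.get(filename)
    if existing_value is not None:
        if existing_value != new_value:
            error('conflicting hash definitions for: %s' % (filename))
    else:
        hashes[filename] = new_value

def report(msg):
    print('error: %s' % (msg))

add_hash('sha512', 'gdb-9.1.tar.xz', 'aaaa', report)
add_hash('sha512', 'gdb-9.1.tar.xz', 'aaaa', report)  # same digest, accepted
add_hash('sha512', 'gdb-9.1.tar.xz', 'bbbb', report)  # conflict, reported
print(hashes)
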
diff --git a/source-builder/sb/track.py b/source-builder/sb/track.py
new file mode 100644
index 0000000..cf33a00
--- /dev/null
+++ b/source-builder/sb/track.py
@@ -0,0 +1,250 @@
+#
+# RTEMS Tools Project (http://www.rtems.org/)
+# Copyright 2020 Chris Johns (chrisj at rtems.org)
+# All rights reserved.
+#
+# This file is part of the RTEMS Tools package in 'rtems-tools'.
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+#
+# This code tracks the build set and configuration files a build set
+# depends on for every supported host by dry running the build sets.
+#
+
+from __future__ import print_function
+
+import argparse
+import copy
+import datetime
+import os
+import sys
+
+try:
+    from . import build
+    from . import error
+    from . import git
+    from . import log
+    from . import simhost
+    from . import version
+except KeyboardInterrupt:
+    print('abort: user terminated', file = sys.stderr)
+    sys.exit(1)
+except:
+    raise
+
+def unique(l):
+    return sorted(list(set(l)))
+
+def filter_deps(deps, ext):
+    rdeps = []
+    for d in deps:
+        ds = d.split(':', 2)
+        if ds[0].endswith(ext):
+            rdeps += [ds[0] + ':' + ds[1]]
+    return sorted(rdeps)
+
+def normalise_paths(includes, root):
+    normalised = []
+    for inc in unique(includes):
+        config, parent = inc.split(':', 2)
+        if config.startswith(root):
+            config = config[len(root):]
+        if parent.startswith(root):
+            parent = parent[len(root):]
+        normalised += [config + ':' + parent]
+    return normalised
+
+def process_dependencies(includes):
+    deps = {}
+    incs = [i.split(':', 2) for i in includes]
+    for config, parent in incs:
+        if parent not in deps:
+            deps[parent] = []
+        for inc in incs:
+            if inc[1] == parent:
+                deps[parent] += [inc[0]]
+    for d in deps:
+        deps[d] = unique(deps[d])
+    return deps
+
+def includes_str(includes):
+    o = []
+    deps = [i.split(':', 2) for i in includes]
+    ll = max([len(d[1]) for d in deps])
+    for d in deps:
+        o += ['%*s %s' % (ll, d[1], d[0])]
+    return o
+
+def deps_str(deps):
+    def print_node(deps, node, level = 0, prefix = '', indent = ''):
+        o = []
+        if node != 'root':
+            level += 1
+            if level == 1:
+                o += ['']
+            o += [prefix + '+-- ' +  node]
+        if node in deps:
+            prefix += indent
+            for c, child in enumerate(deps[node], start = 1):
+                if c < len(deps[node]) and level > 1:
+                    indent = '|    '
+                else:
+                    indent = '     '
+                o += print_node(deps, child, level, prefix, indent)
+        return o
+    return print_node(deps, 'root')
+
+def run(args = sys.argv):
+    ec = 0
+    output = []
+    try:
+        #
+        # The RSB options support cannot be used because it loads the defaults
+        # for the host which we cannot do here.
+        #
+        description = 'Track the dependencies a build set has for all hosts.'
+
+        argsp = argparse.ArgumentParser(prog = 'sb-dep-check',
+                                        description = description)
+        argsp.add_argument('--rtems-version', help = 'Set the RTEMS version.',
+                           type = str,
+                           default = version.version())
+        argsp.add_argument('--list-hosts', help = 'List the hosts.',
+                           action = 'store_true')
+        argsp.add_argument('--list-bsets', help = 'List the build sets.',
+                           action = 'store_true')
+        argsp.add_argument('--output', help = 'Output file.',
+                           type = str,
+                           default = None)
+        argsp.add_argument('--log', help = 'Log file.',
+                           type = str,
+                           default = simhost.log_default('trackdeps'))
+        argsp.add_argument('--trace', help = 'Enable trace logging for debugging.',
+                           action = 'store_true')
+        argsp.add_argument('--not-referenced',
+                           help = 'Write out the list of config files not referenced.',
+                           action = 'store_true')
+        argsp.add_argument('bsets', nargs='*', help = 'Build sets.')
+
+        argopts = argsp.parse_args(args[1:])
+
+        simhost.load_log(argopts.log)
+        log.notice('RTEMS Source Builder - Track Dependencies, %s' % (version.string()))
+        log.tracing = argopts.trace
+
+        opts = simhost.load_options(args, argopts, extras = ['--keep-going'])
+        configs = build.get_configs(opts)
+
+        if argopts.list_hosts:
+            simhost.list_hosts()
+        elif argopts.list_bsets:
+            simhost.list_bset_files(opts, configs)
+        else:
+            all_bsets = simhost.get_bset_files(configs)
+            if len(argopts.bsets) == 0:
+                bsets = all_bsets
+            else:
+                bsets = argopts.bsets
+            includes = []
+            errors = []
+            for bset in bsets:
+                b = None
+                try:
+                    for host in simhost.profiles:
+                        b = simhost.buildset(bset, configs, opts)
+                        b.build(host)
+                        includes += b.includes()
+                        errors += b.errors()
+                        del b
+                except error.general as gerr:
+                    log.stderr(str(gerr))
+                    log.stderr('Build FAILED')
+                    if b:
+                        includes += b.includes()
+                        errors += b.errors()
+                b = None
+            root = simhost.get_root(configs)
+            all_configs = simhost.get_config_files(configs, True)
+            includes = normalise_paths(includes, root)
+            bsets = filter_deps(includes, '.bset')
+            configs = filter_deps(includes, '.cfg')
+            deps_tree = deps_str(process_dependencies(bsets + configs))
+            bsets = unique([b.split(':', 2)[0] for b in bsets])
+            configs = unique([i.split(':', 2)[0] for i in configs])
+            not_used_configs = [c for c in all_configs if c not in configs]
+            if len(errors) > 0:
+                errors = [e.split(':', 2)[0] for e in normalise_paths(errors, root)]
+                errs = []
+                for e in errors:
+                    if e not in bsets + configs:
+                        errs += [e]
+                errors = errs
+            output = ['RSB Dependency Tracker',
+                      '',
+                      'Total buildsets: %d' % (len(all_bsets)),
+                      'Total configs: %d' % (len(all_configs)),
+                      '']
+            if len(errors) > 0:
+                output += ['Errored File Set (%d):' % (len(errors)),
+                           ''] + \
+                           errors + \
+                           ['']
+            if len(configs) > 0:
+                output += ['Include Tree(s):',
+                           ''] + \
+                           deps_tree + \
+                           ['']
+            if len(bsets) > 0:
+                output += ['Buildsets (%d):' % (len(bsets)),
+                           ''] + \
+                           bsets + \
+                           ['']
+            if len(configs) > 0:
+                output += ['Configurations (%d):' % (len(configs)),
+                           ''] + \
+                           configs + \
+                           ['']
+            if argopts.not_referenced and len(not_used_configs) > 0:
+                output += ['Not referenced (%d): ' % (len(not_used_configs)),
+                           ''] + \
+                           not_used_configs
+            output = os.linesep.join(output)
+            if argopts.output:
+                o = open(argopts.output, 'w')
+                o.write(output)
+                o.close()
+            else:
+                print()
+                print(output)
+    except error.general as gerr:
+        log.stderr(str(gerr))
+        log.stderr('Build FAILED')
+        ec = 1
+    except error.internal as ierr:
+        log.stderr(str(ierr))
+        log.stderr('Internal Build FAILED')
+        ec = 1
+    except error.exit as eerr:
+        pass
+    except KeyboardInterrupt:
+        log.notice('abort: user terminated')
+        ec = 1
+    except:
+        raise
+        log.notice('abort: unknown error')
+        ec = 1
+    sys.exit(ec)
+
+if __name__ == "__main__":
+    run()
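
The module above is a console tool and the sb-track wrapper listed in the
diffstat presumably does no more than import it and call run(). A hedged
usage sketch, driven directly from Python with example option values:

# Run the dependency tracker the way the sb-track wrapper would. run()
# parses args[1:], so the first element stands in for the program name,
# and it finishes via sys.exit().
import sys

sys.path.insert(0, 'source-builder')   # assumes the RSB top directory
from sb import track

track.run(['sb-track',
           '--log', 'tracker-log.txt',
           '--output', 'tracker-report.txt',
           '--not-referenced'])
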
diff --git a/source-builder/sb/version.py b/source-builder/sb/version.py
index ec6bde7..cfc5e1f 100644
--- a/source-builder/sb/version.py
+++ b/source-builder/sb/version.py
@@ -1,36 +1,84 @@
 #
 # RTEMS Tools Project (http://www.rtems.org/)
-# Copyright 2010-2016 Chris Johns (chrisj at rtems.org)
+# Copyright 2010-2018 Chris Johns (chrisj at rtems.org)
 # All rights reserved.
 #
 # This file is part of the RTEMS Tools package in 'rtems-tools'.
 #
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
 #
-# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
 #
-# To release the RSB create a git archive and then add a suitable VERSION file
-# to the top directory.
+# Releasing RTEMS Tools
+# ---------------------
+#
+# Format:
+#
+#  The format is INI. The file requires a `[version]` section and a `revision`
+#  option:
+#
+#   [version]
+#   revision = <version-string>
+#
+#  The `<version-string>` has the `version` and `revision` delimited by a
+#  single `.`. An example file is:
+#
+#   [version]
+#   revision = 5.0.not_released
+#
+#  where the `version` is `5` and the revision is `0` and the package is not
+#  released. The label `not_released` is reserved to mean the package is not
+#  released. A revision string can contain extra characters after the
+#  `revision` number, for example `5.0-rc1`, or when deploying a package,
+#  `5.0-nasa-cfs`.
+#
+#  Packages can optionally add specialised sections to a version configuration
+#  file. These can be accessed via:
+#
+#   load_release_settings: Return the items in a section
+#   load_release_setting: Return an item from a section
+#
+# User deployment:
+#
+#  Create a git archive and then add a suitable VERSION file to the top
+#  directory of the package. The package assumes your python executable is
+#  located in the `bin` directory, which is one level below the top of the
+#  package's install prefix.
+#
+# Notes:
+#
+#  This module uses os.path for paths and assumes all paths are in the host
+#  format.
 #
 
 from __future__ import print_function
 
+import os
 import sys
 
-import download
-import error
-import git
-import path
-import sources
+from . import error
+from . import git
+from . import path
 
 #
 # Default to an internal string.
@@ -40,6 +88,7 @@ _revision = 'not_released'
 _version_str = '%s.%s' % (_version, _revision)
 _released = False
 _git = False
+_is_loaded = False
 
 def _top():
     top = path.dirname(sys.argv[0])
@@ -48,83 +97,147 @@ def _top():
     return top
 
 def _load_released_version_config():
+    '''Local worker to load a configuration file.'''
     top = _top()
-    for ver in [top, '..']:
-        if path.exists(path.join(ver, 'VERSION')):
+    for ver in [path.join(top, 'VERSION'),
+                path.join('..', 'VERSION')]:
+        if path.exists(path.join(ver)):
             try:
                 import configparser
             except ImportError:
                 import ConfigParser as configparser
             v = configparser.SafeConfigParser()
             try:
-                v.read(path.join(ver, 'VERSION'))
-            except:
-                raise error.general('Invalid VERSION file')
-            return v
-    return None
+                v.read(path.host(ver))
+            except Exception as e:
+                raise error.general('Invalid version config format: %s: %s' % (ver,
+                                                                               e))
+            return ver, v
+    return None, None
 
 def _load_released_version():
+    '''Load the release data if present. If not found the package is not released.
+
+    A release can be made by adding a file called `VERSION` to the top level
+    directory of a package. This is useful for users deploying a package and
+    making custom releases.
+
+    The RTEMS project reserves the `rtems-version.ini` file for its
+    releases. This is the base release and should not be touched by users
+    deploying a package.
+
+    '''
+    global _version
+    global _revision
     global _released
     global _version_str
-    v = _load_released_version_config()
-    if v is not None:
-        try:
-            _version_str = v.get('version', 'release')
-        except:
-            raise error.general('Invalid VERSION file')
-        _released = True
+    global _is_loaded
+
+    if not _is_loaded:
+        vc, v = _load_released_version_config()
+        if v is not None:
+            try:
+                ver_str = v.get('version', 'revision')
+            except Exception as e:
+                raise error.general('Invalid version file: %s: %s' % (vc, e))
+            ver_split = ver_str.split('.', 1)
+            if len(ver_split) < 2:
+                raise error.general('Invalid version release value: %s: %s' % (vc,
+                                                                               ver_str))
+            ver = ver_split[0]
+            rev = ver_split[1]
+            try:
+                _version = int(ver)
+            except:
+                raise error.general('Invalid version config value: %s: %s' % (vc,
+                                                                              ver))
+            _revision = rev
+            if 'not_released' not in ver_str:
+                _released = True
+            _version_str = ver_str
+            _is_loaded = True
     return _released
 
 def _load_git_version():
+    global _version
+    global _revision
     global _git
     global _version_str
-    repo = git.repo(_top())
-    if repo.valid():
-        head = repo.head()
-        if repo.dirty():
-            modified = ' modified'
-        else:
-            modified = ''
-        _version_str = '%s (%s%s)' % (_version, head[0:12], modified)
-        _git = True
+    global _is_loaded
+
+    if not _is_loaded:
+        repo = git.repo(_top())
+        if repo.valid():
+            head = repo.head()
+            if repo.dirty():
+                modified = 'modified'
+                revision_sep = '-'
+                sep = ' '
+            else:
+                modified = ''
+                revision_sep = ''
+                sep = ''
+            _revision = '%s%s%s' % (head[0:12], revision_sep, modified)
+            _version_str = '%s (%s%s%s)' % (_version, head[0:12], sep, modified)
+            _git = True
+            _is_loaded = True
     return _git
 
+def load_release_settings(section, error = False):
+    vc, v = _load_released_version_config()
+    items = []
+    if v is not None:
+        try:
+            items = v.items(section)
+        except Exception as e:
+            if not isinstance(error, bool):
+                error(e)
+            elif error:
+                raise error.general('Invalid config section: %s: %s: %s' % (vc,
+                                                                            section,
+                                                                            e))
+    return items
+
+def load_release_setting(section, option, raw = False, error = False):
+    vc, v = _load_released_version_config()
+    value = None
+    if v is not None:
+        try:
+            value = v.get(section, option, raw = raw)
+        except Exception as e:
+            if not isinstance(error, bool):
+                error(e)
+            elif error:
+                raise error.general('Invalid config section: %s: %s: %s.%s' % (vc,
+                                                                               section,
+                                                                               option,
+                                                                               e))
+    return value
+
 def released():
     return _load_released_version()
 
 def version_control():
     return _load_git_version()
 
-def str():
-    if not _released and not _git:
-        if not _load_released_version():
-            _load_git_version()
+def string():
+    _load_released_version()
+    _load_git_version()
     return _version_str
 
-def load_release_settings(macros):
-    def setting_error(msg):
-        raise error.general(msg)
-
-    if released():
-        v = _load_released_version_config()
-        if v is not None:
-            try:
-                hashes = v.items('hashes')
-            except:
-                hashes = []
-            try:
-                release_path = v.get('version', 'release_path', raw = True)
-            except:
-                release_path = None
-            for hash in hashes:
-                hs = hash[1].split()
-                if len(hs) != 2:
-                    raise error.general('invalid release hash in VERSION')
-                sources.hash((hs[0], hash[0], hs[1]), macros, setting_error)
-            download.set_release_path(release_path, macros)
-
 def version():
+    _load_released_version()
+    _load_git_version()
     return _version
 
+def revision():
+    _load_released_version()
+    _load_git_version()
+    return _revision
+
 if __name__ == '__main__':
-    print('Version: %s' % (str()))
+    print('Version: %s' % (str(version())))
+    print('Revision: %s' % (str(revision())))
+    print('String: %s' % (string()))
+    if version() == 'undefined':
+        raise Exception('version is undefined')
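
The release file format documented at the top of version.py is small
enough to show end to end. A sketch of the parse _load_released_version()
performs, using an inline, made-up revision value instead of a VERSION
file (Python 3 configparser shown for brevity):

import configparser

cfg = configparser.ConfigParser()
cfg.read_string('[version]\nrevision = 5.0-rc1\n')

ver_str = cfg.get('version', 'revision')    # '5.0-rc1'
ver, rev = ver_str.split('.', 1)            # '5' and '0-rc1'
version = int(ver)                          # the version number, 5
released = 'not_released' not in ver_str    # True, so this is a release
print(version, rev, released)
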
diff --git a/source-builder/sb/windows.py b/source-builder/sb/windows.py
index f4eb85c..ceca24f 100644
--- a/source-builder/sb/windows.py
+++ b/source-builder/sb/windows.py
@@ -1,6 +1,6 @@
 #
 # RTEMS Tools Project (http://www.rtems.org/)
-# Copyright 2010-2013 Chris Johns (chrisj at rtems.org)
+# Copyright 2010-2018 Chris Johns (chrisj at rtems.org)
 # All rights reserved.
 #
 # This file is part of the RTEMS Tools package in 'rtems-tools'.
@@ -21,12 +21,12 @@
 # Windows specific support and overrides.
 #
 
-import error
-import pprint
 import os
 import sys
 
-import execute
+from . import error
+from . import execute
+from . import path
 
 def load():
     # Default to the native Windows Python.
@@ -78,6 +78,7 @@ def load():
     defines = {
         '_ncpus':            ('none',    'none',     ncpus),
         '_os':               ('none',    'none',     'win32'),
+        '_windows_os':       ('none',    'none',     uname),
         '_build':            ('triplet', 'required', build_triple),
         '_build_vendor':     ('none',    'none',     'microsoft'),
         '_build_os':         ('none',    'none',     'win32'),
@@ -101,6 +102,7 @@ def load():
         '__chgrp':           ('exe',     'required', 'chgrp'),
         '__chmod':           ('exe',     'required', 'chmod'),
         '__chown':           ('exe',     'required', 'chown'),
+        '__cmake':           ('exe',     'optional', 'cmake'),
         '__cp':              ('exe',     'required', 'cp'),
         '__cvs':             ('exe',     'optional', 'cvs'),
         '__cxx':             ('exe',     'required', cxx),
@@ -125,7 +127,7 @@ def load():
         '__rm':              ('exe',     'required', 'rm'),
         '__sed':             ('exe',     'required', 'sed'),
         '__sh':              ('exe',     'required', 'sh'),
-        '__tar':             ('exe',     'required', 'bsdtar'),
+        '__tar':             ('exe',     'required', 'tar'),
         '__touch':           ('exe',     'required', 'touch'),
         '__unzip':           ('exe',     'required', 'unzip'),
         '__xz':              ('exe',     'required', 'xz'),
@@ -146,9 +148,15 @@ def load():
     #  6. W64/Python2 - Ok if machsize is 32
     #  7. W64/Python3 - gdb-7.9 needs python2.
     #
-    if sys.platform == 'win32' and 'MSC' in sys.version:
-        raise error.general('python.org Pythons are built with MSC and cannot be linked with GDB')
-
+    # Find a suitable python2 and python3.
+    #
+    for p in os.environ['PATH'].split(os.pathsep):
+        sh = os.path.join(p, 'sh.exe')
+        if os.path.exists(sh) and os.path.isfile(sh):
+            break
+        sh = None
+    if sh is None:
+        raise error.general('cannot find a shell (sh.exe) in the PATH')
     #
     # Search the MSYS2 install tree for a suitable python.
     #
@@ -166,23 +174,19 @@ def load():
             raise error.general('cannot locate MSYS root mount point')
         if install_point[1] != ':':
             raise error.general('invalid MSYS root mount point: %s' % install_point)
-        install_point = '/%s%s' % (install_point[0], install_point[2:])
-        bin = '/mingw%s/bin' % (machsize)
-        bin_list = os.listdir(bin)
-        exe = None
-        for python in ['python2.exe']:
-            for f in bin_list:
-                if f == python:
-                    exe = install_point + os.path.join(bin, f)
-                    break;
-            if exe is not None:
-                break
-        if exe is None:
-            raise error.general('no valid python found; you need a mingw%s python2 installed' % (machsize))
-        defines['with_python_path'] = exe
-
+        install_point = path.shell(install_point)
+        mingw = path.join(install_point, 'mingw%s' % (machsize))
+        if not path.exists(mingw) or not path.isdir(mingw):
+            raise error.general('cannot find MinGW install: %s' % (path.host(mingw)))
+        for version in ['2', '3']:
+            python = 'python%s' % (version)
+            exe = path.join(mingw, 'bin', '%s.exe' % (python))
+            if path.exists(exe) and not path.isdir(exe):
+                defines['gdb_python%s' % (version)] = exe
+                header = path.join(mingw, python)
 
     return defines
 
 if __name__ == '__main__':
+    import pprint
     pprint.pprint(load())
diff --git a/source-builder/sha512-base64 b/source-builder/sha512-base64
new file mode 100755
index 0000000..e6c8e1a
--- /dev/null
+++ b/source-builder/sha512-base64
@@ -0,0 +1,2 @@
+#! /bin/sh
+openssl dgst -binary -sha512 $1 | openssl base64 | sed '{N;s/\n//;}'
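
The helper above pipes the raw SHA-512 digest of a file through base64 and
joins the wrapped output onto one line. The same value can be produced from
Python where openssl is not available (the file name is whatever source
archive you want to hash):

# Base64 encoding of a file's raw SHA-512 digest, equivalent to the
# sha512-base64 shell helper.
import base64
import hashlib
import sys

def sha512_base64(filename):
    h = hashlib.sha512()
    with open(filename, 'rb') as f:
        for block in iter(lambda: f.read(64 * 1024), b''):
            h.update(block)
    return base64.b64encode(h.digest()).decode('ascii')

if __name__ == '__main__':
    print(sha512_base64(sys.argv[1]))
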
-- 
2.24.1


