From 4be819c27f627d0fad0f63cb5817bfca8460541a Mon Sep 17 00:00:00 2001
From: David Robillard <d@drobilla.net>
Date: Sun, 21 Apr 2019 22:53:32 +0200
Subject: Switch to using a submodule for autowaf

---
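Note: with waflib now tracked as a submodule (pinned at commit 2314e23,
per the diff below), a fresh checkout needs the submodule initialized
before building, for example:

    git submodule update --init

This assumes the submodule URL is recorded in .gitmodules, which is not
part of the diff shown here.
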
 waflib                                      |    1 +
 waflib/extras/__init__.py                   |    3 -
 waflib/extras/autowaf.py                    | 1424 ---------------------------
 waflib/extras/batched_cc.py                 |  173 ----
 waflib/extras/biber.py                      |   58 --
 waflib/extras/bjam.py                       |  128 ---
 waflib/extras/blender.py                    |  108 --
 waflib/extras/boo.py                        |   81 --
 waflib/extras/boost.py                      |  525 ----------
 waflib/extras/build_file_tracker.py         |   28 -
 waflib/extras/build_logs.py                 |  110 ---
 waflib/extras/buildcopy.py                  |   85 --
 waflib/extras/c_bgxlc.py                    |   32 -
 waflib/extras/c_dumbpreproc.py              |   72 --
 waflib/extras/c_emscripten.py               |   87 --
 waflib/extras/c_nec.py                      |   74 --
 waflib/extras/cabal.py                      |  152 ---
 waflib/extras/cfg_altoptions.py             |  110 ---
 waflib/extras/clang_compilation_database.py |   85 --
 waflib/extras/codelite.py                   |  875 ----------------
 waflib/extras/color_gcc.py                  |   39 -
 waflib/extras/color_rvct.py                 |   51 -
 waflib/extras/compat15.py                   |  406 --------
 waflib/extras/cppcheck.py                   |  591 -----------
 waflib/extras/cpplint.py                    |  209 ----
 waflib/extras/cross_gnu.py                  |  227 -----
 waflib/extras/cython.py                     |  147 ---
 waflib/extras/dcc.py                        |   72 --
 waflib/extras/distnet.py                    |  430 --------
 waflib/extras/doxygen.py                    |  227 -----
 waflib/extras/dpapi.py                      |   87 --
 waflib/extras/eclipse.py                    |  431 --------
 waflib/extras/erlang.py                     |  110 ---
 waflib/extras/fast_partial.py               |  518 ----------
 waflib/extras/fc_bgxlf.py                   |   32 -
 waflib/extras/fc_cray.py                    |   51 -
 waflib/extras/fc_nag.py                     |   61 --
 waflib/extras/fc_nec.py                     |   60 --
 waflib/extras/fc_nfort.py                   |   52 -
 waflib/extras/fc_open64.py                  |   58 --
 waflib/extras/fc_pgfortran.py               |   68 --
 waflib/extras/fc_solstudio.py               |   62 --
 waflib/extras/fc_xlf.py                     |   63 --
 waflib/extras/file_to_object.py             |  137 ---
 waflib/extras/fluid.py                      |   30 -
 waflib/extras/freeimage.py                  |   74 --
 waflib/extras/fsb.py                        |   31 -
 waflib/extras/fsc.py                        |   64 --
 waflib/extras/gccdeps.py                    |  214 ----
 waflib/extras/gdbus.py                      |   87 --
 waflib/extras/gob2.py                       |   17 -
 waflib/extras/halide.py                     |  151 ---
 waflib/extras/javatest.py                   |  118 ---
 waflib/extras/kde4.py                       |   93 --
 waflib/extras/local_rpath.py                |   19 -
 waflib/extras/lv2.py                        |   75 --
 waflib/extras/make.py                       |  142 ---
 waflib/extras/midl.py                       |   69 --
 waflib/extras/msvcdeps.py                   |  256 -----
 waflib/extras/msvs.py                       | 1048 --------------------
 waflib/extras/netcache_client.py            |  390 --------
 waflib/extras/objcopy.py                    |   50 -
 waflib/extras/ocaml.py                      |  348 -------
 waflib/extras/package.py                    |   76 --
 waflib/extras/parallel_debug.py             |  462 ---------
 waflib/extras/pch.py                        |  148 ---
 waflib/extras/pep8.py                       |  106 --
 waflib/extras/pgicc.py                      |   75 --
 waflib/extras/pgicxx.py                     |   20 -
 waflib/extras/proc.py                       |   54 -
 waflib/extras/protoc.py                     |  223 -----
 waflib/extras/pyqt5.py                      |  241 -----
 waflib/extras/pytest.py                     |  225 -----
 waflib/extras/qnxnto.py                     |   72 --
 waflib/extras/qt4.py                        |  695 -------------
 waflib/extras/relocation.py                 |   85 --
 waflib/extras/remote.py                     |  327 ------
 waflib/extras/resx.py                       |   35 -
 waflib/extras/review.py                     |  325 ------
 waflib/extras/rst.py                        |  260 -----
 waflib/extras/run_do_script.py              |  139 ---
 waflib/extras/run_m_script.py               |   88 --
 waflib/extras/run_py_script.py              |  104 --
 waflib/extras/run_r_script.py               |   86 --
 waflib/extras/sas.py                        |   71 --
 waflib/extras/satellite_assembly.py         |   57 --
 waflib/extras/scala.py                      |  128 ---
 waflib/extras/slow_qt4.py                   |   96 --
 waflib/extras/softlink_libs.py              |   76 --
 waflib/extras/stale.py                      |   98 --
 waflib/extras/stracedeps.py                 |  174 ----
 waflib/extras/swig.py                       |  237 -----
 waflib/extras/syms.py                       |   84 --
 waflib/extras/ticgt.py                      |  300 ------
 waflib/extras/unity.py                      |  108 --
 waflib/extras/use_config.py                 |  185 ----
 waflib/extras/valadoc.py                    |  140 ---
 waflib/extras/waf_xattr.py                  |  150 ---
 waflib/extras/why.py                        |   78 --
 waflib/extras/win32_opts.py                 |  170 ----
 waflib/extras/wix.py                        |   87 --
 waflib/extras/xcode6.py                     |  727 --------------
 102 files changed, 1 insertion(+), 18487 deletions(-)
 create mode 160000 waflib
 delete mode 100644 waflib/extras/__init__.py
 delete mode 100644 waflib/extras/autowaf.py
 delete mode 100644 waflib/extras/batched_cc.py
 delete mode 100644 waflib/extras/biber.py
 delete mode 100644 waflib/extras/bjam.py
 delete mode 100644 waflib/extras/blender.py
 delete mode 100644 waflib/extras/boo.py
 delete mode 100644 waflib/extras/boost.py
 delete mode 100644 waflib/extras/build_file_tracker.py
 delete mode 100644 waflib/extras/build_logs.py
 delete mode 100644 waflib/extras/buildcopy.py
 delete mode 100644 waflib/extras/c_bgxlc.py
 delete mode 100644 waflib/extras/c_dumbpreproc.py
 delete mode 100644 waflib/extras/c_emscripten.py
 delete mode 100644 waflib/extras/c_nec.py
 delete mode 100644 waflib/extras/cabal.py
 delete mode 100644 waflib/extras/cfg_altoptions.py
 delete mode 100644 waflib/extras/clang_compilation_database.py
 delete mode 100644 waflib/extras/codelite.py
 delete mode 100644 waflib/extras/color_gcc.py
 delete mode 100644 waflib/extras/color_rvct.py
 delete mode 100644 waflib/extras/compat15.py
 delete mode 100644 waflib/extras/cppcheck.py
 delete mode 100644 waflib/extras/cpplint.py
 delete mode 100644 waflib/extras/cross_gnu.py
 delete mode 100644 waflib/extras/cython.py
 delete mode 100644 waflib/extras/dcc.py
 delete mode 100644 waflib/extras/distnet.py
 delete mode 100644 waflib/extras/doxygen.py
 delete mode 100644 waflib/extras/dpapi.py
 delete mode 100644 waflib/extras/eclipse.py
 delete mode 100644 waflib/extras/erlang.py
 delete mode 100644 waflib/extras/fast_partial.py
 delete mode 100644 waflib/extras/fc_bgxlf.py
 delete mode 100644 waflib/extras/fc_cray.py
 delete mode 100644 waflib/extras/fc_nag.py
 delete mode 100644 waflib/extras/fc_nec.py
 delete mode 100644 waflib/extras/fc_nfort.py
 delete mode 100644 waflib/extras/fc_open64.py
 delete mode 100644 waflib/extras/fc_pgfortran.py
 delete mode 100644 waflib/extras/fc_solstudio.py
 delete mode 100644 waflib/extras/fc_xlf.py
 delete mode 100644 waflib/extras/file_to_object.py
 delete mode 100644 waflib/extras/fluid.py
 delete mode 100644 waflib/extras/freeimage.py
 delete mode 100644 waflib/extras/fsb.py
 delete mode 100644 waflib/extras/fsc.py
 delete mode 100644 waflib/extras/gccdeps.py
 delete mode 100644 waflib/extras/gdbus.py
 delete mode 100644 waflib/extras/gob2.py
 delete mode 100644 waflib/extras/halide.py
 delete mode 100755 waflib/extras/javatest.py
 delete mode 100644 waflib/extras/kde4.py
 delete mode 100644 waflib/extras/local_rpath.py
 delete mode 100644 waflib/extras/lv2.py
 delete mode 100644 waflib/extras/make.py
 delete mode 100644 waflib/extras/midl.py
 delete mode 100644 waflib/extras/msvcdeps.py
 delete mode 100644 waflib/extras/msvs.py
 delete mode 100644 waflib/extras/netcache_client.py
 delete mode 100644 waflib/extras/objcopy.py
 delete mode 100644 waflib/extras/ocaml.py
 delete mode 100644 waflib/extras/package.py
 delete mode 100644 waflib/extras/parallel_debug.py
 delete mode 100644 waflib/extras/pch.py
 delete mode 100644 waflib/extras/pep8.py
 delete mode 100644 waflib/extras/pgicc.py
 delete mode 100644 waflib/extras/pgicxx.py
 delete mode 100644 waflib/extras/proc.py
 delete mode 100644 waflib/extras/protoc.py
 delete mode 100644 waflib/extras/pyqt5.py
 delete mode 100644 waflib/extras/pytest.py
 delete mode 100644 waflib/extras/qnxnto.py
 delete mode 100644 waflib/extras/qt4.py
 delete mode 100644 waflib/extras/relocation.py
 delete mode 100644 waflib/extras/remote.py
 delete mode 100644 waflib/extras/resx.py
 delete mode 100644 waflib/extras/review.py
 delete mode 100644 waflib/extras/rst.py
 delete mode 100644 waflib/extras/run_do_script.py
 delete mode 100644 waflib/extras/run_m_script.py
 delete mode 100644 waflib/extras/run_py_script.py
 delete mode 100644 waflib/extras/run_r_script.py
 delete mode 100644 waflib/extras/sas.py
 delete mode 100644 waflib/extras/satellite_assembly.py
 delete mode 100644 waflib/extras/scala.py
 delete mode 100644 waflib/extras/slow_qt4.py
 delete mode 100644 waflib/extras/softlink_libs.py
 delete mode 100644 waflib/extras/stale.py
 delete mode 100644 waflib/extras/stracedeps.py
 delete mode 100644 waflib/extras/swig.py
 delete mode 100644 waflib/extras/syms.py
 delete mode 100644 waflib/extras/ticgt.py
 delete mode 100644 waflib/extras/unity.py
 delete mode 100644 waflib/extras/use_config.py
 delete mode 100644 waflib/extras/valadoc.py
 delete mode 100644 waflib/extras/waf_xattr.py
 delete mode 100644 waflib/extras/why.py
 delete mode 100644 waflib/extras/win32_opts.py
 delete mode 100644 waflib/extras/wix.py
 delete mode 100644 waflib/extras/xcode6.py

diff --git a/waflib b/waflib
new file mode 160000
index 0000000..2314e23
--- /dev/null
+++ b/waflib
@@ -0,0 +1 @@
+Subproject commit 2314e236ca6e7d94a26c3c17091da0f25f5867f3
diff --git a/waflib/extras/__init__.py b/waflib/extras/__init__.py
deleted file mode 100644
index c8a3c34..0000000
--- a/waflib/extras/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
diff --git a/waflib/extras/autowaf.py b/waflib/extras/autowaf.py
deleted file mode 100644
index 870a69a..0000000
--- a/waflib/extras/autowaf.py
+++ /dev/null
@@ -1,1424 +0,0 @@
-import glob
-import os
-import subprocess
-import sys
-import time
-
-from waflib import Configure, ConfigSet, Build, Context, Logs, Options, Utils
-from waflib.TaskGen import feature, before, after
-
-NONEMPTY = -10
-
-if sys.platform == 'win32':
-    lib_path_name = 'PATH'
-elif sys.platform == 'darwin':
-    lib_path_name = 'DYLD_LIBRARY_PATH'
-else:
-    lib_path_name = 'LD_LIBRARY_PATH'
-
-# Compute dependencies globally
-# import preproc
-# preproc.go_absolute = True
-
-@feature('c', 'cxx')
-@after('apply_incpaths')
-def include_config_h(self):
-    self.env.append_value('INCPATHS', self.bld.bldnode.abspath())
-
-class OptionsContext(Options.OptionsContext):
-    def __init__(self, **kwargs):
-        super(OptionsContext, self).__init__(**kwargs)
-        set_options(self)
-
-    def configuration_options(self):
-        return self.get_option_group('Configuration options')
-
-    def add_flags(self, group, flags):
-        """Tersely add flags (a dictionary of longname:desc) to a group"""
-        for name, desc in flags.items():
-            group.add_option('--' + name, action='store_true',
-                             dest=name.replace('-', '_'), help=desc)
-
-def set_options(opt, debug_by_default=False):
-    "Add standard autowaf options"
-    opts = opt.get_option_group('Configuration options')
-
-    # Standard directory options
-    opts.add_option('--bindir', type='string',
-                    help="executable programs [default: PREFIX/bin]")
-    opts.add_option('--configdir', type='string',
-                    help="configuration data [default: PREFIX/etc]")
-    opts.add_option('--datadir', type='string',
-                    help="shared data [default: PREFIX/share]")
-    opts.add_option('--includedir', type='string',
-                    help="header files [default: PREFIX/include]")
-    opts.add_option('--libdir', type='string',
-                    help="libraries [default: PREFIX/lib]")
-    opts.add_option('--mandir', type='string',
-                    help="manual pages [default: DATADIR/man]")
-    opts.add_option('--docdir', type='string',
-                    help="HTML documentation [default: DATADIR/doc]")
-
-    # Build options
-    if debug_by_default:
-        opts.add_option('--optimize', action='store_false', default=True,
-                        dest='debug', help="build optimized binaries")
-    else:
-        opts.add_option('-d', '--debug', action='store_true', default=False,
-                        dest='debug', help="build debuggable binaries")
-        opts.add_option('--pardebug', action='store_true', default=False,
-                        dest='pardebug',
-                        help="build debug libraries with D suffix")
-
-    opts.add_option('-s', '--strict', action='store_true', default=False,
-                    dest='strict',
-                    help="use strict compiler flags and show all warnings")
-    opts.add_option('-S', '--ultra-strict', action='store_true', default=False,
-                    dest='ultra_strict',
-                    help="use extremely strict compiler flags (likely noisy)")
-    opts.add_option('--docs', action='store_true', default=False, dest='docs',
-                    help="build documentation (requires doxygen)")
-
-    # Test options
-    if hasattr(Context.g_module, 'test'):
-        test_opts = opt.add_option_group('Test options', '')
-        opts.add_option('-T', '--test', action='store_true', dest='build_tests',
-                        help='build unit tests')
-        opts.add_option('--no-coverage', action='store_true',
-                        dest='no_coverage',
-                        help='do not instrument code for test coverage')
-        test_opts.add_option('--wrapper', type='string',
-                             dest='test_wrapper',
-                             help='command prefix for tests (e.g. valgrind)')
-        test_opts.add_option('--test-filter', type='string',
-                             dest='test_filter',
-                             help='regular expression for tests to run')
-
-    # Run options
-    run_opts = opt.add_option_group('Run options')
-    run_opts.add_option('--cmd', type='string', dest='cmd',
-                        help='command to run from build directory')
-
-class ConfigureContext(Configure.ConfigurationContext):
-    """configures the project"""
-
-    def __init__(self, **kwargs):
-        self.line_just = 45
-        if hasattr(Context.g_module, 'line_just'):
-            self.line_just = Context.g_module.line_just
-
-        super(ConfigureContext, self).__init__(**kwargs)
-        self.run_env = ConfigSet.ConfigSet()
-        self.system_include_paths = set()
-
-    def pre_recurse(self, node):
-        if len(self.stack_path) == 1:
-            Logs.pprint('BOLD', 'Configuring %s' % node.parent.srcpath())
-        super(ConfigureContext, self).pre_recurse(node)
-
-    def store(self):
-        self.env.AUTOWAF_RUN_ENV = self.run_env.get_merged_dict()
-        for path in sorted(self.system_include_paths):
-            if 'COMPILER_CC' in self.env:
-                self.env.append_value('CFLAGS', ['-isystem', path])
-            if 'COMPILER_CXX' in self.env:
-                self.env.append_value('CXXFLAGS', ['-isystem', path])
-
-        super(ConfigureContext, self).store()
-
-    def build_path(self, path='.'):
-        """Return `path` within the build directory"""
-        return str(self.path.get_bld().find_node(path))
-
-def get_check_func(conf, lang):
-    if lang == 'c':
-        return conf.check_cc
-    elif lang == 'cxx':
-        return conf.check_cxx
-    else:
-        Logs.error("Unknown header language `%s'" % lang)
-
-def check_header(conf, lang, name, define='', mandatory=True):
-    "Check for a header"
-    check_func = get_check_func(conf, lang)
-    if define != '':
-        check_func(header_name=name,
-                   define_name=define,
-                   mandatory=mandatory)
-    else:
-        check_func(header_name=name, mandatory=mandatory)
-
-def check_function(conf, lang, name, **args):
-    "Check for a function"
-    header_names = Utils.to_list(args['header_name'])
-    includes = ''.join(['#include <%s>\n' % x for x in header_names])
-    fragment = '''
-%s
-int main() { return !(void(*)())(%s); }
-''' % (includes, name)
-
-    check_func  = get_check_func(conf, lang)
-    args['msg'] = 'Checking for %s' % name
-    check_func(fragment=fragment, **args)
-
-def nameify(name):
-    return (name.replace('/', '_').replace('++', 'PP')
-            .replace('-', '_').replace('.', '_'))
-
-def define(conf, var_name, value):
-    conf.define(var_name, value)
-    conf.env[var_name] = value
-
-def check_pkg(conf, name, **args):
-    "Check for a package iff it hasn't been checked for yet"
-    if (args['uselib_store'].lower() in conf.env['AUTOWAF_LOCAL_LIBS'] or
-        args['uselib_store'].lower() in conf.env['AUTOWAF_LOCAL_HEADERS']):
-        return
-
-    class CheckType:
-        OPTIONAL = 1
-        MANDATORY = 2
-
-    var_name = 'CHECKED_' + nameify(args['uselib_store'])
-    check = var_name not in conf.env
-    mandatory = 'mandatory' not in args or args['mandatory']
-    if not check and 'atleast_version' in args:
-        # Re-check if version is newer than previous check
-        checked_version = conf.env['VERSION_' + name]
-        if checked_version and checked_version < args['atleast_version']:
-            check = True
-    if not check and mandatory and conf.env[var_name] == CheckType.OPTIONAL:
-        # Re-check if previous check was optional but this one is mandatory
-        check = True
-    if check:
-        found = None
-        pkg_var_name = 'PKG_' + name.replace('-', '_')
-        pkg_name = name
-        if conf.env.PARDEBUG:
-            args['mandatory'] = False  # Smash mandatory arg
-            found = conf.check_cfg(package=pkg_name + 'D',
-                                   args="--cflags --libs", **args)
-            if found:
-                pkg_name += 'D'
-        if mandatory:
-            args['mandatory'] = True  # Unsmash mandatory arg
-        if not found:
-            found = conf.check_cfg(package=pkg_name, args="--cflags --libs",
-                                   **args)
-        if found:
-            conf.env[pkg_var_name] = pkg_name
-        if 'atleast_version' in args:
-            conf.env['VERSION_' + name] = args['atleast_version']
-    if mandatory:
-        conf.env[var_name] = CheckType.MANDATORY
-    else:
-        conf.env[var_name] = CheckType.OPTIONAL
-
-    if not conf.env.MSVC_COMPILER and 'system' in args and args['system']:
-        conf.system_include_paths.update(
-            conf.env['INCLUDES_' + nameify(args['uselib_store'])])
-
-def normpath(path):
-    if sys.platform == 'win32':
-        return os.path.normpath(path).replace('\\', '/')
-    else:
-        return os.path.normpath(path)
-
-def configure(conf):
-    def append_cxx_flags(flags):
-        conf.env.append_value('CFLAGS', flags)
-        conf.env.append_value('CXXFLAGS', flags)
-
-    if Options.options.docs:
-        conf.load('doxygen')
-
-    try:
-        conf.load('clang_compilation_database')
-    except Exception:
-        pass
-
-    prefix = normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX'])))
-
-    conf.env['DOCS'] = Options.options.docs and conf.env.DOXYGEN
-    conf.env['DEBUG'] = Options.options.debug or Options.options.pardebug
-    conf.env['PARDEBUG'] = Options.options.pardebug
-    conf.env['PREFIX'] = prefix
-
-    def config_dir(var, opt, default):
-        if opt:
-            conf.env[var] = normpath(opt)
-        else:
-            conf.env[var] = normpath(default)
-
-    opts = Options.options
-
-    config_dir('BINDIR',     opts.bindir,     os.path.join(prefix,  'bin'))
-    config_dir('SYSCONFDIR', opts.configdir,  os.path.join(prefix,  'etc'))
-    config_dir('DATADIR',    opts.datadir,    os.path.join(prefix,  'share'))
-    config_dir('INCLUDEDIR', opts.includedir, os.path.join(prefix,  'include'))
-    config_dir('LIBDIR',     opts.libdir,     os.path.join(prefix,  'lib'))
-
-    datadir = conf.env['DATADIR']
-    config_dir('MANDIR', opts.mandir, os.path.join(datadir, 'man'))
-    config_dir('DOCDIR', opts.docdir, os.path.join(datadir, 'doc'))
-
-    if Options.options.debug:
-        if conf.env['MSVC_COMPILER']:
-            conf.env['CFLAGS']    = ['/Od', '/Z7', '/MTd', '/FS']
-            conf.env['CXXFLAGS']  = ['/Od', '/Z7', '/MTd', '/FS']
-            conf.env['LINKFLAGS'] = ['/DEBUG', '/MANIFEST']
-        else:
-            conf.env['CFLAGS']   = ['-O0', '-g']
-            conf.env['CXXFLAGS'] = ['-O0', '-g']
-    else:
-        if conf.env['MSVC_COMPILER']:
-            append_cxx_flags(['/MD', '/FS', '/DNDEBUG'])
-        else:
-            append_cxx_flags(['-DNDEBUG'])
-
-    if conf.env.MSVC_COMPILER:
-        Options.options.no_coverage = True
-        append_cxx_flags(['/nologo',
-                          '/FS',
-                          '/DNDEBUG',
-                          '/D_CRT_SECURE_NO_WARNINGS',
-                          '/experimental:external',
-                          '/external:W0',
-                          '/external:anglebrackets'])
-        conf.env.append_value('LINKFLAGS', '/nologo')
-        if Options.options.strict or Options.options.ultra_strict:
-            ms_strict_flags = ['/Wall',
-                               '/wd4061',
-                               '/wd4200',
-                               '/wd4514',
-                               '/wd4571',
-                               '/wd4625',
-                               '/wd4626',
-                               '/wd4706',
-                               '/wd4710',
-                               '/wd4820',
-                               '/wd5026',
-                               '/wd5027',
-                               '/wd5045']
-            conf.env.append_value('CFLAGS', ms_strict_flags)
-            conf.env.append_value('CXXFLAGS', ms_strict_flags)
-            conf.env.append_value('CXXFLAGS', ['/EHsc'])
-    else:
-        if Options.options.ultra_strict:
-            Options.options.strict = True
-            conf.env.append_value('CFLAGS', ['-Wredundant-decls',
-                                             '-Wstrict-prototypes',
-                                             '-Wmissing-prototypes',
-                                             '-Wcast-qual'])
-            conf.env.append_value('CXXFLAGS', ['-Wcast-qual'])
-
-        if Options.options.strict:
-            conf.env.append_value('CFLAGS', ['-pedantic', '-Wshadow'])
-            if conf.env.DEST_OS != "darwin":
-                conf.env.append_value('LINKFLAGS', ['-Wl,--no-undefined'])
-            conf.env.append_value('CXXFLAGS', ['-Wnon-virtual-dtor',
-                                               '-Woverloaded-virtual'])
-            append_cxx_flags(['-Wall',
-                              '-Wcast-align',
-                              '-Wextra',
-                              '-Wmissing-declarations',
-                              '-Wno-unused-parameter',
-                              '-Wstrict-overflow',
-                              '-Wundef',
-                              '-Wwrite-strings',
-                              '-fstrict-overflow'])
-
-            # Add less universal flags after checking they work
-            extra_flags = ['-Wlogical-op',
-                           '-Wsuggest-attribute=noreturn',
-                           '-Wunsafe-loop-optimizations']
-            if conf.check_cc(cflags=['-Werror'] + extra_flags, mandatory=False,
-                             msg="Checking for extra C warning flags"):
-                conf.env.append_value('CFLAGS', extra_flags)
-            if 'COMPILER_CXX' in conf.env:
-                if conf.check_cxx(cxxflags=['-Werror'] + extra_flags,
-                                  mandatory=False,
-                                  msg="Checking for extra C++ warning flags"):
-                    conf.env.append_value('CXXFLAGS', extra_flags)
-
-    if not conf.env['MSVC_COMPILER']:
-        append_cxx_flags(['-fshow-column'])
-
-    conf.env.NO_COVERAGE = True
-    conf.env.BUILD_TESTS = False
-    try:
-        conf.env.BUILD_TESTS = Options.options.build_tests
-        conf.env.NO_COVERAGE = Options.options.no_coverage
-        if not Options.options.no_coverage:
-            # Set up unit test code coverage
-            if conf.is_defined('CLANG'):
-                for cov in [conf.env.CC[0].replace('clang', 'llvm-cov'),
-                            'llvm-cov']:
-                    if conf.find_program(cov, var='LLVM_COV', mandatory=False):
-                        break
-            else:
-                conf.check_cc(lib='gcov', define_name='HAVE_GCOV',
-                              mandatory=False)
-    except Exception:
-        pass  # Test options do not exist
-
-    # Define version in configuration
-    appname = getattr(Context.g_module, Context.APPNAME, 'noname')
-    version = getattr(Context.g_module, Context.VERSION, '0.0.0')
-    defname = appname.upper().replace('-', '_').replace('.', '_')
-    define(conf, defname + '_VERSION', version)
-
-    conf.env.prepend_value('CFLAGS', '-I' + os.path.abspath('.'))
-    conf.env.prepend_value('CXXFLAGS', '-I' + os.path.abspath('.'))
-
-def display_summary(conf, msgs=None):
-    if len(conf.stack_path) == 1:
-        display_msg(conf, "Install prefix", conf.env['PREFIX'])
-        if 'COMPILER_CC' in conf.env:
-            display_msg(conf, "C Flags", ' '.join(conf.env['CFLAGS']))
-        if 'COMPILER_CXX' in conf.env:
-            display_msg(conf, "C++ Flags", ' '.join(conf.env['CXXFLAGS']))
-        display_msg(conf, "Debuggable", bool(conf.env['DEBUG']))
-        display_msg(conf, "Build documentation", bool(conf.env['DOCS']))
-
-    if msgs is not None:
-        display_msgs(conf, msgs)
-
-def set_c_lang(conf, lang):
-    "Set a specific C language standard, like 'c99' or 'c11'"
-    if conf.env.MSVC_COMPILER:
-        # MSVC has no hope or desire to compile C99, just compile as C++
-        conf.env.append_unique('CFLAGS', ['/TP'])
-    else:
-        flag = '-std=%s' % lang
-        conf.check(cflags=['-Werror', flag],
-                   msg="Checking for flag '%s'" % flag)
-        conf.env.append_unique('CFLAGS', [flag])
-
-def set_cxx_lang(conf, lang):
-    "Set a specific C++ language standard, like 'c++11', 'c++14', or 'c++17'"
-    if conf.env.MSVC_COMPILER:
-        if lang != 'c++14':
-            lang = 'c++latest'
-        conf.env.append_unique('CXXFLAGS', ['/std:%s' % lang])
-    else:
-        flag = '-std=%s' % lang
-        conf.check(cxxflags=['-Werror', flag],
-                   msg="Checking for flag '%s'" % flag)
-        conf.env.append_unique('CXXFLAGS', [flag])
-
-def set_modern_c_flags(conf):
-    "Use the most modern C language available"
-    if 'COMPILER_CC' in conf.env:
-        if conf.env.MSVC_COMPILER:
-            # MSVC has no hope or desire to compile C99, just compile as C++
-            conf.env.append_unique('CFLAGS', ['/TP'])
-        else:
-            for flag in ['-std=c11', '-std=c99']:
-                if conf.check(cflags=['-Werror', flag], mandatory=False,
-                              msg="Checking for flag '%s'" % flag):
-                    conf.env.append_unique('CFLAGS', [flag])
-                    break
-
-def set_modern_cxx_flags(conf, mandatory=False):
-    "Use the most modern C++ language available"
-    if 'COMPILER_CXX' in conf.env:
-        if conf.env.MSVC_COMPILER:
-            conf.env.append_unique('CXXFLAGS', ['/std:c++latest'])
-        else:
-            for lang in ['c++14', 'c++1y', 'c++11', 'c++0x']:
-                flag = '-std=%s' % lang
-                if conf.check(cxxflags=['-Werror', flag], mandatory=False,
-                              msg="Checking for flag '%s'" % flag):
-                    conf.env.append_unique('CXXFLAGS', [flag])
-                    break
-
-def set_local_lib(conf, name, has_objects):
-    var_name = 'HAVE_' + nameify(name.upper())
-    define(conf, var_name, 1)
-    if has_objects:
-        if type(conf.env['AUTOWAF_LOCAL_LIBS']) != dict:
-            conf.env['AUTOWAF_LOCAL_LIBS'] = {}
-        conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()] = True
-    else:
-        if type(conf.env['AUTOWAF_LOCAL_HEADERS']) != dict:
-            conf.env['AUTOWAF_LOCAL_HEADERS'] = {}
-        conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()] = True
-
-def append_property(obj, key, val):
-    if hasattr(obj, key):
-        setattr(obj, key, getattr(obj, key) + val)
-    else:
-        setattr(obj, key, val)
-
-@feature('c', 'cxx')
-@before('apply_link')
-def version_lib(self):
-    if self.env.DEST_OS == 'win32':
-        self.vnum = None  # Prevent waf from automatically appending -0
-    if self.env['PARDEBUG']:
-        applicable = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib']
-        if [x for x in applicable if x in self.features]:
-            self.target = self.target + 'D'
-
-def set_lib_env(conf,
-                name,
-                version,
-                has_objects=True,
-                include_path=None,
-                lib_path=None):
-    "Set up environment for local library as if found via pkg-config."
-    NAME         = name.upper()
-    major_ver    = version.split('.')[0]
-    pkg_var_name = 'PKG_' + name.replace('-', '_') + '_' + major_ver
-    lib_name     = '%s-%s' % (name, major_ver)
-
-    if lib_path is None:
-        lib_path = str(conf.path.get_bld())
-
-    if include_path is None:
-        include_path = str(conf.path)
-
-    if conf.env.PARDEBUG:
-        lib_name += 'D'
-
-    conf.env[pkg_var_name]       = lib_name
-    conf.env['INCLUDES_' + NAME] = [include_path]
-    conf.env['LIBPATH_' + NAME]  = [lib_path]
-    if has_objects:
-        conf.env['LIB_' + NAME] = [lib_name]
-
-    conf.run_env.append_unique(lib_path_name, [lib_path])
-    conf.define(NAME + '_VERSION', version)
-
-def display_msg(conf, msg, status=None, color=None):
-    color = 'CYAN'
-    if type(status) == bool and status:
-        color  = 'GREEN'
-        status = 'yes'
-    elif type(status) == bool and not status or status == "False":
-        color  = 'YELLOW'
-        status = 'no'
-    Logs.pprint('BOLD', '%s' % msg.ljust(conf.line_just), sep='')
-    Logs.pprint('BOLD', ":", sep='')
-    Logs.pprint(color, status)
-
-def display_msgs(conf, msgs):
-    for k, v in msgs.items():
-        display_msg(conf, k, v)
-
-def link_flags(env, lib):
-    return ' '.join(map(lambda x: env['LIB_ST'] % x,
-                        env['LIB_' + lib]))
-
-def compile_flags(env, lib):
-    return ' '.join(map(lambda x: env['CPPPATH_ST'] % x,
-                        env['INCLUDES_' + lib]))
-
-def build_pc(bld, name, version, version_suffix, libs, subst_dict={}):
-    """Build a pkg-config file for a library.
-
-    name           -- uppercase variable name     (e.g. 'SOMENAME')
-    version        -- version string              (e.g. '1.2.3')
-    version_suffix -- name version suffix         (e.g. '2')
-    libs           -- string/list of dependencies (e.g. 'LIBFOO GLIB')
-    """
-
-    pkg_prefix       = bld.env['PREFIX']
-    if len(pkg_prefix) > 1 and pkg_prefix[-1] == '/':
-        pkg_prefix = pkg_prefix[:-1]
-
-    target = name.lower()
-    if version_suffix != '':
-        target += '-' + version_suffix
-
-    if bld.env['PARDEBUG']:
-        target += 'D'
-
-    target += '.pc'
-
-    libdir = bld.env['LIBDIR']
-    if libdir.startswith(pkg_prefix):
-        libdir = libdir.replace(pkg_prefix, '${exec_prefix}')
-
-    includedir = bld.env['INCLUDEDIR']
-    if includedir.startswith(pkg_prefix):
-        includedir = includedir.replace(pkg_prefix, '${prefix}')
-
-    obj = bld(features='subst',
-              source='%s.pc.in' % name.lower(),
-              target=target,
-              install_path=os.path.join(bld.env['LIBDIR'], 'pkgconfig'),
-              exec_prefix='${prefix}',
-              PREFIX=pkg_prefix,
-              EXEC_PREFIX='${prefix}',
-              LIBDIR=libdir,
-              INCLUDEDIR=includedir)
-
-    if type(libs) != list:
-        libs = libs.split()
-
-    subst_dict[name + '_VERSION'] = version
-    subst_dict[name + '_MAJOR_VERSION'] = version[0:version.find('.')]
-    for i in libs:
-        subst_dict[i + '_LIBS']   = link_flags(bld.env, i)
-        lib_cflags = compile_flags(bld.env, i)
-        if lib_cflags == '':
-            lib_cflags = ' '
-        subst_dict[i + '_CFLAGS'] = lib_cflags
-
-    obj.__dict__.update(subst_dict)
-
-def make_simple_dox(name):
-    "Clean up messy Doxygen documentation after it is built"
-    name = name.lower()
-    NAME = name.upper()
-    try:
-        top = os.getcwd()
-        os.chdir(build_dir(name, 'doc/html'))
-        page = 'group__%s.html' % name
-        if not os.path.exists(page):
-            return
-        for i in [
-            ['%s_API ' % NAME, ''],
-            ['%s_DEPRECATED ' % NAME, ''],
-            ['group__%s.html' % name, ''],
-            ['&#160;', ''],
-            [r'<script.*><\/script>', ''],
-            [r'<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>', ''],
-            [r'<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>',
-             ''],
-            [r'<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>',
-             'Doxygen']]:
-            os.system("sed -i 's/%s/%s/g' %s" % (i[0], i[1], page))
-        os.rename('group__%s.html' % name, 'index.html')
-        for i in (glob.glob('*.png') +
-                  glob.glob('*.html') +
-                  glob.glob('*.js') +
-                  glob.glob('*.css')):
-            if i != 'index.html' and i != 'style.css':
-                os.remove(i)
-        os.chdir(top)
-        os.chdir(build_dir(name, 'doc/man/man3'))
-        for i in glob.glob('*.3'):
-            os.system("sed -i 's/%s_API //' %s" % (NAME, i))
-        for i in glob.glob('_*'):
-            os.remove(i)
-        os.chdir(top)
-    except Exception as e:
-        Logs.error("Failed to fix up %s documentation: %s" % (name, e))
-    finally:
-        os.chdir(top)
-
-def build_dox(bld, name, version, srcdir, blddir, outdir='', versioned=True):
-    """Build Doxygen API documentation"""
-    if not bld.env['DOCS']:
-        return
-
-    # Doxygen paths in are relative to the doxygen file
-    src_dir = bld.path.srcpath()
-    subst_tg = bld(features='subst',
-                   source='doc/reference.doxygen.in',
-                   target='doc/reference.doxygen',
-                   install_path='',
-                   name='doxyfile')
-
-    subst_dict = {
-        name + '_VERSION': version,
-        name + '_SRCDIR': os.path.abspath(src_dir),
-        name + '_DOC_DIR': ''
-    }
-
-    subst_tg.__dict__.update(subst_dict)
-
-    subst_tg.post()
-
-    docs = bld(features='doxygen',
-               doxyfile='doc/reference.doxygen')
-
-    docs.post()
-
-    outname = name.lower()
-    if versioned:
-        outname += '-%d' % int(version[0:version.find('.')])
-    bld.install_files(
-        os.path.join('${DOCDIR}', outname, outdir, 'html'),
-        bld.path.get_bld().ant_glob('doc/html/*'))
-    for i in range(1, 8):
-        bld.install_files('${MANDIR}/man%d' % i,
-                          bld.path.get_bld().ant_glob('doc/man/man%d/*' % i,
-                                                      excl='**/_*'))
-
-
-def build_version_files(header_path, source_path, domain, major, minor, micro):
-    """Generate version code header"""
-    header_path = os.path.abspath(header_path)
-    source_path = os.path.abspath(source_path)
-    text  = "int " + domain + "_major_version = " + str(major) + ";\n"
-    text += "int " + domain + "_minor_version = " + str(minor) + ";\n"
-    text += "int " + domain + "_micro_version = " + str(micro) + ";\n"
-    try:
-        o = open(source_path, 'w')
-        o.write(text)
-        o.close()
-    except IOError:
-        Logs.error('Failed to open %s for writing\n' % source_path)
-        sys.exit(-1)
-
-    text  = "#ifndef __" + domain + "_version_h__\n"
-    text += "#define __" + domain + "_version_h__\n"
-    text += "extern const char* " + domain + "_revision;\n"
-    text += "extern int " + domain + "_major_version;\n"
-    text += "extern int " + domain + "_minor_version;\n"
-    text += "extern int " + domain + "_micro_version;\n"
-    text += "#endif /* __" + domain + "_version_h__ */\n"
-    try:
-        o = open(header_path, 'w')
-        o.write(text)
-        o.close()
-    except IOError:
-        Logs.warn('Failed to open %s for writing\n' % header_path)
-        sys.exit(-1)
-
-    return None
-
-def build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder=None):
-    Logs.info('Generating pot file from %s' % name)
-    pot_file = '%s.pot' % name
-
-    cmd = ['xgettext',
-           '--keyword=_',
-           '--keyword=N_',
-           '--keyword=S_',
-           '--from-code=UTF-8',
-           '-o', pot_file]
-
-    if copyright_holder:
-        cmd += ['--copyright-holder="%s"' % copyright_holder]
-
-    cmd += sources
-    Logs.info('Updating ' + pot_file)
-    subprocess.call(cmd, cwd=os.path.join(srcdir, dir))
-
-def build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder=None):
-    pwd = os.getcwd()
-    os.chdir(os.path.join(srcdir, dir))
-    pot_file = '%s.pot' % name
-    po_files = glob.glob('po/*.po')
-    for po_file in po_files:
-        cmd = ['msgmerge',
-               '--update',
-               po_file,
-               pot_file]
-        Logs.info('Updating ' + po_file)
-        subprocess.call(cmd)
-    os.chdir(pwd)
-
-def build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder=None):
-    pwd = os.getcwd()
-    os.chdir(os.path.join(srcdir, dir))
-    po_files = glob.glob('po/*.po')
-    for po_file in po_files:
-        mo_file = po_file.replace('.po', '.mo')
-        cmd = ['msgfmt',
-               '-c',
-               '-f',
-               '-o',
-               mo_file,
-               po_file]
-        Logs.info('Generating ' + po_file)
-        subprocess.call(cmd)
-    os.chdir(pwd)
-
-def build_i18n(bld, srcdir, dir, name, sources, copyright_holder=None):
-    build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder)
-    build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder)
-    build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder)
-
-class ExecutionEnvironment:
-    """Context that sets system environment variables for program execution"""
-    def __init__(self, changes):
-        self.original_environ = os.environ.copy()
-
-        self.diff = {}
-        for path_name, paths in changes.items():
-            value = os.pathsep.join(paths)
-            if path_name in os.environ:
-                value += os.pathsep + os.environ[path_name]
-
-            self.diff[path_name] = value
-
-        os.environ.update(self.diff)
-
-    def __str__(self):
-        return '\n'.join({'%s="%s"' % (k, v) for k, v in self.diff.items()})
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, type, value, traceback):
-        os.environ = self.original_environ
-
-class RunContext(Build.BuildContext):
-    "runs an executable from the build directory"
-    cmd = 'run'
-
-    def execute(self):
-        self.restore()
-        if not self.all_envs:
-            self.load_envs()
-
-        with ExecutionEnvironment(self.env.AUTOWAF_RUN_ENV) as env:
-            if Options.options.verbose:
-                Logs.pprint('GREEN', str(env) + '\n')
-
-            if Options.options.cmd:
-                Logs.pprint('GREEN', 'Running %s' % Options.options.cmd)
-                subprocess.call(Options.options.cmd, shell=True)
-            else:
-                Logs.error("error: Missing --cmd option for run command")
-
-def show_diff(from_lines, to_lines, from_filename, to_filename):
-    import difflib
-    import sys
-
-    same = True
-    for line in difflib.unified_diff(
-            from_lines, to_lines,
-            fromfile=os.path.abspath(from_filename),
-            tofile=os.path.abspath(to_filename)):
-        sys.stderr.write(line)
-        same = False
-
-    return same
-
-def test_file_equals(patha, pathb):
-    import filecmp
-    import io
-
-    for path in (patha, pathb):
-        if not os.access(path, os.F_OK):
-            Logs.pprint('RED', 'error: missing file %s' % path)
-            return False
-
-    if filecmp.cmp(patha, pathb, shallow=False):
-        return True
-
-    with io.open(patha, 'rU', encoding='utf-8') as fa:
-        with io.open(pathb, 'rU', encoding='utf-8') as fb:
-            return show_diff(fa.readlines(), fb.readlines(), patha, pathb)
-
-def bench_time():
-    if hasattr(time, 'perf_counter'): # Added in Python 3.3
-        return time.perf_counter()
-    else:
-        return time.time()
-
-class TestOutput:
-    """Test output that is truthy if result is as expected"""
-    def __init__(self, expected, result=None):
-        self.stdout = self.stderr = None
-        self.expected = expected
-        self.result = result
-
-    def __bool__(self):
-        return self.expected is None or self.result == self.expected
-
-    __nonzero__ = __bool__
-
-def is_string(s):
-    if sys.version_info[0] < 3:
-        return isinstance(s, basestring)
-    return isinstance(s, str)
-
-class TestScope:
-    """Scope for running tests that maintains pass/fail statistics"""
-    def __init__(self, tst, name, defaults):
-        self.tst = tst
-        self.name = name
-        self.defaults = defaults
-        self.n_failed = 0
-        self.n_total = 0
-
-    def run(self, test, **kwargs):
-        if type(test) == list and 'name' not in kwargs:
-            import pipes
-            kwargs['name'] = ' '.join(map(pipes.quote, test))
-
-        if Options.options.test_filter and 'name' in kwargs:
-            import re
-            found = False
-            for scope in self.tst.stack:
-                if re.search(Options.options.test_filter, scope.name):
-                    found = True
-                    break
-
-            if (not found and
-                not re.search(Options.options.test_filter, self.name) and
-                not re.search(Options.options.test_filter, kwargs['name'])):
-                return True
-
-        if callable(test):
-            output = self._run_callable(test, **kwargs)
-        elif type(test) == list:
-
-            output = self._run_command(test, **kwargs)
-        else:
-            raise Exception("Unknown test type")
-
-        if not output:
-            self.tst.log_bad('FAILED', kwargs['name'])
-
-        return self.tst.test_result(output)
-
-    def _run_callable(self, test, **kwargs):
-        expected = kwargs['expected'] if 'expected' in kwargs else True
-        return TestOutput(expected, test())
-
-    def _run_command(self, test, **kwargs):
-        if 'stderr' in kwargs and kwargs['stderr'] == NONEMPTY:
-            # Run with a temp file for stderr and check that it is non-empty
-            import tempfile
-            with tempfile.TemporaryFile() as stderr:
-                kwargs['stderr'] = stderr
-                output = self.run(test, **kwargs)
-                stderr.seek(0, 2) # Seek to end
-                return (output if not output else
-                        self.run(
-                            lambda: stderr.tell() > 0,
-                            name=kwargs['name'] + ' error message'))
-
-        try:
-            # Run with stdout and stderr set to the appropriate streams
-            out_stream = self._stream('stdout', kwargs)
-            err_stream = self._stream('stderr', kwargs)
-            return self._exec(test, **kwargs)
-        finally:
-            out_stream = out_stream.close() if out_stream else None
-            err_stream = err_stream.close() if err_stream else None
-
-    def _stream(self, stream_name, kwargs):
-        s = kwargs[stream_name] if stream_name in kwargs else None
-        if is_string(s):
-            kwargs[stream_name] = open(s, 'wb')
-            return kwargs[stream_name]
-        return None
-
-    def _exec(self,
-              test,
-              expected=0,
-              name='',
-              stdin=None,
-              stdout=None,
-              stderr=None,
-              verbosity=1):
-        def stream(s):
-            return open(s, 'wb') if type(s) == str else s
-
-        if verbosity > 1:
-            self.tst.log_good('RUN     ', name)
-
-        if Options.options.test_wrapper:
-            test = [Options.options.test_wrapper] + test
-
-        output = TestOutput(expected)
-        with open(os.devnull, 'wb') as null:
-            out = null if verbosity < 3 and not stdout else stdout
-            err = null if verbosity < 2 and not stderr else stderr
-            proc = subprocess.Popen(test, stdin=stdin, stdout=out, stderr=err)
-            output.stdout, output.stderr = proc.communicate()
-            output.result = proc.returncode
-
-        if output and verbosity > 0:
-            self.tst.log_good('      OK', name)
-
-        return output
-
-class TestContext(Build.BuildContext):
-    "runs test suite"
-    fun = cmd = 'test'
-
-    def __init__(self, **kwargs):
-        super(TestContext, self).__init__(**kwargs)
-        self.start_time = bench_time()
-        self.max_depth = 1
-
-        defaults = {'verbosity': Options.options.verbose}
-        self.stack = [TestScope(self, Context.g_module.APPNAME, defaults)]
-
-    def defaults(self):
-        return self.stack[-1].defaults
-
-    def finalize(self):
-        if self.stack[-1].n_failed > 0:
-            sys.exit(1)
-
-        super(TestContext, self).finalize()
-
-    def __call__(self, test, **kwargs):
-        return self.stack[-1].run(test, **self.args(**kwargs))
-
-    def file_equals(self, from_path, to_path, **kwargs):
-        kwargs.update({'expected': True,
-                       'name': '%s == %s' % (from_path, to_path)})
-        return self(lambda: test_file_equals(from_path, to_path), **kwargs)
-
-    def log_good(self, title, fmt, *args):
-        Logs.pprint('GREEN', '[%s] %s' % (title.center(10), fmt % args))
-
-    def log_bad(self, title, fmt, *args):
-        Logs.pprint('RED', '[%s] %s' % (title.center(10), fmt % args))
-
-    def pre_recurse(self, node):
-        wscript_module = Context.load_module(node.abspath())
-        group_name = wscript_module.APPNAME
-        self.stack.append(TestScope(self, group_name, self.defaults()))
-        self.max_depth = max(self.max_depth, len(self.stack) - 1)
-
-        bld_dir = node.get_bld().parent
-        if bld_dir != self.path.get_bld():
-            Logs.info('')
-
-        self.original_dir = os.getcwd()
-        Logs.info("Waf: Entering directory `%s'\n", bld_dir)
-        os.chdir(str(bld_dir))
-
-        if not self.env.NO_COVERAGE and str(node.parent) == Context.top_dir:
-            self.clear_coverage()
-
-        self.log_good('=' * 10, 'Running %s tests', group_name)
-        super(TestContext, self).pre_recurse(node)
-
-    def test_result(self, success):
-        self.stack[-1].n_total += 1
-        self.stack[-1].n_failed += 1 if not success else 0
-        return success
-
-    def pop(self):
-        scope = self.stack.pop()
-        self.stack[-1].n_total += scope.n_total
-        self.stack[-1].n_failed += scope.n_failed
-        return scope
-
-    def post_recurse(self, node):
-        super(TestContext, self).post_recurse(node)
-
-        scope = self.pop()
-        duration = (bench_time() - self.start_time) * 1000.0
-        is_top = str(node.parent) == str(Context.top_dir)
-
-        if is_top and self.max_depth > 1:
-            Logs.info('')
-
-        self.log_good('=' * 10, '%d tests from %s ran (%d ms total)',
-                      scope.n_total, scope.name, duration)
-
-        if not self.env.NO_COVERAGE:
-            if is_top:
-                self.gen_coverage()
-
-            if os.path.exists('coverage/index.html'):
-                self.log_good('REPORT', '<file://%s>',
-                              os.path.abspath('coverage/index.html'))
-
-        successes = scope.n_total - scope.n_failed
-        Logs.pprint('GREEN', '[  PASSED  ] %d tests' % successes)
-        if scope.n_failed > 0:
-            Logs.pprint('RED', '[  FAILED  ] %d tests' % scope.n_failed)
-        if is_top:
-            Logs.info("\nWaf: Leaving directory `%s'" % os.getcwd())
-
-        os.chdir(self.original_dir)
-
-    def execute(self):
-        self.restore()
-        if not self.all_envs:
-            self.load_envs()
-
-        if not self.env.BUILD_TESTS:
-            self.fatal('Configuration does not include tests')
-
-        with ExecutionEnvironment(self.env.AUTOWAF_RUN_ENV) as env:
-            if self.defaults()['verbosity'] > 0:
-                Logs.pprint('GREEN', str(env) + '\n')
-            self.recurse([self.run_dir])
-
-    def src_path(self, path):
-        return os.path.relpath(os.path.join(str(self.path), path))
-
-    def args(self, **kwargs):
-        all_kwargs = self.defaults().copy()
-        all_kwargs.update(kwargs)
-        return all_kwargs
-
-    def group(self, name, **kwargs):
-        return TestGroup(
-            self, self.stack[-1].name, name, **self.args(**kwargs))
-
-    def set_test_defaults(self, **kwargs):
-        """Set default arguments to be passed to all tests"""
-        self.stack[-1].defaults.update(kwargs)
-
-    def clear_coverage(self):
-        """Zero old coverage data"""
-        try:
-            with open('cov-clear.log', 'w') as log:
-                subprocess.call(['lcov', '-z', '-d', str(self.path)],
-                                stdout=log, stderr=log)
-
-        except Exception:
-            Logs.warn('Failed to run lcov to clear old coverage data')
-
-    def gen_coverage(self):
-        """Generate coverage data and report"""
-        try:
-            with open('cov.lcov', 'w') as out:
-                with open('cov.log', 'w') as err:
-                    subprocess.call(['lcov', '-c', '--no-external',
-                                     '--rc', 'lcov_branch_coverage=1',
-                                     '-b', '.',
-                                     '-d', str(self.path)],
-                                    stdout=out, stderr=err)
-
-            if not os.path.isdir('coverage'):
-                os.makedirs('coverage')
-
-            with open('genhtml.log', 'w') as log:
-                subprocess.call(['genhtml',
-                                 '-o', 'coverage',
-                                 '--rc', 'genhtml_branch_coverage=1',
-                                 'cov.lcov'],
-                                stdout=log, stderr=log)
-
-            summary = subprocess.check_output(
-                ['lcov', '--summary',
-                 '--rc', 'lcov_branch_coverage=1',
-                 'cov.lcov'],
-                stderr=subprocess.STDOUT).decode('ascii')
-
-            import re
-            lines = re.search('lines\.*: (.*)%.*', summary).group(1)
-            functions = re.search('functions\.*: (.*)%.*', summary).group(1)
-            branches = re.search('branches\.*: (.*)%.*', summary).group(1)
-            self.log_good('COVERAGE', '%s%% lines, %s%% functions, %s%% branches',
-                          lines, functions, branches)
-
-        except Exception:
-            Logs.warn('Failed to run lcov to generate coverage report')
-
-class TestGroup:
-    def __init__(self, tst, suitename, name, **kwargs):
-        self.tst = tst
-        self.suitename = suitename
-        self.name = name
-        self.kwargs = kwargs
-        self.start_time = bench_time()
-        tst.stack.append(TestScope(tst, name, tst.defaults()))
-
-    def label(self):
-        return self.suitename + '.%s' % self.name if self.name else ''
-
-    def args(self, **kwargs):
-        all_kwargs = self.tst.args(**self.kwargs)
-        all_kwargs.update(kwargs)
-        return all_kwargs
-
-    def __enter__(self):
-        if 'verbosity' in self.kwargs and self.kwargs['verbosity'] > 0:
-            self.tst.log_good('-' * 10, self.label())
-        return self
-
-    def __call__(self, test, **kwargs):
-        return self.tst(test, **self.args(**kwargs))
-
-    def file_equals(self, from_path, to_path, **kwargs):
-        return self.tst.file_equals(from_path, to_path, **kwargs)
-
-    def __exit__(self, type, value, traceback):
-        duration = (bench_time() - self.start_time) * 1000.0
-        scope = self.tst.pop()
-        n_passed = scope.n_total - scope.n_failed
-        if scope.n_failed == 0:
-            self.tst.log_good('-' * 10, '%d tests from %s (%d ms total)',
-                              scope.n_total, self.label(), duration)
-        else:
-            self.tst.log_bad('-' * 10, '%d/%d tests from %s (%d ms total)',
-                             n_passed, scope.n_total, self.label(), duration)
-
-def run_ldconfig(ctx):
-    should_run = (ctx.cmd == 'install' and
-                  not ctx.env['RAN_LDCONFIG'] and
-                  ctx.env['LIBDIR'] and
-                  'DESTDIR' not in os.environ and
-                  not Options.options.destdir)
-
-    if should_run:
-        try:
-            Logs.info("Waf: Running `/sbin/ldconfig %s'" % ctx.env['LIBDIR'])
-            subprocess.call(['/sbin/ldconfig', ctx.env['LIBDIR']])
-            ctx.env['RAN_LDCONFIG'] = True
-        except Exception:
-            pass
-
-def get_rdf_news(name,
-                 in_files,
-                 top_entries=None,
-                 extra_entries=None,
-                 dev_dist=None):
-    import rdflib
-    from time import strptime
-
-    doap = rdflib.Namespace('http://usefulinc.com/ns/doap#')
-    dcs  = rdflib.Namespace('http://ontologi.es/doap-changeset#')
-    rdfs = rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#')
-    foaf = rdflib.Namespace('http://xmlns.com/foaf/0.1/')
-    rdf  = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
-    m    = rdflib.ConjunctiveGraph()
-
-    try:
-        for i in in_files:
-            m.parse(i, format='n3')
-    except Exception:
-        Logs.warn('Error parsing data, unable to generate NEWS')
-        return
-
-    proj = m.value(None, rdf.type, doap.Project)
-    for f in m.triples([proj, rdfs.seeAlso, None]):
-        if f[2].endswith('.ttl'):
-            m.parse(f[2], format='n3')
-
-    entries = {}
-    for r in m.triples([proj, doap.release, None]):
-        release   = r[2]
-        revision  = m.value(release, doap.revision, None)
-        date      = m.value(release, doap.created, None)
-        blamee    = m.value(release, dcs.blame, None)
-        changeset = m.value(release, dcs.changeset, None)
-        dist      = m.value(release, doap['file-release'], None)
-
-        if not dist:
-            Logs.warn('No file release for %s %s' % (proj, revision))
-            dist = dev_dist
-
-        if revision and date and blamee and changeset:
-            entry             = {}
-            entry['name']     = str(name)
-            entry['revision'] = str(revision)
-            entry['date']     = strptime(str(date), '%Y-%m-%d')
-            entry['status']   = 'stable' if dist != dev_dist else 'unstable'
-            entry['dist']     = str(dist)
-            entry['items']    = []
-
-            for i in m.triples([changeset, dcs.item, None]):
-                item = str(m.value(i[2], rdfs.label, None))
-                entry['items'] += [item]
-                if dist and top_entries is not None:
-                    if str(dist) not in top_entries:
-                        top_entries[str(dist)] = {'items': []}
-                    top_entries[str(dist)]['items'] += [
-                        '%s: %s' % (name, item)]
-
-            if extra_entries and dist and str(dist) in extra_entries:
-                entry['items'] += extra_entries[str(dist)]['items']
-
-            entry['blamee_name'] = str(m.value(blamee, foaf.name, None))
-            entry['blamee_mbox'] = str(m.value(blamee, foaf.mbox, None))
-
-            entries[(str(date), str(revision))] = entry
-        else:
-            Logs.warn('Ignored incomplete %s release description' % name)
-
-    return entries
-
-def write_news(entries, out_file):
-    import textwrap
-    from time import strftime
-
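-    # Entries are written in a Debian-changelog-like format, e.g.
-    # (illustrative values):
-    #
-    #   mylib (1.0.0) stable;
-    #
-    #     * Initial release
-    #
-    #    -- Jane Doe <jane@example.org>  Sun, 21 Apr 2019 20:00:00 +0000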
-    if len(entries) == 0:
-        return
-
-    news = open(out_file, 'w')
-    for e in sorted(entries.keys(), reverse=True):
-        entry = entries[e]
-        news.write('%s (%s) %s;\n' % (entry['name'], entry['revision'], entry['status']))
-        for item in entry['items']:
-            wrapped = textwrap.wrap(item, width=79)
-            news.write('\n  * ' + '\n    '.join(wrapped))
-
-        news.write('\n\n --')
-        news.write(' %s <%s>' % (entry['blamee_name'],
-                                 entry['blamee_mbox'].replace('mailto:', '')))
-
-        news.write('  %s\n\n' % (
-            strftime('%a, %d %b %Y %H:%M:%S +0000', entry['date'])))
-
-    news.close()
-
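-# A minimal sketch of how the two functions above fit together (hypothetical
-# project name and metadata file; requires rdflib):
-#
-#   entries = get_rdf_news('mylib', ['mylib.ttl'])
-#   write_news(entries, 'NEWS')
-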
-def write_posts(entries, meta, out_dir, status='stable'):
-    "write news posts in Pelican Markdown format"
-    from time import strftime
-    try:
-        os.mkdir(out_dir)
-    except Exception:
-        pass
-
-    for i in entries:
-        entry    = entries[i]
-        revision = i[1]
-        if entry['status'] != status:
-            continue
-
-        date_str     = strftime('%Y-%m-%d', entry['date'])
-        datetime_str = strftime('%Y-%m-%d %H:%M', entry['date'])
-
-        path  = os.path.join(out_dir, '%s-%s-%s.md' % (
-            date_str, entry['name'], revision.replace('.', '-')))
-        post  = open(path, 'w')
-        title = entry['title'] if 'title' in entry else entry['name']
-        post.write('Title: %s %s\n' % (title, revision))
-        post.write('Date: %s\n' % datetime_str)
-        post.write('Slug: %s-%s\n' % (entry['name'], revision.replace('.', '-')))
-        for k in meta:
-            post.write('%s: %s\n' % (k, meta[k]))
-        post.write('\n')
-
-        url = entry['dist']
-        post.write('[%s %s](%s) has been released.' % (
-            entry['name'], revision, url))
-
-        if 'description' in entry:
-            post.write('  ' + entry['description'])
-
-        post.write('\n')
-        if (len(entry['items']) > 0 and
-            not (len(entry['items']) == 1 and
-                 entry['items'][0] == 'Initial release')):
-            post.write('\nChanges:\n\n')
-            for i in entry['items']:
-                post.write(' * %s\n' % i)
-
-        post.close()
-
-def get_blurb(in_file):
-    "Get the first paragram of a Markdown formatted file, skipping the title"
-    f = open(in_file, 'r')
-    f.readline()  # Title
-    f.readline()  # Title underline
-    f.readline()  # Blank
-    out = ''
-    line = f.readline()
-    while len(line) > 0 and line != '\n':
-        out += line.replace('\n', ' ')
-        line = f.readline()
-    return out.strip()
-
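-# get_blurb expects a file that begins with a setext-style title, e.g.:
-#
-#   Mylib
-#   =====
-#
-#   The first paragraph of the description...
-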
-def get_news(in_file, entry_props={}):
-    """Get NEWS entries in the format expected by write_posts().
-
-    Properties that should be set on every entry can be passed in
-    `entry_props`.  If `entry_props` has a 'dist_pattern' value, it is used to
-    set the 'dist' entry of entries by substituting the version number.
-    """
-
-    import re
-    from email.utils import parsedate  # rfc822 was removed in Python 3
-
-    f       = open(in_file, 'r')
-    entries = {}
-    while True:
-        # Read header line
-        head    = f.readline()
-        matches = re.compile(r'([^ ]*) \((.*)\) ([a-zA-Z]*);').match(head)
-        if matches is None:
-            break
-
-        entry             = {}
-        entry['name']     = matches.group(1)
-        entry['revision'] = matches.group(2)
-        entry['status']   = matches.group(3)
-        entry['items']    = []
-        if 'dist_pattern' in entry_props:
-            entry['dist'] = entry_props['dist_pattern'] % entry['revision']
-
-        # Read blank line after header
-        if f.readline() != '\n':
-            raise SyntaxError('expected blank line after NEWS header')
-
-        def add_item(item):
-            if len(item) > 0:
-                entry['items'] += [item.replace('\n', ' ').strip()]
-
-        # Read entries for this revision
-        item = ''
-        line = ''
-        while line != '\n':
-            line = f.readline()
-            if line.startswith('  * '):
-                add_item(item)
-                item = line[3:].lstrip()
-            else:
-                item += line.lstrip()
-        add_item(item)
-
-        # Read footer line
-        foot    = f.readline()
-        matches = re.compile(' -- (.*) <(.*)>  (.*)').match(foot)
-        if matches is None:
-            raise SyntaxError('expected NEWS footer " -- author <mbox>  date"')
-        entry['date']        = parsedate(matches.group(3))
-        entry['blamee_name'] = matches.group(1)
-        entry['blamee_mbox'] = matches.group(2)
-        entry.update(entry_props)
-        entries[(entry['date'], entry['revision'])] = entry
-
-        # Skip trailing blank line before next entry
-        f.readline()
-
-    f.close()
-
-    return entries
-
-def news_to_posts(news_file, entry_props, post_meta, default_post_dir):
-    post_dir = os.getenv('POST_DIR')
-    if not post_dir:
-        post_dir = default_post_dir
-        sys.stderr.write('POST_DIR not set in environment, writing to %s\n' % post_dir)
-    else:
-        sys.stderr.write('writing posts to %s\n' % post_dir)
-
-    entries = get_news(news_file, entry_props)
-    write_posts(entries, post_meta, post_dir)
-
-def run_script(cmds):
-    for cmd in cmds:
-        subprocess.check_call(cmd, shell=True)
-
-def release(name, version, dist_name=None):
-    if dist_name is None:
-        dist_name = name.lower()
-
-    dist = '%s-%s.tar.bz2' % (dist_name, version)
-    try:
-        os.remove(dist)
-        os.remove(dist + '.sig')
-    except Exception:
-        pass
-
-    status = subprocess.check_output('git status --porcelain', shell=True).decode('utf-8')
-    if status:
-        Logs.error('error: git working copy is dirty\n' + status)
-        raise Exception('git working copy is dirty')
-
-    head = subprocess.check_output('git show -s --oneline', shell=True).decode('utf-8')
-    head_summary = head[8:].strip().lower()
-    expected_summary = '%s %s' % (name.lower(), version)
-    if head_summary != expected_summary:
-        raise Exception('latest commit "%s" does not match "%s"' % (
-            head_summary, expected_summary))
-
-    run_script(['./waf configure --docs',
-                './waf',
-                './waf distcheck',
-                './waf posts',
-                'gpg -b %s' % dist,
-                'git tag -s v%s -m "%s %s"' % (version, name, version)])
diff --git a/waflib/extras/batched_cc.py b/waflib/extras/batched_cc.py
deleted file mode 100644
index aad2872..0000000
--- a/waflib/extras/batched_cc.py
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2015 (ita)
-
-"""
-Instead of compiling object files one by one, c/c++ compilers can often compile several at once:
-cc -c ../file1.c ../file2.c ../file3.c
-
-Files are output in the directory where the compiler is called, and dependencies are more difficult
-to track (the command must not be re-run on every source file when only one file changes).
-As such, we pretend the files are compiled one by one, but no command is actually run:
-each cc/cpp task becomes a slave, and a new 'batch' master task collects the
-signatures from the slaves and works out the command line to run.
-
-Just import this module to start using it:
-def build(bld):
-	bld.load('batched_cc')
-
-Note that this is provided as an example; unity builds are recommended
-for the best performance (fewer tasks and fewer jobs to execute).
-See waflib/extras/unity.py.
-"""
-
-from waflib import Task, Utils
-from waflib.TaskGen import extension, feature, after_method
-from waflib.Tools import c, cxx
-
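-# Upper bound on the number of source files handed to one compiler invocation;
-# a new batch master is started once this many slaves accumulate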
-MAX_BATCH = 50
-
-c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
-c_fun, _ = Task.compile_fun_noshell(c_str)
-
-cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
-cxx_fun, _ = Task.compile_fun_noshell(cxx_str)
-
-count = 70000
-class batch(Task.Task):
-	color = 'PINK'
-
-	after = ['c', 'cxx']
-	before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']
-
-	def uid(self):
-		return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target])
-
-	def __str__(self):
-		return 'Batch compilation for %d slaves' % len(self.slaves)
-
-	def __init__(self, *k, **kw):
-		Task.Task.__init__(self, *k, **kw)
-		self.slaves = []
-		self.inputs = []
-		self.hasrun = 0
-
-		global count
-		count += 1
-		self.idx = count
-
-	def add_slave(self, slave):
-		self.slaves.append(slave)
-		self.set_run_after(slave)
-
-	def runnable_status(self):
-		for t in self.run_after:
-			if not t.hasrun:
-				return Task.ASK_LATER
-
-		for t in self.slaves:
-			#if t.executed:
-			if t.hasrun != Task.SKIPPED:
-				return Task.RUN_ME
-
-		return Task.SKIP_ME
-
-	def get_cwd(self):
-		return self.slaves[0].outputs[0].parent
-
-	def batch_incpaths(self):
-		st = self.env.CPPPATH_ST
-		return [st % node.abspath() for node in self.generator.includes_nodes]
-
-	def run(self):
-		self.outputs = []
-
-		srclst = []
-		slaves = []
-		for t in self.slaves:
-			if t.hasrun != Task.SKIPPED:
-				slaves.append(t)
-				srclst.append(t.inputs[0].abspath())
-
-		self.env.SRCLST = srclst
-
-		if self.slaves[0].__class__.__name__ == 'c':
-			ret = c_fun(self)
-		else:
-			ret = cxx_fun(self)
-
-		if ret:
-			return ret
-
-		for t in slaves:
-			t.old_post_run()
-
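-# Replace the .c/.cpp extension hooks: each source file gets a slave task, and
-# slaves are attached to a per-directory 'batch' master (a new master is
-# created whenever a master exceeds MAX_BATCH slaves)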
-def hook(cls_type):
-	def n_hook(self, node):
-
-		ext = '.obj' if self.env.CC_NAME == 'msvc' else '.o'
-		name = node.name
-		k = name.rfind('.')
-		if k >= 0:
-			basename = name[:k] + ext
-		else:
-			basename = name + ext
-
-		outdir = node.parent.get_bld().make_node('%d' % self.idx)
-		outdir.mkdir()
-		out = outdir.find_or_declare(basename)
-
-		task = self.create_task(cls_type, node, out)
-
-		try:
-			self.compiled_tasks.append(task)
-		except AttributeError:
-			self.compiled_tasks = [task]
-
-		if not getattr(self, 'masters', None):
-			self.masters = {}
-			self.allmasters = []
-
-		def fix_path(tsk):
-			if self.env.CC_NAME == 'msvc':
-				tsk.env.append_unique('CXX_TGT_F_BATCHED', '/Fo%s\\' % outdir.abspath())
-
-		if not node.parent in self.masters:
-			m = self.masters[node.parent] = self.master = self.create_task('batch')
-			fix_path(m)
-			self.allmasters.append(m)
-		else:
-			m = self.masters[node.parent]
-			if len(m.slaves) > MAX_BATCH:
-				m = self.masters[node.parent] = self.master = self.create_task('batch')
-				fix_path(m)
-				self.allmasters.append(m)
-		m.add_slave(task)
-		return task
-	return n_hook
-
-extension('.c')(hook('c'))
-extension('.cpp','.cc','.cxx','.C','.c++')(hook('cxx'))
-
-@feature('cprogram', 'cshlib', 'cstaticlib', 'cxxprogram', 'cxxshlib', 'cxxstlib')
-@after_method('apply_link')
-def link_after_masters(self):
-	if getattr(self, 'allmasters', None):
-		for m in self.allmasters:
-			self.link_task.set_run_after(m)
-
-# Modify the c and cxx task classes - in theory it would be best to
-# create subclasses and to re-map the c/c++ extensions
-for x in ('c', 'cxx'):
-	t = Task.classes[x]
-	def run(self):
-		pass
-
-	def post_run(self):
-		pass
-
-	setattr(t, 'oldrun', getattr(t, 'run', None))
-	setattr(t, 'run', run)
-	setattr(t, 'old_post_run', t.post_run)
-	setattr(t, 'post_run', post_run)
-
diff --git a/waflib/extras/biber.py b/waflib/extras/biber.py
deleted file mode 100644
index fd9db4e..0000000
--- a/waflib/extras/biber.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-"""
-Latex processing using "biber"
-"""
-
-import os
-from waflib import Task, Logs
-
-from waflib.Tools import tex as texmodule
-
-class tex(texmodule.tex):
-	biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
-	biber_fun.__doc__ = """
-	Execute the program **biber**
-	"""
-
-	def bibfile(self):
-		return None
-
-	def bibunits(self):
-		self.env.env = {}
-		self.env.env.update(os.environ)
-		self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
-		self.env.SRCFILE = self.aux_nodes[0].name[:-4]
-
-		if not self.env['PROMPT_LATEX']:
-			self.env.append_unique('BIBERFLAGS', '--quiet')
-
-		path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
-		if os.path.isfile(path):
-			Logs.warn('calling biber')
-			self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
-		else:
-			super(tex, self).bibfile()
-			super(tex, self).bibunits()
-
-class latex(tex):
-	texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
-class pdflatex(tex):
-	texfun, vars =  Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
-class xelatex(tex):
-	texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
-
-def configure(self):
-	"""
-	Almost the same as in tex.py, but try to detect 'biber'
-	"""
-	v = self.env
-	for p in ' biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
-		try:
-			self.find_program(p, var=p.upper())
-		except self.errors.ConfigurationError:
-			pass
-	v['DVIPSFLAGS'] = '-Ppdf'
-
diff --git a/waflib/extras/bjam.py b/waflib/extras/bjam.py
deleted file mode 100644
index 8e04d3a..0000000
--- a/waflib/extras/bjam.py
+++ /dev/null
@@ -1,128 +0,0 @@
-#! /usr/bin/env python
-# per rosengren 2011
-
-from os import sep, readlink
-from waflib import Logs
-from waflib.TaskGen import feature, after_method
-from waflib.Task import Task, always_run
-
-def options(opt):
-	grp = opt.add_option_group('Bjam Options')
-	grp.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
-	grp.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
-	grp.add_option('--bjam_config', default=None)
-	grp.add_option('--bjam_toolset', default=None)
-
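-# Rough usage sketch (attribute names inferred from the tasks below; 'target'
-# lists the boost libraries passed to bjam as --with-<lib>):
-#
-#   def build(bld):
-#       bld(features='bjam', target=['system', 'filesystem'], always=True)
-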
-def configure(cnf):
-	if not cnf.env.BJAM_SRC:
-		cnf.env.BJAM_SRC = cnf.options.bjam_src
-	if not cnf.env.BJAM_UNAME:
-		cnf.env.BJAM_UNAME = cnf.options.bjam_uname
-	try:
-		cnf.find_program('bjam', path_list=[
-			cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME
-		])
-	except Exception:
-		cnf.env.BJAM = None
-	if not cnf.env.BJAM_CONFIG:
-		cnf.env.BJAM_CONFIG = cnf.options.bjam_config
-	if not cnf.env.BJAM_TOOLSET:
-		cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset
-
-@feature('bjam')
-@after_method('process_rule')
-def process_bjam(self):
-	if not self.bld.env.BJAM:
-		self.create_task('bjam_creator')
-	self.create_task('bjam_build')
-	self.create_task('bjam_installer')
-	if getattr(self, 'always', False):
-		always_run(bjam_creator)
-		always_run(bjam_build)
-	always_run(bjam_installer)
-
-class bjam_creator(Task):
-	ext_out = 'bjam_exe'
-	vars=['BJAM_SRC', 'BJAM_UNAME']
-	def run(self):
-		env = self.env
-		gen = self.generator
-		bjam = gen.bld.root.find_dir(env.BJAM_SRC)
-		if not bjam:
-			Logs.error('Cannot find bjam source')
-			return -1
-		bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
-		bjam_exe = bjam.find_resource(bjam_exe_relpath)
-		if bjam_exe:
-			env.BJAM = bjam_exe.srcpath()
-			return 0
-		bjam_cmd = ['./build.sh']
-		Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
-		result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
-		if result != 0:
-			Logs.error('bjam failed')
-			return -1
-		bjam_exe = bjam.find_resource(bjam_exe_relpath)
-		if bjam_exe:
-			env.BJAM = bjam_exe.srcpath()
-			return 0
-		Logs.error('bjam failed')
-		return -1
-
-class bjam_build(Task):
-	ext_in = 'bjam_exe'
-	ext_out = 'install'
-	vars = ['BJAM_TOOLSET']
-	def run(self):
-		env = self.env
-		gen = self.generator
-		path = gen.path
-		bld = gen.bld
-		if hasattr(gen, 'root'):
-			build_root = path.find_node(gen.root)
-		else:
-			build_root = path
-		jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
-		if jam:
-			Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
-			jam_rel = jam.relpath_gen(build_root)
-		else:
-			Logs.warn('No build configuration in build_config/user-config.jam. Using default')
-			jam_rel = None
-		bjam_exe = bld.srcnode.find_node(env.BJAM)
-		if not bjam_exe:
-			Logs.error('env.BJAM is not set')
-			return -1
-		bjam_exe_rel = bjam_exe.relpath_gen(build_root)
-		cmd = ([bjam_exe_rel] +
-			(['--user-config=' + jam_rel] if jam_rel else []) +
-			['--stagedir=' + path.get_bld().path_from(build_root)] +
-			['--debug-configuration'] +
-			['--with-' + lib for lib in self.generator.target] +
-			(['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
-			['link=' + 'shared'] +
-			['variant=' + 'release']
-		)
-		Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
-		ret = self.exec_command(cmd, cwd=build_root.srcpath())
-		if ret != 0:
-			return ret
-		self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
-		return 0
-
-class bjam_installer(Task):
-	ext_in = 'install'
-	def run(self):
-		gen = self.generator
-		path = gen.path
-		for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
-			files = []
-			for n in path.get_bld().ant_glob(pat):
-				try:
-					t = readlink(n.srcpath())
-					gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False)
-				except OSError:
-					files.append(n)
-			gen.bld.install_files(idir, files, postpone=False)
-		return 0
-
diff --git a/waflib/extras/blender.py b/waflib/extras/blender.py
deleted file mode 100644
index e5efc28..0000000
--- a/waflib/extras/blender.py
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Michal Proszek, 2014 (poxip)
-
-"""
-Detect the Blender version and path,
-and install the extension:
-
-	def options(opt):
-		opt.load('blender')
-	def configure(cnf):
-		cnf.load('blender')
-	def build(bld):
-		bld(name='io_mesh_raw',
-			features='blender_list',
-			files=['file1.py', 'file2.py']
-		)
-If the name variable is empty, files are installed in scripts/addons, otherwise in scripts/addons/name
-Use ./waf configure --system to set the installation directory to system path
-"""
-import os
-import re
-from getpass import getuser
-
-from waflib import Utils
-from waflib.TaskGen import feature
-from waflib.Configure import conf
-
-def options(opt):
-	opt.add_option(
-		'-s', '--system',
-		dest='directory_system',
-		default=False,
-		action='store_true',
-		help='determines installation directory (default: user)'
-	)
-
-@conf
-def find_blender(ctx):
-	'''Find the blender program, store its version in BLENDER_VERSION and return it'''
-	blender = ctx.find_program('blender')
-	output = ctx.cmd_and_log(blender + ['--version'])
-	m = re.search(r'Blender\s*((\d+(\.|))*)', output)
-	if not m:
-		ctx.fatal('Could not retrieve blender version')
-
-	try:
-		blender_version = m.group(1)
-	except IndexError:
-		ctx.fatal('Could not retrieve blender version')
-
-	ctx.env['BLENDER_VERSION'] = blender_version
-	return blender
-
-@conf
-def configure_paths(ctx):
-	"""Setup blender paths"""
-	# Get the username
-	user = getuser()
-	_platform = Utils.unversioned_sys_platform()
-	config_path = {'user': '', 'system': ''}
-	if _platform.startswith('linux'):
-		config_path['user'] = '/home/%s/.config/blender/' % user
-		config_path['system'] = '/usr/share/blender/'
-	elif _platform == 'darwin':
-		# MAC OS X
-		config_path['user'] = \
-			'/Users/%s/Library/Application Support/Blender/' % user
-		config_path['system'] = '/Library/Application Support/Blender/'
-	elif Utils.is_win32:
-		# Windows
-		appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
-		homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')
-
-		config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
-		config_path['system'] = \
-			'%s/All Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
-	else:
-		ctx.fatal(
-			'Unsupported platform. '
-			'Available platforms: Linux, OSX, MS-Windows.'
-		)
-
-	blender_version = ctx.env['BLENDER_VERSION']
-
-	config_path['user'] += blender_version + '/'
-	config_path['system'] += blender_version + '/'
-
-	ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
-	if ctx.options.directory_system:
-		ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']
-
-	ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
-		ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
-	)
-	Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
-
-def configure(ctx):
-	ctx.find_blender()
-	ctx.configure_paths()
-
-@feature('blender_list')
-def blender(self):
-	# Two ways to install a blender extension: as a module or just .py files
-	dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
-	Utils.check_dir(dest_dir)
-	self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.'))
-
diff --git a/waflib/extras/boo.py b/waflib/extras/boo.py
deleted file mode 100644
index 06623d4..0000000
--- a/waflib/extras/boo.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Yannick LM 2011
-
-"""
-Support for the boo programming language, for example::
-
-	bld(features = "boo",       # necessary feature
-		source   = "src.boo",   # list of boo files
-		gen      = "world.dll", # target
-		type     = "library",   # library/exe ("-target:xyz" flag)
-		name     = "world"      # necessary if the target is referenced by 'use'
-	)
-"""
-
-from waflib import Task
-from waflib.Configure import conf
-from waflib.TaskGen import feature, after_method, before_method, extension
-
-@extension('.boo')
-def boo_hook(self, node):
-	# Nothing here yet ...
-	# TODO filter the non-boo source files in 'apply_booc' and remove this method
-	pass
-
-@feature('boo')
-@before_method('process_source')
-def apply_booc(self):
-	"""Create a booc task """
-	src_nodes = self.to_nodes(self.source)
-	out_node = self.path.find_or_declare(self.gen)
-
-	self.boo_task = self.create_task('booc', src_nodes, [out_node])
-
-	# Set variables used by the 'booc' task
-	self.boo_task.env.OUT = '-o:%s' % out_node.abspath()
-
-	# type is "exe" by default
-	type = getattr(self, "type", "exe")
-	self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % type
-
-@feature('boo')
-@after_method('apply_boo')
-def use_boo(self):
-	"""
-	boo applications honor the **use** keyword
-	"""
-	dep_names = self.to_list(getattr(self, 'use', []))
-	for dep_name in dep_names:
-		dep_task_gen = self.bld.get_tgen_by_name(dep_name)
-		if not dep_task_gen:
-			continue
-		dep_task_gen.post()
-		dep_task = getattr(dep_task_gen, 'boo_task', None)
-		if not dep_task:
-			# Try a cs task:
-			dep_task = getattr(dep_task_gen, 'cs_task', None)
-			if not dep_task:
-				# Try a link task:
-				dep_task = getattr(dep_task_gen, 'link_task', None)
-				if not dep_task:
-					# Abort ...
-					continue
-		self.boo_task.set_run_after(dep_task) # order
-		self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
-		self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())
-
-class booc(Task.Task):
-	"""Compiles .boo files """
-	color   = 'YELLOW'
-	run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'
-
-@conf
-def check_booc(self):
-	self.find_program('booc', 'BOOC')
-	self.env.BOO_FLAGS = ['-nologo']
-
-def configure(self):
-	"""Check that booc is available """
-	self.check_booc()
-
diff --git a/waflib/extras/boost.py b/waflib/extras/boost.py
deleted file mode 100644
index c2aaaa9..0000000
--- a/waflib/extras/boost.py
+++ /dev/null
@@ -1,525 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-#
-# partially based on boost.py written by Gernot Vormayr
-# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
-# modified by Bjoern Michaelsen, 2008
-# modified by Luca Fossati, 2008
-# rewritten for waf 1.5.1, Thomas Nagy, 2008
-# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
-
-'''
-
-This is an extra tool, not bundled with the default waf binary.
-To add the boost tool to the waf file:
-$ ./waf-light --tools=compat15,boost
-	or, if you have waf >= 1.6.2
-$ ./waf update --files=boost
-
-When using this tool, the wscript will look like:
-
-	def options(opt):
-		opt.load('compiler_cxx boost')
-
-	def configure(conf):
-		conf.load('compiler_cxx boost')
-		conf.check_boost(lib='system filesystem')
-
-	def build(bld):
-		bld(source='main.cpp', target='app', use='BOOST')
-
-Options are generated, in order to specify the location of boost includes/libraries.
-The `check_boost` configuration function lets you specify which boost libraries to use.
-It can also provide default values for the --boost-* command-line options.
-Everything will be packaged together in a BOOST component that you can use.
-
-When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
- - you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
-   Errors: C4530
- - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC.
-   So before calling `conf.check_boost` you might want to disable it by adding
-		conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
-   Errors:
- - boost might also be compiled with /MT, which links the runtime statically.
-   If you have problems with redefined symbols,
-		self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
-		self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
-Passing `--boost-linkage_autodetect` might help ensure correct linkage in some basic cases.
-
-'''
-
-import sys
-import re
-from waflib import Utils, Logs, Errors
-from waflib.Configure import conf
-from waflib.TaskGen import feature, after_method
-
-BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
-BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
-BOOST_VERSION_FILE = 'boost/version.hpp'
-BOOST_VERSION_CODE = '''
-#include <iostream>
-#include <boost/version.hpp>
-int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
-'''
-
-BOOST_ERROR_CODE = '''
-#include <boost/system/error_code.hpp>
-int main() { boost::system::error_code c; }
-'''
-
-PTHREAD_CODE = '''
-#include <pthread.h>
-static void* f(void*) { return 0; }
-int main() {
-	pthread_t th;
-	pthread_attr_t attr;
-	pthread_attr_init(&attr);
-	pthread_create(&th, &attr, &f, 0);
-	pthread_join(th, 0);
-	pthread_cleanup_push(0, 0);
-	pthread_cleanup_pop(0);
-	pthread_attr_destroy(&attr);
-}
-'''
-
-BOOST_THREAD_CODE = '''
-#include <boost/thread.hpp>
-int main() { boost::thread t; }
-'''
-
-BOOST_LOG_CODE = '''
-#include <boost/log/trivial.hpp>
-#include <boost/log/utility/setup/console.hpp>
-#include <boost/log/utility/setup/common_attributes.hpp>
-int main() {
-	using namespace boost::log;
-	add_common_attributes();
-	add_console_log(std::clog, keywords::format = "%Message%");
-	BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl;
-}
-'''
-
-# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
-PLATFORM = Utils.unversioned_sys_platform()
-detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
-detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
-detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
-BOOST_TOOLSETS = {
-	'borland':  'bcb',
-	'clang':	detect_clang,
-	'como':	 'como',
-	'cw':	   'cw',
-	'darwin':   'xgcc',
-	'edg':	  'edg',
-	'g++':	  detect_mingw,
-	'gcc':	  detect_mingw,
-	'icpc':	 detect_intel,
-	'intel':	detect_intel,
-	'kcc':	  'kcc',
-	'kylix':	'bck',
-	'mipspro':  'mp',
-	'mingw':	'mgw',
-	'msvc':	 'vc',
-	'qcc':	  'qcc',
-	'sun':	  'sw',
-	'sunc++':   'sw',
-	'tru64cxx': 'tru',
-	'vacpp':	'xlc'
-}
-
-
-def options(opt):
-	opt = opt.add_option_group('Boost Options')
-	opt.add_option('--boost-includes', type='string',
-				   default='', dest='boost_includes',
-				   help='''path to the directory where the boost includes are,
-				   e.g., /path/to/boost_1_55_0/stage/include''')
-	opt.add_option('--boost-libs', type='string',
-				   default='', dest='boost_libs',
-				   help='''path to the directory where the boost libs are,
-				   e.g., path/to/boost_1_55_0/stage/lib''')
-	opt.add_option('--boost-mt', action='store_true',
-				   default=False, dest='boost_mt',
-				   help='select multi-threaded libraries')
-	opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
-				   help='''select libraries with tags (gd for debug, static is automatically added),
-				   see doc Boost, Getting Started, chapter 6.1''')
-	opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
-				   help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
-	opt.add_option('--boost-toolset', type='string',
-				   default='', dest='boost_toolset',
-				   help='force a toolset e.g. msvc, vc90, \
-						gcc, mingw, mgw45 (default: auto)')
-	py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
-	opt.add_option('--boost-python', type='string',
-				   default=py_version, dest='boost_python',
-				   help='select the lib python with this version \
-						(default: %s)' % py_version)
-
-
-@conf
-def __boost_get_version_file(self, d):
-	if not d:
-		return None
-	dnode = self.root.find_dir(d)
-	if dnode:
-		return dnode.find_node(BOOST_VERSION_FILE)
-	return None
-
-@conf
-def boost_get_version(self, d):
-	"""silently retrieve the boost version number"""
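-	# returns a (BOOST_LIB_VERSION, BOOST_VERSION) pair, e.g. ('1_55', '105500')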
-	node = self.__boost_get_version_file(d)
-	if node:
-		try:
-			txt = node.read()
-		except EnvironmentError:
-			Logs.error("Could not read the file %r", node.abspath())
-		else:
-			re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
-			m1 = re_but1.search(txt)
-			re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
-			m2 = re_but2.search(txt)
-			if m1 and m2:
-				return (m1.group(1), m2.group(1))
-	return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")
-
-@conf
-def boost_get_includes(self, *k, **kw):
-	includes = k and k[0] or kw.get('includes')
-	if includes and self.__boost_get_version_file(includes):
-		return includes
-	for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
-		if self.__boost_get_version_file(d):
-			return d
-	if includes:
-		self.end_msg('headers not found in %s' % includes)
-		self.fatal('The configuration failed')
-	else:
-		self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
-		self.fatal('The configuration failed')
-
-
-@conf
-def boost_get_toolset(self, cc):
-	toolset = cc
-	if not cc:
-		build_platform = Utils.unversioned_sys_platform()
-		if build_platform in BOOST_TOOLSETS:
-			cc = build_platform
-		else:
-			cc = self.env.CXX_NAME
-	if cc in BOOST_TOOLSETS:
-		toolset = BOOST_TOOLSETS[cc]
-	return isinstance(toolset, str) and toolset or toolset(self.env)
-
-
-@conf
-def __boost_get_libs_path(self, *k, **kw):
-	''' return the lib path and all the files in it '''
-	if 'files' in kw:
-		return self.root.find_dir('.'), Utils.to_list(kw['files'])
-	libs = k and k[0] or kw.get('libs')
-	if libs:
-		path = self.root.find_dir(libs)
-		files = path.ant_glob('*boost_*')
-	if not libs or not files:
-		for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
-			if not d:
-				continue
-			path = self.root.find_dir(d)
-			if path:
-				files = path.ant_glob('*boost_*')
-				if files:
-					break
-			path = self.root.find_dir(d + '64')
-			if path:
-				files = path.ant_glob('*boost_*')
-				if files:
-					break
-	if not path:
-		if libs:
-			self.end_msg('libs not found in %s' % libs)
-			self.fatal('The configuration failed')
-		else:
-			self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
-			self.fatal('The configuration failed')
-
-	self.to_log('Found the boost path in %r with the libraries:' % path)
-	for x in files:
-		self.to_log('    %r' % x)
-	return path, files
-
-@conf
-def boost_get_libs(self, *k, **kw):
-	'''
-	return the lib path and the required libs
-	according to the parameters
-	'''
-	path, files = self.__boost_get_libs_path(**kw)
-	files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
-	toolset = self.boost_get_toolset(kw.get('toolset', ''))
-	toolset_pat = '(-%s[0-9]{0,3})' % toolset
-	version = '-%s' % self.env.BOOST_VERSION
-
-	def find_lib(re_lib, files):
-		for file in files:
-			if re_lib.search(file.name):
-				self.to_log('Found boost lib %s' % file)
-				return file
-		return None
-
-	def format_lib_name(name):
-		if name.startswith('lib') and self.env.CC_NAME != 'msvc':
-			name = name[3:]
-		return name[:name.rfind('.')]
-
-	def match_libs(lib_names, is_static):
-		libs = []
-		lib_names = Utils.to_list(lib_names)
-		if not lib_names:
-			return libs
-		t = []
-		if kw.get('mt', False):
-			t.append('-mt')
-		if kw.get('abi'):
-			t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
-		elif is_static:
-			t.append('-s')
-		tags_pat = t and ''.join(t) or ''
-		ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
-		ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN
-
-		for lib in lib_names:
-			if lib == 'python':
-				# for instance, with python='27',
-				# accepts '-py27', '-py2', '27', '-2.7' and '2'
-				# but will reject '-py3', '-py26', '26' and '3'
-				tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1])
-			else:
-				tags = tags_pat
-			# Try the libraries, from the strictest match to the loosest
-			for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
-							'boost_%s%s%s%s$' % (lib, tags, version, ext),
-							# Give up trying to find the right version
-							'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
-							'boost_%s%s%s$' % (lib, tags, ext),
-							'boost_%s%s$' % (lib, ext),
-							'boost_%s' % lib]:
-				self.to_log('Trying pattern %s' % pattern)
-				file = find_lib(re.compile(pattern), files)
-				if file:
-					libs.append(format_lib_name(file.name))
-					break
-			else:
-				self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
-				self.fatal('The configuration failed')
-		return libs
-
-	return  path.abspath(), match_libs(kw.get('lib'), False), match_libs(kw.get('stlib'), True)
-
-@conf
-def _check_pthread_flag(self, *k, **kw):
-	'''
-	Computes which flags should be added to CXXFLAGS and LINKFLAGS to compile in multi-threading mode
-
-	Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3,
-	boost/thread.hpp will trigger a #error if -pthread isn't used:
-	  boost/config/requires_threads.hpp:47:5: #error "Compiler threading support
-	  is not turned on. Please set the correct command line options for
-	  threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)"
-
-	Based on _BOOST_PTHREAD_FLAG(): https://github.com/tsuna/boost.m4/blob/master/build-aux/boost.m4
-	'''
-
-	var = kw.get('uselib_store', 'BOOST')
-
-	self.start_msg('Checking the flags needed to use pthreads')
-
-	# The ordering *is* (sometimes) important.  Some notes on the
-	# individual items follow:
-	# (none): in case threads are in libc; should be tried before -Kthread and
-	#       other compiler flags to prevent continual compiler warnings
-	# -lpthreads: AIX (must check this before -lpthread)
-	# -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
-	# -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
-	# -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
-	# -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads)
-	# -pthreads: Solaris/GCC
-	# -mthreads: MinGW32/GCC, Lynx/GCC
-	# -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
-	#      doesn't hurt to check since this sometimes defines pthreads too;
-	#      also defines -D_REENTRANT)
-	#      ... -mt is also the pthreads flag for HP/aCC
-	# -lpthread: GNU Linux, etc.
-	# --thread-safe: KAI C++
-	if Utils.unversioned_sys_platform() == "sunos":
-		# On Solaris (at least, for some versions), libc contains stubbed
-		# (non-functional) versions of the pthreads routines, so link-based
-		# tests will erroneously succeed.  (We need to link with -pthreads/-mt/
-		# -lpthread.)  (The stubs are missing pthread_cleanup_push, or rather
-		# a function called by this macro, so we could check for that, but
-		# who knows whether they'll stub that too in a future libc.)  So,
-		# we'll just look for -pthreads and -lpthread first:
-		boost_pthread_flags = ["-pthreads", "-lpthread", "-mt", "-pthread"]
-	else:
-		boost_pthread_flags = ["", "-lpthreads", "-Kthread", "-kthread", "-llthread", "-pthread",
-							   "-pthreads", "-mthreads", "-lpthread", "--thread-safe", "-mt"]
-
-	for boost_pthread_flag in boost_pthread_flags:
-		try:
-			self.env.stash()
-			self.env.append_value('CXXFLAGS_%s' % var, boost_pthread_flag)
-			self.env.append_value('LINKFLAGS_%s' % var, boost_pthread_flag)
-			self.check_cxx(code=PTHREAD_CODE, msg=None, use=var, execute=False)
-
-			self.end_msg(boost_pthread_flag)
-			return
-		except self.errors.ConfigurationError:
-			self.env.revert()
-	self.end_msg('None')
-
-@conf
-def check_boost(self, *k, **kw):
-	"""
-	Initialize boost libraries to be used.
-
-	Keywords: you can pass the same parameters as with the command line (without "--boost-").
-	Note that the command line has the priority, and should preferably be used.
-	"""
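-	# e.g. conf.check_boost(lib='system filesystem', stlib='regex', mt=True)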
-	if not self.env['CXX']:
-		self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
-
-	params = {
-		'lib': k and k[0] or kw.get('lib'),
-		'stlib': kw.get('stlib')
-	}
-	for key, value in self.options.__dict__.items():
-		if not key.startswith('boost_'):
-			continue
-		key = key[len('boost_'):]
-		params[key] = value and value or kw.get(key, '')
-
-	var = kw.get('uselib_store', 'BOOST')
-
-	self.find_program('dpkg-architecture', var='DPKG_ARCHITECTURE', mandatory=False)
-	if self.env.DPKG_ARCHITECTURE:
-		deb_host_multiarch = self.cmd_and_log([self.env.DPKG_ARCHITECTURE[0], '-qDEB_HOST_MULTIARCH'])
-		BOOST_LIBS.insert(0, '/usr/lib/%s' % deb_host_multiarch.strip())
-
-	self.start_msg('Checking boost includes')
-	self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
-	versions = self.boost_get_version(inc)
-	self.env.BOOST_VERSION = versions[0]
-	self.env.BOOST_VERSION_NUMBER = int(versions[1])
-	self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
-							   int(versions[1]) / 100 % 1000,
-							   int(versions[1]) % 100))
-	if Logs.verbose:
-		Logs.pprint('CYAN', '	path : %s' % self.env['INCLUDES_%s' % var])
-
-	if not params['lib'] and not params['stlib']:
-		return
-	if 'static' in kw or 'static' in params:
-		Logs.warn('boost: static parameter is deprecated, use stlib instead.')
-	self.start_msg('Checking boost libs')
-	path, libs, stlibs = self.boost_get_libs(**params)
-	self.env['LIBPATH_%s' % var] = [path]
-	self.env['STLIBPATH_%s' % var] = [path]
-	self.env['LIB_%s' % var] = libs
-	self.env['STLIB_%s' % var] = stlibs
-	self.end_msg('ok')
-	if Logs.verbose:
-		Logs.pprint('CYAN', '	path : %s' % path)
-		Logs.pprint('CYAN', '	shared libs : %s' % libs)
-		Logs.pprint('CYAN', '	static libs : %s' % stlibs)
-
-	def has_shlib(lib):
-		return params['lib'] and lib in params['lib']
-	def has_stlib(lib):
-		return params['stlib'] and lib in params['stlib']
-	def has_lib(lib):
-		return has_shlib(lib) or has_stlib(lib)
-	if has_lib('thread'):
-		# not inside try_link to make check visible in the output
-		self._check_pthread_flag(*k, **kw)
-
-	def try_link():
-		if has_lib('system'):
-			self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
-		if has_lib('thread'):
-			self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
-		if has_lib('log'):
-			if not has_lib('thread'):
-				self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
-			if has_shlib('log'):
-				self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK']
-			self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
-
-	if params.get('linkage_autodetect', False):
-		self.start_msg("Attempting to detect boost linkage flags")
-		toolset = self.boost_get_toolset(kw.get('toolset', ''))
-		if toolset in ('vc',):
-			# disable auto-linking feature, causing error LNK1181
-			# because the code wants to be linked against
-			self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
-
-			# if no dlls are present, we guess the .lib files are not stubs
-			has_dlls = False
-			for x in Utils.listdir(path):
-				if x.endswith(self.env.cxxshlib_PATTERN % ''):
-					has_dlls = True
-					break
-			if not has_dlls:
-				self.env['STLIBPATH_%s' % var] = [path]
-				self.env['STLIB_%s' % var] = libs
-				del self.env['LIB_%s' % var]
-				del self.env['LIBPATH_%s' % var]
-
-			# we attempt to play with some known-to-work CXXFLAGS combinations
-			for cxxflags in (['/MD', '/EHsc'], []):
-				self.env.stash()
-				self.env["CXXFLAGS_%s" % var] += cxxflags
-				try:
-					try_link()
-				except Errors.ConfigurationError as e:
-					self.env.revert()
-					exc = e
-				else:
-					self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
-					exc = None
-					self.env.commit()
-					break
-
-			if exc is not None:
-				self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
-				self.fatal('The configuration failed')
-		else:
-			self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
-			self.fatal('The configuration failed')
-	else:
-		self.start_msg('Checking for boost linkage')
-		try:
-			try_link()
-		except Errors.ConfigurationError as e:
-			self.end_msg("Could not link against boost libraries using supplied options")
-			self.fatal('The configuration failed')
-		self.end_msg('ok')
-
-
-@feature('cxx')
-@after_method('apply_link')
-def install_boost(self):
-	if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
-		return
-	install_boost.done = True
-	inst_to = getattr(self, 'install_path', '${BINDIR}')
-	for lib in self.env.LIB_BOOST:
-		try:
-			file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
-			self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file))
-		except Exception:
-			continue
-install_boost.done = False
-
diff --git a/waflib/extras/build_file_tracker.py b/waflib/extras/build_file_tracker.py
deleted file mode 100644
index c4f26fd..0000000
--- a/waflib/extras/build_file_tracker.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015
-
-"""
-Force files to depend on the timestamps of those located in the build directory. You may
-want to use this to force partial rebuilds, see playground/track_output_files/ for a working example.
-
-Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example,
-or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
-or to hash the file in the build directory with its timestamp
-"""
-
-import os
-from waflib import Node, Utils
-
-def get_bld_sig(self):
-	if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
-		return Utils.h_file(self.abspath())
-
-	try:
-		# add the creation time to the signature
-		return self.sig + str(os.stat(self.abspath()).st_mtime)
-	except AttributeError:
-		return None
-
-Node.Node.get_bld_sig = get_bld_sig
-
diff --git a/waflib/extras/build_logs.py b/waflib/extras/build_logs.py
deleted file mode 100644
index cdf8ed0..0000000
--- a/waflib/extras/build_logs.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2013 (ita)
-
-"""
-A system for recording all outputs to a log file. Just add the following to your wscript file::
-
-  def init(ctx):
-    ctx.load('build_logs')
-"""
-
-import atexit, sys, time, os, shutil, threading
-from waflib import ansiterm, Logs, Context
-
-# adding the logs under the build/ directory will clash with the clean/ command
-try:
-	up = os.path.dirname(Context.g_module.__file__)
-except AttributeError:
-	up = '.'
-LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log'))
-
-wlock = threading.Lock()
-class log_to_file(object):
-	def __init__(self, stream, fileobj, filename):
-		self.stream = stream
-		self.encoding = self.stream.encoding
-		self.fileobj = fileobj
-		self.filename = filename
-		self.is_valid = True
-	def replace_colors(self, data):
-		for x in Logs.colors_lst.values():
-			if isinstance(x, str):
-				data = data.replace(x, '')
-		return data
-	def write(self, data):
-		try:
-			wlock.acquire()
-			self.stream.write(data)
-			self.stream.flush()
-			if self.is_valid:
-				self.fileobj.write(self.replace_colors(data))
-		finally:
-			wlock.release()
-	def fileno(self):
-		return self.stream.fileno()
-	def flush(self):
-		self.stream.flush()
-		if self.is_valid:
-			self.fileobj.flush()
-	def isatty(self):
-		return self.stream.isatty()
-
-def init(ctx):
-	global LOGFILE
-	filename = os.path.abspath(LOGFILE)
-	try:
-		os.makedirs(os.path.dirname(os.path.abspath(filename)))
-	except OSError:
-		pass
-
-	if hasattr(os, 'O_NOINHERIT'):
-		fd = os.open(LOGFILE, os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT)
-		fileobj = os.fdopen(fd, 'w')
-	else:
-		fileobj = open(LOGFILE, 'w')
-	old_stderr = sys.stderr
-
-	# sys.stdout has already been replaced, so __stdout__ will be faster
-	#sys.stdout = log_to_file(sys.stdout, fileobj, filename)
-	#sys.stderr = log_to_file(sys.stderr, fileobj, filename)
-	def wrap(stream):
-		if stream.isatty():
-			return ansiterm.AnsiTerm(stream)
-		return stream
-	sys.stdout = log_to_file(wrap(sys.__stdout__), fileobj, filename)
-	sys.stderr = log_to_file(wrap(sys.__stderr__), fileobj, filename)
-
-	# now mess with the logging module...
-	for x in Logs.log.handlers:
-		try:
-			stream = x.stream
-		except AttributeError:
-			pass
-		else:
-			if id(stream) == id(old_stderr):
-				x.stream = sys.stderr
-
-def exit_cleanup():
-	try:
-		fileobj = sys.stdout.fileobj
-	except AttributeError:
-		pass
-	else:
-		sys.stdout.is_valid = False
-		sys.stderr.is_valid = False
-		fileobj.close()
-		filename = sys.stdout.filename
-
-		Logs.info('Output logged to %r', filename)
-
-		# then copy the log file to "latest.log" if possible
-		up = os.path.dirname(os.path.abspath(filename))
-		try:
-			shutil.copy(filename, os.path.join(up, 'latest.log'))
-		except OSError:
-			# this may fail on windows due to processes spawned
-			pass
-
-atexit.register(exit_cleanup)
-
diff --git a/waflib/extras/buildcopy.py b/waflib/extras/buildcopy.py
deleted file mode 100644
index eaff7e6..0000000
--- a/waflib/extras/buildcopy.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Calle Rosenquist, 2017 (xbreak)
-"""
-Create task that copies source files to the associated build node.
-This is useful to e.g. construct a complete Python package so it can be unit tested
-without installation.
-
-Source files to be copied can be specified either in `buildcopy_source` attribute, or
-`source` attribute. If both are specified `buildcopy_source` has priority.
-
-Examples::
-
-	def build(bld):
-		bld(name             = 'bar',
-			features         = 'py buildcopy',
-			source           = bld.path.ant_glob('src/bar/*.py'))
-
-		bld(name             = 'py baz',
-			features         = 'buildcopy',
-			buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt'])
-
-"""
-import os, shutil
-from waflib import Errors, Task, TaskGen, Utils, Node, Logs
-
-@TaskGen.before_method('process_source')
-@TaskGen.feature('buildcopy')
-def make_buildcopy(self):
-	"""
-	Creates the buildcopy task.
-	"""
-	def to_src_nodes(lst):
-		"""Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives
-		preference to nodes in build.
-		"""
-		if isinstance(lst, Node.Node):
-			if not lst.is_src():
-				raise Errors.WafError('buildcopy: node %s is not in src'%lst)
-			if not os.path.isfile(lst.abspath()):
-				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst)
-			return lst
-
-		if isinstance(lst, str):
-			lst = [x for x in Utils.split_path(lst) if x and x != '.']
-
-		node = self.bld.path.get_src().search_node(lst)
-		if node:
-			if not os.path.isfile(node.abspath()):
-				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
-			return node
-
-		node = self.bld.path.get_src().find_node(lst)
-		if node:
-			if not os.path.isfile(node.abspath()):
-				raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
-			return node
-		raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst))
-
-	nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ]
-	if not nodes:
-		Logs.warn('buildcopy: No source files provided to buildcopy in %s (set `buildcopy_source` or `source`)',
-			self)
-		return
-	node_pairs = [(n, n.get_bld()) for n in nodes]
-	self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs)
-
-class buildcopy(Task.Task):
-	"""
-	Copy for each pair `n` in `node_pairs`: n[0] -> n[1].
-
-	Attribute `node_pairs` should contain a list of tuples describing source and target:
-
-		node_pairs = [(in, out), ...]
-
-	"""
-	color = 'PINK'
-
-	def keyword(self):
-		return 'Copying'
-
-	def run(self):
-		for f,t in self.node_pairs:
-			t.parent.mkdir()
-			shutil.copy2(f.abspath(), t.abspath())
diff --git a/waflib/extras/c_bgxlc.py b/waflib/extras/c_bgxlc.py
deleted file mode 100644
index 6e3eaf7..0000000
--- a/waflib/extras/c_bgxlc.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-"""
-IBM XL Compiler for Blue Gene
-"""
-
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-
-from waflib.Tools import xlc # method xlc_common_flags
-from waflib.Tools.compiler_c import c_compiler
-c_compiler['linux'].append('c_bgxlc')
-
-@conf
-def find_bgxlc(conf):
-	cc = conf.find_program(['bgxlc_r','bgxlc'], var='CC')
-	conf.get_xlc_version(cc)
-	conf.env.CC = cc
-	conf.env.CC_NAME = 'bgxlc'
-
-def configure(conf):
-	conf.find_bgxlc()
-	conf.find_ar()
-	conf.xlc_common_flags()
-	conf.env.LINKFLAGS_cshlib = ['-G','-Wl,-bexpfull']
-	conf.env.LINKFLAGS_cprogram = []
-	conf.cc_load_tools()
-	conf.cc_add_flags()
-	conf.link_add_flags()
-
diff --git a/waflib/extras/c_dumbpreproc.py b/waflib/extras/c_dumbpreproc.py
deleted file mode 100644
index ce9e1a4..0000000
--- a/waflib/extras/c_dumbpreproc.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"""
-Dumb C/C++ preprocessor for finding dependencies
-
-It will look at all include files it can find after removing the comments, so the following
-will always add the dependency on both "a.h" and "b.h"::
-
-	#include "a.h"
-	#ifdef B
-		#include "b.h"
-	#endif
-	int main() {
-		return 0;
-	}
-
-To use::
-
-	def configure(conf):
-		conf.load('compiler_c')
-		conf.load('c_dumbpreproc')
-"""
-
-import re
-from waflib.Tools import c_preproc
-
-re_inc = re.compile(
-	'^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
-	re.IGNORECASE | re.MULTILINE)
-
-def lines_includes(node):
-	code = node.read()
-	if c_preproc.use_trigraphs:
-		for (a, b) in c_preproc.trig_def:
-			code = code.split(a).join(b)
-	code = c_preproc.re_nl.sub('', code)
-	code = c_preproc.re_cpp.sub(c_preproc.repl, code)
-	return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
-
-parser = c_preproc.c_parser
-class dumb_parser(parser):
-	def addlines(self, node):
-		if node in self.nodes[:-1]:
-			return
-		self.currentnode_stack.append(node.parent)
-
-		# Avoid reading the same files again
-		try:
-			lines = self.parse_cache[node]
-		except KeyError:
-			lines = self.parse_cache[node] = lines_includes(node)
-
-		self.lines = lines + [(c_preproc.POPFILE, '')] +  self.lines
-
-	def start(self, node, env):
-		try:
-			self.parse_cache = node.ctx.parse_cache
-		except AttributeError:
-			self.parse_cache = node.ctx.parse_cache = {}
-
-		self.addlines(node)
-		while self.lines:
-			(x, y) = self.lines.pop(0)
-			if x == c_preproc.POPFILE:
-				self.currentnode_stack.pop()
-				continue
-			self.tryfind(y)
-
-c_preproc.c_parser = dumb_parser
-
diff --git a/waflib/extras/c_emscripten.py b/waflib/extras/c_emscripten.py
deleted file mode 100644
index e1ac494..0000000
--- a/waflib/extras/c_emscripten.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 vi:ts=4:noexpandtab
-
-import subprocess, shlex, sys
-
-from waflib.Tools import ccroot, gcc, gxx
-from waflib.Configure import conf
-from waflib.TaskGen import after_method, feature
-
-from waflib.Tools.compiler_c import c_compiler
-from waflib.Tools.compiler_cxx import cxx_compiler
-
-for supported_os in ('linux', 'darwin', 'gnu', 'aix'):
-	c_compiler[supported_os].append('c_emscripten')
-	cxx_compiler[supported_os].append('c_emscripten')
-
-
-@conf
-def get_emscripten_version(conf, cc):
-	"""
-	Emscripten does not support reading source from stdin ('-') the way clang/gcc
-	do, so detect the compiler version by preprocessing a dummy file instead
-	"""
-
-	dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
-	dummy.write("")
-	cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
-	env = conf.env.env or None
-	try:
-		p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
-		out = p.communicate()[0]
-	except Exception as e:
-		conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
-
-	if not isinstance(out, str):
-		out = out.decode(sys.stdout.encoding or 'latin-1')
-
-	k = {}
-	out = out.splitlines()
-	for line in out:
-		lst = shlex.split(line)
-		if len(lst)>2:
-			key = lst[1]
-			val = lst[2]
-			k[key] = val
-
-	if not ('__clang__' in k and 'EMSCRIPTEN' in k):
-		conf.fatal('Could not determine the emscripten compiler version.')
-
-	conf.env.DEST_OS = 'generic'
-	conf.env.DEST_BINFMT = 'elf'
-	conf.env.DEST_CPU = 'asm-js'
-	conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
-	return k
-
-@conf
-def find_emscripten(conf):
-	cc = conf.find_program(['emcc'], var='CC')
-	conf.get_emscripten_version(cc)
-	conf.env.CC = cc
-	conf.env.CC_NAME = 'emscripten'
-	cxx = conf.find_program(['em++'], var='CXX')
-	conf.env.CXX = cxx
-	conf.env.CXX_NAME = 'emscripten'
-	conf.find_program(['emar'], var='AR')
-
-def configure(conf):
-	conf.find_emscripten()
-	conf.find_ar()
-	conf.gcc_common_flags()
-	conf.gxx_common_flags()
-	conf.cc_load_tools()
-	conf.cc_add_flags()
-	conf.cxx_load_tools()
-	conf.cxx_add_flags()
-	conf.link_add_flags()
-	conf.env.ARFLAGS = ['rcs']
-	conf.env.cshlib_PATTERN = '%s.js'
-	conf.env.cxxshlib_PATTERN = '%s.js'
-	conf.env.cstlib_PATTERN = '%s.a'
-	conf.env.cxxstlib_PATTERN = '%s.a'
-	conf.env.cprogram_PATTERN = '%s.html'
-	conf.env.cxxprogram_PATTERN = '%s.html'
-	conf.env.CXX_TGT_F           = ['-c', '-o', '']
-	conf.env.CC_TGT_F            = ['-c', '-o', '']
-	conf.env.CXXLNK_TGT_F        = ['-o', '']
-	conf.env.CCLNK_TGT_F         = ['-o', '']
-	conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
diff --git a/waflib/extras/c_nec.py b/waflib/extras/c_nec.py
deleted file mode 100644
index 96bfae4..0000000
--- a/waflib/extras/c_nec.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-"""
-NEC SX Compiler for SX vector systems
-"""
-
-import re
-from waflib import Utils
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-
-from waflib.Tools import xlc # method xlc_common_flags
-from waflib.Tools.compiler_c import c_compiler
-c_compiler['linux'].append('c_nec')
-
-@conf
-def find_sxc(conf):
-	cc = conf.find_program(['sxcc'], var='CC')
-	conf.get_sxc_version(cc)
-	conf.env.CC = cc
-	conf.env.CC_NAME = 'sxcc'
-
-@conf
-def get_sxc_version(conf, fc):
-	version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
-	cmd = fc + ['-V']
-	p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
-	out, err = p.communicate()
-
-	if out:
-		match = version_re(out)
-	else:
-		match = version_re(err)
-	if not match:
-		conf.fatal('Could not determine the NEC C compiler version.')
-	k = match.groupdict()
-	conf.env['C_VERSION'] = (k['major'], k['minor'])
-
-@conf
-def sxc_common_flags(conf):
-	v=conf.env
-	v['CC_SRC_F']=[]
-	v['CC_TGT_F']=['-c','-o']
-	if not v['LINK_CC']:
-		v['LINK_CC']=v['CC']
-	v['CCLNK_SRC_F']=[]
-	v['CCLNK_TGT_F']=['-o']
-	v['CPPPATH_ST']='-I%s'
-	v['DEFINES_ST']='-D%s'
-	v['LIB_ST']='-l%s'
-	v['LIBPATH_ST']='-L%s'
-	v['STLIB_ST']='-l%s'
-	v['STLIBPATH_ST']='-L%s'
-	v['RPATH_ST']=''
-	v['SONAME_ST']=[]
-	v['SHLIB_MARKER']=[]
-	v['STLIB_MARKER']=[]
-	v['LINKFLAGS_cprogram']=['']
-	v['cprogram_PATTERN']='%s'
-	v['CFLAGS_cshlib']=['-fPIC']
-	v['LINKFLAGS_cshlib']=['']
-	v['cshlib_PATTERN']='lib%s.so'
-	v['LINKFLAGS_cstlib']=[]
-	v['cstlib_PATTERN']='lib%s.a'
-
-def configure(conf):
-	conf.find_sxc()
-	conf.find_program('sxar', var='AR')
-	conf.sxc_common_flags()
-	conf.cc_load_tools()
-	conf.cc_add_flags()
-	conf.link_add_flags()
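A hypothetical wscript using this tool (file and target names are illustrative):

    def configure(conf):
        conf.load('c_nec')  # finds sxcc/sxar and applies sxc_common_flags()

    def build(bld):
        bld.shlib(source='vec.c', target='vec')  # libvec.so via cshlib_PATTERN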
diff --git a/waflib/extras/cabal.py b/waflib/extras/cabal.py
deleted file mode 100644
index e10a0d1..0000000
--- a/waflib/extras/cabal.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Anton Feldmann, 2012
-# "Base for cabal"
-
-from waflib import Task, Utils
-from waflib.TaskGen import extension
-from waflib.Utils import threading
-from shutil import rmtree
-
-lock = threading.Lock()
-registering = False
-
-def configure(self):
-    self.find_program('cabal', var='CABAL')
-    self.find_program('ghc-pkg', var='GHCPKG')
-    pkgconfd = self.bldnode.abspath() + '/package.conf.d'
-    self.env.PREFIX = self.bldnode.abspath() + '/dist'
-    self.env.PKGCONFD = pkgconfd
-    if self.root.find_node(pkgconfd + '/package.cache'):
-        self.msg('Using existing package database', pkgconfd, color='CYAN')
-    else:
-        pkgdir = self.root.find_dir(pkgconfd)
-        if pkgdir:
-            self.msg('Deleting corrupt package database', pkgdir.abspath(), color='RED')
-            rmtree(pkgdir.abspath())
-            pkgdir = None
-
-        self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])
-        self.msg('Created package database', pkgconfd, color='YELLOW' if pkgdir else 'GREEN')
-
-@extension('.cabal')
-def process_cabal(self, node):
-    out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
-    package_node = node.change_ext('.package')
-    package_node = out_dir_node.find_or_declare(package_node.name)
-    build_node   = node.parent.get_bld()
-    build_path   = build_node.abspath()
-    config_node  = build_node.find_or_declare('setup-config')
-    inplace_node = build_node.find_or_declare('package.conf.inplace')
-
-    config_task = self.create_task('cabal_configure', node)
-    config_task.cwd = node.parent.abspath()
-    config_task.depends_on = getattr(self, 'depends_on', '')
-    config_task.build_path = build_path
-    config_task.set_outputs(config_node)
-
-    build_task = self.create_task('cabal_build', config_node)
-    build_task.cwd = node.parent.abspath()
-    build_task.build_path = build_path
-    build_task.set_outputs(inplace_node)
-
-    copy_task = self.create_task('cabal_copy', inplace_node)
-    copy_task.cwd = node.parent.abspath()
-    copy_task.depends_on = getattr(self, 'depends_on', '')
-    copy_task.build_path = build_path
-
-    last_task = copy_task
-    task_list = [config_task, build_task, copy_task]
-
-    if (getattr(self, 'register', False)):
-        register_task = self.create_task('cabal_register', inplace_node)
-        register_task.cwd = node.parent.abspath()
-        register_task.set_run_after(copy_task)
-        register_task.build_path = build_path
-
-        pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
-        pkgreg_task.cwd = node.parent.abspath()
-        pkgreg_task.set_run_after(register_task)
-        pkgreg_task.build_path = build_path
-
-        last_task = pkgreg_task
-        task_list += [register_task, pkgreg_task]
-
-    touch_task = self.create_task('cabal_touch', inplace_node)
-    touch_task.set_run_after(last_task)
-    touch_task.set_outputs(package_node)
-    touch_task.build_path = build_path
-
-    task_list += [touch_task]
-
-    return task_list
-
-def get_all_src_deps(node):
-    hs_deps = node.ant_glob('**/*.hs')
-    hsc_deps = node.ant_glob('**/*.hsc')
-    lhs_deps = node.ant_glob('**/*.lhs')
-    c_deps = node.ant_glob('**/*.c')
-    cpp_deps = node.ant_glob('**/*.cpp')
-    proto_deps = node.ant_glob('**/*.proto')
-    return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], [])
-
-class Cabal(Task.Task):
-    def scan(self):
-        return (get_all_src_deps(self.generator.path), ())
-
-class cabal_configure(Cabal):
-    run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
-    shell = True
-
-    def scan(self):
-        out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
-        deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
-        return (deps, ())
-
-class cabal_build(Cabal):
-    run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
-    shell = True
-
-class cabal_copy(Cabal):
-    run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
-    shell = True
-
-class cabal_register(Cabal):
-    run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
-    shell = True
-
-class ghcpkg_register(Cabal):
-    run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
-    shell = True
-
-    def runnable_status(self):
-        global lock, registering
-
-        val = False
-        lock.acquire()
-        val = registering
-        lock.release()
-
-        if val:
-            return Task.ASK_LATER
-
-        ret = Task.Task.runnable_status(self)
-        if ret == Task.RUN_ME:
-            lock.acquire()
-            registering = True
-            lock.release()
-
-        return ret
-
-    def post_run(self):
-        global lock, registering
-
-        lock.acquire()
-        registering = False
-        lock.release()
-
-        return Task.Task.post_run(self)
-
-class cabal_touch(Cabal):
-    run_str = 'touch ${TGT}'
-
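A sketch of driving .cabal files from a wscript; 'register' and 'depends_on' are the
attributes read by process_cabal() above, while the file names are hypothetical:

    def configure(conf):
        conf.load('cabal')  # finds cabal/ghc-pkg and initializes package.conf.d

    def build(bld):
        bld(source='mylib.cabal', register=True)       # also runs cabal/ghc-pkg register
        bld(source='myapp.cabal', depends_on='mylib')  # waits on mylib.package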
diff --git a/waflib/extras/cfg_altoptions.py b/waflib/extras/cfg_altoptions.py
deleted file mode 100644
index 47b1189..0000000
--- a/waflib/extras/cfg_altoptions.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# Tool to extend c_config.check_cfg()
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-
-This tool allows working around the absence of ``*-config`` programs
-on systems, by keeping the same clean configuration syntax but inferring
-values or permitting their modification via the options interface.
-
-Note that pkg-config can also support setting ``PKG_CONFIG_PATH``,
-so you can put custom files in a folder containing new .pc files.
-This tool could also be implemented by taking advantage of this fact.
-
-Usage::
-
-   def options(opt):
-     opt.load('cfg_altoptions')
-     opt.add_package_option('package')
-
-   def configure(conf):
-     conf.load('cfg_altoptions')
-     conf.check_cfg(...)
-
-Known issues:
-
-- Behavior with different build contexts...
-
-"""
-
-import os
-import functools
-from waflib import Configure, Options, Errors
-
-def name_to_dest(x):
-	return x.lower().replace('-', '_')
-
-
-def options(opt):
-	def x(opt, param):
-		dest = name_to_dest(param)
-		gr = opt.get_option_group("configure options")
-		gr.add_option('--%s-root' % dest,
-		 help="path containing include and lib subfolders for %s" \
-		  % param,
-		)
-
-	opt.add_package_option = functools.partial(x, opt)
-
-
-check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')
-
-@Configure.conf
-def check_cfg(conf, *k, **kw):
-	if k:
-		lst = k[0].split()
-		kw['package'] = lst[0]
-		kw['args'] = ' '.join(lst[1:])
-
-	if not 'package' in kw:
-		return check_cfg_old(conf, **kw)
-
-	package = kw['package']
-
-	package_lo = name_to_dest(package)
-	package_hi = package.upper().replace('-', '_') # TODO FIXME
-	package_hi = kw.get('uselib_store', package_hi)
-
-	def check_folder(path, name):
-		try:
-			assert os.path.isdir(path)
-		except AssertionError:
-			raise Errors.ConfigurationError(
-				"%s_%s (%s) is not a folder!" \
-				% (package_lo, name, path))
-		return path
-
-	root = getattr(Options.options, '%s_root' % package_lo, None)
-
-	if root is None:
-		return check_cfg_old(conf, **kw)
-	else:
-		def add_manual_var(k, v):
-			conf.start_msg('Adding for %s a manual var' % (package))
-			conf.env["%s_%s" % (k, package_hi)] = v
-			conf.end_msg("%s = %s" % (k, v))
-
-
-		check_folder(root, 'root')
-
-		pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
-		add_manual_var('INCLUDES', [pkg_inc])
-		pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
-		add_manual_var('LIBPATH', [pkg_lib])
-		add_manual_var('LIB', [package])
-
-		for x in kw.get('manual_deps', []):
-			for k, v in sorted(conf.env.get_merged_dict().items()):
-				if k.endswith('_%s' % x):
-					k = k.replace('_%s' % x, '')
-					conf.start_msg('Adding for %s a manual dep' \
-					 %(package))
-					conf.env["%s_%s" % (k, package_hi)] += v
-					conf.end_msg('%s += %s' % (k, v))
-
-		return True
-
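Concretely, for a hypothetical package 'foo' with no pkg-config data installed, the
option group above makes the following work:

    ./waf configure --foo-root=/opt/foo

check_cfg() then fills INCLUDES_FOO, LIBPATH_FOO and LIB_FOO from /opt/foo/include and
/opt/foo/lib instead of invoking a *-config program.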
diff --git a/waflib/extras/clang_compilation_database.py b/waflib/extras/clang_compilation_database.py
deleted file mode 100644
index 4d9b5e2..0000000
--- a/waflib/extras/clang_compilation_database.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Christoph Koke, 2013
-
-"""
-Writes the C and C++ compile commands into build/compile_commands.json.
-See http://clang.llvm.org/docs/JSONCompilationDatabase.html
-
-Usage:
-
-    def configure(conf):
-        conf.load('compiler_cxx')
-        ...
-        conf.load('clang_compilation_database')
-"""
-
-import sys, os, json, shlex, pipes
-from waflib import Logs, TaskGen, Task
-
-Task.Task.keep_last_cmd = True
-
-@TaskGen.feature('c', 'cxx')
-@TaskGen.after_method('process_use')
-def collect_compilation_db_tasks(self):
-	"Add a compilation database entry for compiled tasks"
-	try:
-		clang_db = self.bld.clang_compilation_database_tasks
-	except AttributeError:
-		clang_db = self.bld.clang_compilation_database_tasks = []
-		self.bld.add_post_fun(write_compilation_database)
-
-	tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
-	for task in getattr(self, 'compiled_tasks', []):
-		if isinstance(task, tup):
-			clang_db.append(task)
-
-def write_compilation_database(ctx):
-	"Write the clang compilation database as JSON"
-	database_file = ctx.bldnode.make_node('compile_commands.json')
-	Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path))
-	try:
-		root = json.load(database_file)
-	except IOError:
-		root = []
-	clang_db = dict((x['file'], x) for x in root)
-	for task in getattr(ctx, 'clang_compilation_database_tasks', []):
-		try:
-			cmd = task.last_cmd
-		except AttributeError:
-			continue
-		directory = getattr(task, 'cwd', ctx.variant_dir)
-		f_node = task.inputs[0]
-		filename = os.path.relpath(f_node.abspath(), directory)
-		entry = {
-			"directory": directory,
-			"arguments": cmd,
-			"file": filename,
-		}
-		clang_db[filename] = entry
-	root = list(clang_db.values())
-	database_file.write(json.dumps(root, indent=2))
-
-# Override the runnable_status function to do a dummy/dry run when the file doesn't need to be compiled.
-# This will make sure compile_commands.json is always fully up to date.
-# Previously you could end up with a partial compile_commands.json if the build failed.
-for x in ('c', 'cxx'):
-	if x not in Task.classes:
-		continue
-
-	t = Task.classes[x]
-
-	def runnable_status(self):
-		def exec_command(cmd, **kw):
-			pass
-
-		run_status = self.old_runnable_status()
-		if run_status == Task.SKIP_ME:
-			setattr(self, 'old_exec_command', getattr(self, 'exec_command', None))
-			setattr(self, 'exec_command', exec_command)
-			self.run()
-			setattr(self, 'exec_command', getattr(self, 'old_exec_command', None))
-		return run_status
-
-	setattr(t, 'old_runnable_status', getattr(t, 'runnable_status', None))
-	setattr(t, 'runnable_status', runnable_status)
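The resulting build/compile_commands.json is a standard JSON compilation database, so
any clang-based tool can consume it, for example (illustrative invocation):

    clang-tidy -p build src/foo.c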
diff --git a/waflib/extras/codelite.py b/waflib/extras/codelite.py
deleted file mode 100644
index 523302c..0000000
--- a/waflib/extras/codelite.py
+++ /dev/null
@@ -1,875 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# CodeLite Project
-# Christian Klein (chrikle@berlios.de)
-# Created: Jan 2012
-# As a template for this file I used msvs.py
-# I hope this template will work properly
-
-"""
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
-1. Redistributions of source code must retain the above copyright
-   notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
-   notice, this list of conditions and the following disclaimer in the
-   documentation and/or other materials provided with the distribution.
-
-3. The name of the author may not be used to endorse or promote products
-   derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
-IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
-INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
-IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
-"""
-
-"""
-
-
-To add this tool to your project:
-def options(opt):
-        opt.load('codelite')
-
-It can be a good idea to add the sync_exec tool too.
-
-To generate solution files:
-$ waf configure codelite
-
-To customize the outputs, provide subclasses in your wscript files:
-
-from waflib.extras import codelite
-class vsnode_target(codelite.vsnode_target):
-        def get_build_command(self, props):
-                # likely to be required
-                return "waf.bat build"
-        def collect_source(self):
-                # likely to be required
-                ...
-class codelite_bar(codelite.codelite_generator):
-        def init(self):
-                codelite.codelite_generator.init(self)
-                self.vsnode_target = vsnode_target
-
-The codelite class re-uses the same build() function for reading the targets (task generators),
-you may therefore specify codelite settings on the context object:
-
-def build(bld):
-        bld.codelite_solution_name = 'foo.workspace'
-        bld.waf_command = 'waf.bat'
-        bld.projects_dir = bld.srcnode.make_node('')
-        bld.projects_dir.mkdir()
-
-
-ASSUMPTIONS:
-* a project can be either a directory or a target; project files are written only for targets that have source files
-* each project is a .project file, therefore the project uuid needs only to be a hash of the absolute path
-"""
-
-import os, re, sys
-import uuid # requires python 2.5
-from waflib.Build import BuildContext
-from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
-
-HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
-
-PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
-<CodeLite_Project Name="${project.name}" InternalType="Library">
-  <Plugins>
-    <Plugin Name="qmake">
-      <![CDATA[00010001N0005Release000000000000]]>
-    </Plugin>
-  </Plugins>
-  <Description/>
-  <Dependencies/>
-  <VirtualDirectory Name="src">
-  ${for x in project.source}  
-  ${if (project.get_key(x)=="sourcefile")}
-  <File Name="${x.abspath()}"/>
-  ${endif}
-  ${endfor}  
-  </VirtualDirectory>
-  <VirtualDirectory Name="include">  
-  ${for x in project.source}
-  ${if (project.get_key(x)=="headerfile")}
-  <File Name="${x.abspath()}"/>
-  ${endif}
-  ${endfor}
-  </VirtualDirectory>  
-  <Settings Type="Dynamic Library">
-    <GlobalSettings>
-      <Compiler Options="" C_Options="">
-        <IncludePath Value="."/>
-      </Compiler>
-      <Linker Options="">
-        <LibraryPath Value="."/>
-      </Linker>
-      <ResourceCompiler Options=""/>
-    </GlobalSettings>
-    <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
-      <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
-        <IncludePath Value="."/>
-        <IncludePath Value="."/>
-      </Compiler>
-      <Linker Options="" Required="yes">
-        <LibraryPath Value=""/>
-      </Linker>
-      <ResourceCompiler Options="" Required="no"/>
-      <General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/>
-      <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
-        <![CDATA[]]>
-      </Environment>
-      <Debugger IsRemote="no" RemoteHostName="" RemoteHostPort="" DebuggerPath="">
-        <PostConnectCommands/>
-        <StartupCommands/>
-      </Debugger>
-      <PreBuild/>
-      <PostBuild/>
-      <CustomBuild Enabled="yes">
-        ${py:b = project.build_properties[0]}
-        <RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
-        <CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
-        <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand> 
-        <Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
-        <Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>        
-        <Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
-        <Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target>
-        <Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target>
-        <Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target>
-        <PreprocessFileCommand/>
-        <SingleFileCommand/>
-        <MakefileGenerationCommand/>
-        <ThirdPartyToolName>None</ThirdPartyToolName>
-        <WorkingDirectory/>
-      </CustomBuild>
-      <AdditionalRules>
-        <CustomPostBuild/>
-        <CustomPreBuild/>
-      </AdditionalRules>
-      <Completion>
-        <ClangCmpFlags/>
-        <ClangPP/>
-        <SearchPaths/>
-      </Completion>
-    </Configuration>
-    <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
-      <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
-        <IncludePath Value="."/>
-      </Compiler>
-      <Linker Options="" Required="yes"/>
-      <ResourceCompiler Options="" Required="no"/>
-      <General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateReleaseArgs="no" ReleaseArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/>
-      <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
-        <![CDATA[
-      
-      
-      
-      ]]>
-      </Environment>
-      <Debugger IsRemote="no" RemoteHostName="" RemoteHostPort="" DebuggerPath="">
-        <PostConnectCommands/>
-        <StartupCommands/>
-      </Debugger>
-      <PreBuild/>
-      <PostBuild/>
-      <CustomBuild Enabled="no">
-        <RebuildCommand/>
-        <CleanCommand/>
-        <BuildCommand/>
-        <PreprocessFileCommand/>
-        <SingleFileCommand/>
-        <MakefileGenerationCommand/>
-        <ThirdPartyToolName/>
-        <WorkingDirectory/>
-      </CustomBuild>
-      <AdditionalRules>
-        <CustomPostBuild/>
-        <CustomPreBuild/>
-      </AdditionalRules>
-      <Completion>
-        <ClangCmpFlags/>
-        <ClangPP/>
-        <SearchPaths/>
-      </Completion>
-    </Configuration>
-  </Settings>
-</CodeLite_Project>'''
-
-
-
-
-SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
-<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
-${for p in project.all_projects}
-  <Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
-${endfor}
-  <BuildMatrix>
-    <WorkspaceConfiguration Name="Release" Selected="yes">
-${for p in project.all_projects}
-      <Project Name="${p.name}" ConfigName="Release"/>        
-${endfor}
-    </WorkspaceConfiguration>        
-  </BuildMatrix>
-</CodeLite_Workspace>'''
-
-
-
-COMPILE_TEMPLATE = '''def f(project):
-        lst = []
-        def xml_escape(value):
-                return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
-        %s
-
-        #f = open('cmd.txt', 'w')
-        #f.write(str(lst))
-        #f.close()
-        return ''.join(lst)
-'''
-reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
-def compile_template(line):
-        """
-        Compile a template expression into a Python function (like JSPs, but much shorter)
-        """
-        extr = []
-        def repl(match):
-                g = match.group
-                if g('dollar'):
-                        return "$"
-                elif g('backslash'):
-                        return "\\"
-                elif g('subst'):
-                        extr.append(g('code'))
-                        return "<<|@|>>"
-                return None
-
-        line2 = reg_act.sub(repl, line)
-        params = line2.split('<<|@|>>')
-        assert(extr)
-
-
-        indent = 0
-        buf = []
-        app = buf.append
-
-        def app(txt):
-                buf.append(indent * '\t' + txt)
-
-        for x in range(len(extr)):
-                if params[x]:
-                        app("lst.append(%r)" % params[x])
-
-                f = extr[x]
-                if f.startswith(('if', 'for')):
-                        app(f + ':')
-                        indent += 1
-                elif f.startswith('py:'):
-                        app(f[3:])
-                elif f.startswith(('endif', 'endfor')):
-                        indent -= 1
-                elif f.startswith(('else', 'elif')):
-                        indent -= 1
-                        app(f + ':')
-                        indent += 1
-                elif f.startswith('xml:'):
-                        app('lst.append(xml_escape(%s))' % f[4:])
-                else:
-                        #app('lst.append((%s) or "cannot find %s")' % (f, f))
-                        app('lst.append(%s)' % f)
-
-        if extr:
-                if params[-1]:
-                        app("lst.append(%r)" % params[-1])
-
-        fun = COMPILE_TEMPLATE % "\n\t".join(buf)
-        #print(fun)
-        return Task.funex(fun)
-
-
-re_blank = re.compile(r'(\n|\r|\s)*\n', re.M)
-def rm_blank_lines(txt):
-        txt = re_blank.sub('\r\n', txt)
-        return txt
-
-BOM = '\xef\xbb\xbf'
-try:
-        BOM = bytes(BOM, 'latin-1') # python 3
-except (TypeError, NameError):
-        pass
-
-def stealth_write(self, data, flags='wb'):
-        try:
-                unicode
-        except NameError:
-                data = data.encode('utf-8') # python 3
-        else:
-                data = data.decode(sys.getfilesystemencoding(), 'replace')
-                data = data.encode('utf-8')
-
-        if self.name.endswith('.project'):
-                data = BOM + data
-
-        try:
-                txt = self.read(flags='rb')
-                if txt != data:
-                        raise ValueError('must write')
-        except (IOError, ValueError):
-                self.write(data, flags=flags)
-        else:
-                Logs.debug('codelite: skipping %r', self)
-Node.Node.stealth_write = stealth_write
-
-re_quote = re.compile("[^a-zA-Z0-9-]")
-def quote(s):
-        return re_quote.sub("_", s)
-
-def xml_escape(value):
-        return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
-def make_uuid(v, prefix = None):
-        """
-        simple utility function
-        """
-        if isinstance(v, dict):
-                keys = list(v.keys())
-                keys.sort()
-                tmp = str([(k, v[k]) for k in keys])
-        else:
-                tmp = str(v)
-        d = Utils.md5(tmp.encode()).hexdigest().upper()
-        if prefix:
-                d = '%s%s' % (prefix, d[8:])
-        gid = uuid.UUID(d, version = 4)
-        return str(gid).upper()
-
-def diff(node, fromnode):
-        # difference between two nodes, but with "(..)" instead of ".."
-        c1 = node
-        c2 = fromnode
-
-        c1h = c1.height()
-        c2h = c2.height()
-
-        lst = []
-        up = 0
-
-        while c1h > c2h:
-                lst.append(c1.name)
-                c1 = c1.parent
-                c1h -= 1
-
-        while c2h > c1h:
-                up += 1
-                c2 = c2.parent
-                c2h -= 1
-
-        while id(c1) != id(c2):
-                lst.append(c1.name)
-                up += 1
-
-                c1 = c1.parent
-                c2 = c2.parent
-
-        for i in range(up):
-                lst.append('(..)')
-        lst.reverse()
-        return tuple(lst)
-
-class build_property(object):
-        pass
-
-class vsnode(object):
-        """
-        Abstract class representing visual studio elements
-        We assume that all visual studio nodes have a uuid and a parent
-        """
-        def __init__(self, ctx):
-                self.ctx = ctx # codelite context
-                self.name = '' # string, mandatory
-                self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
-                self.uuid = '' # string, mandatory
-                self.parent = None # parent node for visual studio nesting
-
-        def get_waf(self):
-                """
-                Override in subclasses...
-                """
-                return '%s/%s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf'))
-
-        def ptype(self):
-                """
-                Return a special uuid for projects written in the solution file
-                """
-                pass
-
-        def write(self):
-                """
-                Write the project file, by default, do nothing
-                """
-                pass
-
-        def make_uuid(self, val):
-                """
-                Alias for creating uuid values easily (the templates cannot access global variables)
-                """
-                return make_uuid(val)
-
-class vsnode_vsdir(vsnode):
-        """
-        Nodes representing visual studio folders (which do not match the filesystem tree!)
-        """
-        VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
-        def __init__(self, ctx, uuid, name, vspath=''):
-                vsnode.__init__(self, ctx)
-                self.title = self.name = name
-                self.uuid = uuid
-                self.vspath = vspath or name
-
-        def ptype(self):
-                return self.VS_GUID_SOLUTIONFOLDER
-
-class vsnode_project(vsnode):
-        """
-        Abstract class representing visual studio project elements
-        A project is assumed to be writable, and has a node representing the file to write to
-        """
-        VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
-        def ptype(self):
-                return self.VS_GUID_VCPROJ
-
-        def __init__(self, ctx, node):
-                vsnode.__init__(self, ctx)
-                self.path = node
-                self.uuid = make_uuid(node.abspath())
-                self.name = node.name
-                self.title = self.path.abspath()
-                self.source = [] # list of node objects
-                self.build_properties = [] # list of properties (nmake commands, output dir, etc)
-
-        def dirs(self):
-                """
-                Get the list of parent folders of the source files (header files included)
-                for writing the filters
-                """
-                lst = []
-                def add(x):
-                        if x.height() > self.tg.path.height() and x not in lst:
-                                lst.append(x)
-                                add(x.parent)
-                for x in self.source:
-                        add(x.parent)
-                return lst
-
-        def write(self):
-                Logs.debug('codelite: creating %r', self.path)
-                #print "self.name:",self.name
-
-                # first write the project file
-                template1 = compile_template(PROJECT_TEMPLATE)
-                proj_str = template1(self)
-                proj_str = rm_blank_lines(proj_str)
-                self.path.stealth_write(proj_str)
-
-                # then write the filter
-                #template2 = compile_template(FILTER_TEMPLATE)
-                #filter_str = template2(self)
-                #filter_str = rm_blank_lines(filter_str)
-                #tmp = self.path.parent.make_node(self.path.name + '.filters')
-                #tmp.stealth_write(filter_str)
-
-        def get_key(self, node):
-                """
-                required for writing the source files
-                """
-                name = node.name
-                if name.endswith(('.cpp', '.c')):
-                        return 'sourcefile'
-                return 'headerfile'
-
-        def collect_properties(self):
-                """
-                Returns a list of triplet (configuration, platform, output_directory)
-                """
-                ret = []
-                for c in self.ctx.configurations:
-                        for p in self.ctx.platforms:
-                                x = build_property()
-                                x.outdir = ''
-
-                                x.configuration = c
-                                x.platform = p
-
-                                x.preprocessor_definitions = ''
-                                x.includes_search_path = ''
-
-                                # can specify "deploy_dir" too
-                                ret.append(x)
-                self.build_properties = ret
-
-        def get_build_params(self, props):
-                opt = ''
-                return (self.get_waf(), opt)
-
-        def get_build_command(self, props):
-                return "%s build %s" % self.get_build_params(props)
-
-        def get_clean_command(self, props):
-                return "%s clean %s" % self.get_build_params(props)
-
-        def get_rebuild_command(self, props):
-                return "%s clean build %s" % self.get_build_params(props)
-                
-        def get_install_command(self, props):
-                return "%s install %s" % self.get_build_params(props)
-        def get_build_and_install_command(self, props):
-                return "%s build install %s" % self.get_build_params(props)
-                
-        def get_build_and_install_all_command(self, props):
-                return "%s build install" % self.get_build_params(props)[0]
-                
-        def get_clean_all_command(self, props):
-                return "%s clean" % self.get_build_params(props)[0]
-        
-        def get_build_all_command(self, props):
-                return "%s build" % self.get_build_params(props)[0]
-                
-        def get_rebuild_all_command(self, props):
-                return "%s clean build" % self.get_build_params(props)[0]
-
-        def get_filter_name(self, node):
-                lst = diff(node, self.tg.path)
-                return '\\'.join(lst) or '.'
-
-class vsnode_alias(vsnode_project):
-        def __init__(self, ctx, node, name):
-                vsnode_project.__init__(self, ctx, node)
-                self.name = name
-                self.output_file = ''
-
-class vsnode_build_all(vsnode_alias):
-        """
-        Fake target used to emulate the behaviour of "make all" (starting one process per target is slow)
-        This is the only alias enabled by default
-        """
-        def __init__(self, ctx, node, name='build_all_projects'):
-                vsnode_alias.__init__(self, ctx, node, name)
-                self.is_active = True
-
-class vsnode_install_all(vsnode_alias):
-        """
-        Fake target used to emulate the behaviour of "make install"
-        """
-        def __init__(self, ctx, node, name='install_all_projects'):
-                vsnode_alias.__init__(self, ctx, node, name)
-
-        def get_build_command(self, props):
-                return "%s build install %s" % self.get_build_params(props)
-
-        def get_clean_command(self, props):
-                return "%s clean %s" % self.get_build_params(props)
-
-        def get_rebuild_command(self, props):
-                return "%s clean build install %s" % self.get_build_params(props)
-
-class vsnode_project_view(vsnode_alias):
-        """
-        Fake target used to emulate a file system view
-        """
-        def __init__(self, ctx, node, name='project_view'):
-                vsnode_alias.__init__(self, ctx, node, name)
-                self.tg = self.ctx() # fake one, cannot remove
-                self.exclude_files = Node.exclude_regs + '''
-waf-2*
-waf3-2*/**
-.waf-2*
-.waf3-2*/**
-**/*.sdf
-**/*.suo
-**/*.ncb
-**/%s
-                ''' % Options.lockfile
-
-        def collect_source(self):
-                # this is likely to be slow
-                self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
-
-        def get_build_command(self, props):
-                params = self.get_build_params(props) + (self.ctx.cmd,)
-                return "%s %s %s" % params
-
-        def get_clean_command(self, props):
-                return ""
-
-        def get_rebuild_command(self, props):
-                return self.get_build_command(props)
-
-class vsnode_target(vsnode_project):
-        """
-        CodeLite project representing a target (program, library, etc.) and bound
-        to a task generator
-        """
-        def __init__(self, ctx, tg):
-                """
-                A project is more or less equivalent to a file/folder
-                """
-                base = getattr(ctx, 'projects_dir', None) or tg.path
-                node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
-                vsnode_project.__init__(self, ctx, node)
-                self.name = quote(tg.name)
-                self.tg     = tg  # task generator
-
-        def get_build_params(self, props):
-                """
-                Override the default to add the target name
-                """
-                opt = ''
-                if getattr(self, 'tg', None):
-                        opt += " --targets=%s" % self.tg.name
-                return (self.get_waf(), opt)
-
-        def collect_source(self):
-                tg = self.tg
-                source_files = tg.to_nodes(getattr(tg, 'source', []))
-                include_dirs = Utils.to_list(getattr(tg, 'codelite_includes', []))
-                include_files = []
-                for x in include_dirs:
-                        if isinstance(x, str):
-                                x = tg.path.find_node(x)
-                        if x:
-                                lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
-                                include_files.extend(lst)
-
-                # remove duplicates
-                self.source.extend(list(set(source_files + include_files)))
-                self.source.sort(key=lambda x: x.abspath())
-
-        def collect_properties(self):
-                """
-                CodeLite projects are associated with platforms and configurations (for building especially)
-                """
-                super(vsnode_target, self).collect_properties()
-                for x in self.build_properties:
-                        x.outdir = self.path.parent.abspath()
-                        x.preprocessor_definitions = ''
-                        x.includes_search_path = ''
-
-                        try:
-                                tsk = self.tg.link_task
-                        except AttributeError:
-                                pass
-                        else:                                
-                                x.output_file = tsk.outputs[0].abspath()
-                                x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
-                                x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
-
-class codelite_generator(BuildContext):
-        '''generates a CodeLite workspace'''
-        cmd = 'codelite'
-        fun = 'build'
-
-        def init(self):
-                """
-                Some data that needs to be present
-                """
-                if not getattr(self, 'configurations', None):
-                        self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
-                if not getattr(self, 'platforms', None):
-                        self.platforms = ['Win32']
-                if not getattr(self, 'all_projects', None):
-                        self.all_projects = []
-                if not getattr(self, 'project_extension', None):
-                        self.project_extension = '.project'
-                if not getattr(self, 'projects_dir', None):
-                        self.projects_dir = self.srcnode.make_node('')
-                        self.projects_dir.mkdir()
-
-                # bind the classes to the object, so that subclass can provide custom generators
-                if not getattr(self, 'vsnode_vsdir', None):
-                        self.vsnode_vsdir = vsnode_vsdir
-                if not getattr(self, 'vsnode_target', None):
-                        self.vsnode_target = vsnode_target
-                if not getattr(self, 'vsnode_build_all', None):
-                        self.vsnode_build_all = vsnode_build_all
-                if not getattr(self, 'vsnode_install_all', None):
-                        self.vsnode_install_all = vsnode_install_all
-                if not getattr(self, 'vsnode_project_view', None):
-                        self.vsnode_project_view = vsnode_project_view
-
-                self.numver = '11.00'
-                self.vsver  = '2010'
-
-        def execute(self):
-                """
-                Entry point
-                """
-                self.restore()
-                if not self.all_envs:
-                        self.load_envs()
-                self.recurse([self.run_dir])
-
-                # user initialization
-                self.init()
-
-                # two phases for creating the solution
-                self.collect_projects() # add project objects into "self.all_projects"
-                self.write_files() # write the corresponding project and solution files
-
-        def collect_projects(self):
-                """
-                Fill the list self.all_projects with project objects
-                Fill the list of build targets
-                """
-                self.collect_targets()
-                #self.add_aliases()
-                #self.collect_dirs()
-                default_project = getattr(self, 'default_project', None)
-                def sortfun(x):
-                        if x.name == default_project:
-                                return ''
-                        return getattr(x, 'path', None) and x.path.abspath() or x.name
-                self.all_projects.sort(key=sortfun)
-
-        def write_files(self):
-                """
-                Write the project and solution files from the data collected
-                so far. It is unlikely that you will want to change this
-                """
-                for p in self.all_projects:
-                        p.write()
-
-                # and finally write the solution file
-                node = self.get_solution_node()
-                node.parent.mkdir()
-                Logs.warn('Creating %r', node)
-                #a = dir(self.root)
-                #for b in a:
-                #        print b
-                #print self.group_names
-                #print "Hallo2:   ",self.root.listdir()
-                #print getattr(self, 'codelite_solution_name', None)
-                template1 = compile_template(SOLUTION_TEMPLATE)
-                sln_str = template1(self)
-                sln_str = rm_blank_lines(sln_str)
-                node.stealth_write(sln_str)
-
-        def get_solution_node(self):
-                """
-                The solution filename is required when writing the project files;
-                return self.solution_node, creating it if it does not exist
-                """
-                try:
-                        return self.solution_node
-                except AttributeError:
-                        pass
-
-                codelite_solution_name = getattr(self, 'codelite_solution_name', None)
-                if not codelite_solution_name:
-                        codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace'
-                        setattr(self, 'codelite_solution_name', codelite_solution_name)
-                if os.path.isabs(codelite_solution_name):
-                        self.solution_node = self.root.make_node(codelite_solution_name)
-                else:
-                        self.solution_node = self.srcnode.make_node(codelite_solution_name)
-                return self.solution_node
-
-        def project_configurations(self):
-                """
-                Helper that returns all the pairs (config,platform)
-                """
-                ret = []
-                for c in self.configurations:
-                        for p in self.platforms:
-                                ret.append((c, p))
-                return ret
-
-        def collect_targets(self):
-                """
-                Process the list of task generators
-                """
-                for g in self.groups:
-                        for tg in g:
-                                if not isinstance(tg, TaskGen.task_gen):
-                                        continue
-
-                                if not hasattr(tg, 'codelite_includes'):
-                                        tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
-                                tg.post()
-                                if not getattr(tg, 'link_task', None):
-                                        continue
-
-                                p = self.vsnode_target(self, tg)
-                                p.collect_source() # delegate this processing
-                                p.collect_properties()                               
-                                self.all_projects.append(p)
-
-        def add_aliases(self):
-                """
-                Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
-                We also add an alias for "make install" (disabled by default)
-                """
-                base = getattr(self, 'projects_dir', None) or self.tg.path
-
-                node_project = base.make_node('build_all_projects' + self.project_extension) # Node
-                p_build = self.vsnode_build_all(self, node_project)
-                p_build.collect_properties()
-                self.all_projects.append(p_build)
-
-                node_project = base.make_node('install_all_projects' + self.project_extension) # Node
-                p_install = self.vsnode_install_all(self, node_project)
-                p_install.collect_properties()
-                self.all_projects.append(p_install)
-
-                node_project = base.make_node('project_view' + self.project_extension) # Node
-                p_view = self.vsnode_project_view(self, node_project)
-                p_view.collect_source()
-                p_view.collect_properties()
-                self.all_projects.append(p_view)
-
-                n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
-                p_build.parent = p_install.parent = p_view.parent = n
-                self.all_projects.append(n)
-
-        def collect_dirs(self):
-                """
-                Create the folder structure in the CodeLite project view
-                """
-                seen = {}
-                def make_parents(proj):
-                        # look at a project, try to make a parent
-                        if getattr(proj, 'parent', None):
-                                # aliases already have parents
-                                return
-                        x = proj.iter_path
-                        if x in seen:
-                                proj.parent = seen[x]
-                                return
-
-                        # There is no vsnode_vsdir for x.
-                        # So create a project representing the folder "x"
-                        n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
-                        n.iter_path = x.parent
-                        self.all_projects.append(n)
-
-                        # recurse up to the project directory
-                        if x.height() > self.srcnode.height() + 1:
-                                make_parents(n)
-
-                for p in self.all_projects[:]: # iterate over a copy of all projects
-                        if not getattr(p, 'tg', None):
-                                # but only projects that have a task generator
-                                continue
-
-                        # make a folder for each task generator
-                        p.iter_path = p.tg.path
-                        make_parents(p)
-
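End to end, the smallest setup implied by the docstring above is (a sketch; the target
and source names are placeholders):

    def options(opt):
        opt.load('codelite')

    def configure(conf):
        conf.load('compiler_cxx')

    def build(bld):
        bld.program(source='main.cpp', target='app')

Running 'waf configure codelite' then writes app.project plus an APPNAME-derived
.workspace file next to the sources.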
diff --git a/waflib/extras/color_gcc.py b/waflib/extras/color_gcc.py
deleted file mode 100644
index b68c5eb..0000000
--- a/waflib/extras/color_gcc.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-# Replaces the default formatter by one which understands GCC output and colorizes it.
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2012"
-
-import sys
-from waflib import Logs
-
-class ColorGCCFormatter(Logs.formatter):
-	def __init__(self, colors):
-		self.colors = colors
-		Logs.formatter.__init__(self)
-	def format(self, rec):
-		frame = sys._getframe()
-		while frame:
-			func = frame.f_code.co_name
-			if func == 'exec_command':
-				cmd = frame.f_locals.get('cmd')
-				if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
-					lines = []
-					for line in rec.msg.splitlines():
-						if 'warning: ' in line:
-							lines.append(self.colors.YELLOW + line)
-						elif 'error: ' in line:
-							lines.append(self.colors.RED + line)
-						elif 'note: ' in line:
-							lines.append(self.colors.CYAN + line)
-						else:
-							lines.append(line)
-					rec.msg = "\n".join(lines)
-			frame = frame.f_back
-		return Logs.formatter.format(self, rec)
-
-def options(opt):
-	Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))
-
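Since options() above installs the formatter as a side effect, enabling colorized GCC
output is a one-liner in the project's wscript (sketch):

    def options(opt):
        opt.load('color_gcc')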
diff --git a/waflib/extras/color_rvct.py b/waflib/extras/color_rvct.py
deleted file mode 100644
index f89ccbd..0000000
--- a/waflib/extras/color_rvct.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-# Replaces the default formatter by one which understands RVCT output and colorizes it.
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2012"
-
-import sys
-import atexit
-from waflib import Logs
-
-errors = []
-
-def show_errors():
-	for i, e in enumerate(errors):
-		if i > 5:
-			break
-		print("Error: %s" % e)
-
-atexit.register(show_errors)
-
-class RvctFormatter(Logs.formatter):
-	def __init__(self, colors):
-		Logs.formatter.__init__(self)
-		self.colors = colors
-	def format(self, rec):
-		frame = sys._getframe()
-		while frame:
-			func = frame.f_code.co_name
-			if func == 'exec_command':
-				cmd = frame.f_locals['cmd']
-				if isinstance(cmd, list) and ('armcc' in cmd[0] or 'armld' in cmd[0]):
-					lines = []
-					for line in rec.msg.splitlines():
-						if 'Warning: ' in line:
-							lines.append(self.colors.YELLOW + line)
-						elif 'Error: ' in line:
-							lines.append(self.colors.RED + line)
-							errors.append(line)
-						elif 'note: ' in line:
-							lines.append(self.colors.CYAN + line)
-						else:
-							lines.append(line)
-					rec.msg = "\n".join(lines)
-			frame = frame.f_back
-		return Logs.formatter.format(self, rec)
-
-def options(opt):
-	Logs.log.handlers[0].setFormatter(RvctFormatter(Logs.colors))
-
diff --git a/waflib/extras/compat15.py b/waflib/extras/compat15.py
deleted file mode 100644
index 0e74df8..0000000
--- a/waflib/extras/compat15.py
+++ /dev/null
@@ -1,406 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010 (ita)
-
-"""
-This file is provided to enable compatibility with waf 1.5
-It was enabled by default in waf 1.6, but it is not used in waf 1.7
-"""
-
-import sys
-from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context
-
-# the following brings some compatibility with waf 1.5: old-style "import Configure" resolves to "waflib.Configure"
-sys.modules['Environment'] = ConfigSet
-ConfigSet.Environment = ConfigSet.ConfigSet
-
-sys.modules['Logs'] = Logs
-sys.modules['Options'] = Options
-sys.modules['Scripting'] = Scripting
-sys.modules['Task'] = Task
-sys.modules['Build'] = Build
-sys.modules['Configure'] = Configure
-sys.modules['Node'] = Node
-sys.modules['Runner'] = Runner
-sys.modules['TaskGen'] = TaskGen
-sys.modules['Utils'] = Utils
-sys.modules['Constants'] = Context
-Context.SRCDIR = ''
-Context.BLDDIR = ''
-
-from waflib.Tools import c_preproc
-sys.modules['preproc'] = c_preproc
-
-from waflib.Tools import c_config
-sys.modules['config_c'] = c_config
-
-ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
-ConfigSet.ConfigSet.set_variant = Utils.nada
-
-Utils.pproc = Utils.subprocess
-
-Build.BuildContext.add_subdirs = Build.BuildContext.recurse
-Build.BuildContext.new_task_gen = Build.BuildContext.__call__
-Build.BuildContext.is_install = 0
-Node.Node.relpath_gen = Node.Node.path_from
-
-Utils.pproc = Utils.subprocess
-Utils.get_term_cols = Logs.get_term_cols
-
-def cmd_output(cmd, **kw):
-
-	silent = False
-	if 'silent' in kw:
-		silent = kw['silent']
-		del(kw['silent'])
-
-	if 'e' in kw:
-		tmp = kw['e']
-		del(kw['e'])
-		kw['env'] = tmp
-
-	kw['shell'] = isinstance(cmd, str)
-	kw['stdout'] = Utils.subprocess.PIPE
-	if silent:
-		kw['stderr'] = Utils.subprocess.PIPE
-
-	try:
-		p = Utils.subprocess.Popen(cmd, **kw)
-		output = p.communicate()[0]
-	except OSError as e:
-		raise ValueError(str(e))
-
-	if p.returncode:
-		if not silent:
-			msg = "command execution failed: %s -> %r" % (cmd, str(output))
-			raise ValueError(msg)
-		output = ''
-	return output
-Utils.cmd_output = cmd_output
-
-def name_to_obj(self, s, env=None):
-	if Logs.verbose:
-		Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
-	return self.get_tgen_by_name(s)
-Build.BuildContext.name_to_obj = name_to_obj
-
-def env_of_name(self, name):
-	try:
-		return self.all_envs[name]
-	except KeyError:
-		Logs.error('no such environment: '+name)
-		return None
-Build.BuildContext.env_of_name = env_of_name
-
-
-def set_env_name(self, name, env):
-	self.all_envs[name] = env
-	return env
-Configure.ConfigurationContext.set_env_name = set_env_name
-
-def retrieve(self, name, fromenv=None):
-	try:
-		env = self.all_envs[name]
-	except KeyError:
-		env = ConfigSet.ConfigSet()
-		self.prepare_env(env)
-		self.all_envs[name] = env
-	else:
-		if fromenv:
-			Logs.warn('The environment %s may have been configured already', name)
-	return env
-Configure.ConfigurationContext.retrieve = retrieve
-
-Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
-Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
-Configure.conftest = Configure.conf
-Configure.ConfigurationError = Errors.ConfigurationError
-Utils.WafError = Errors.WafError
-
-Options.OptionsContext.sub_options = Options.OptionsContext.recurse
-Options.OptionsContext.tool_options = Context.Context.load
-Options.Handler = Options.OptionsContext
-
-Task.simple_task_type = Task.task_type_from_func = Task.task_factory
-Task.Task.classes = Task.classes
-
-def setitem(self, key, value):
-	if key.startswith('CCFLAGS'):
-		key = key[1:]
-	self.table[key] = value
-ConfigSet.ConfigSet.__setitem__ = setitem
-
-@TaskGen.feature('d')
-@TaskGen.before('apply_incpaths')
-def old_importpaths(self):
-	if getattr(self, 'importpaths', []):
-		self.includes = self.importpaths
-
-from waflib import Context
-eld = Context.load_tool
-def load_tool(*k, **kw):
-	ret = eld(*k, **kw)
-	if 'set_options' in ret.__dict__:
-		if Logs.verbose:
-			Logs.warn('compat: rename "set_options" to options')
-		ret.options = ret.set_options
-	if 'detect' in ret.__dict__:
-		if Logs.verbose:
-			Logs.warn('compat: rename "detect" to "configure"')
-		ret.configure = ret.detect
-	return ret
-Context.load_tool = load_tool
-
-def get_curdir(self):
-	return self.path.abspath()
-Context.Context.curdir = property(get_curdir, Utils.nada)
-
-def get_srcdir(self):
-	return self.srcnode.abspath()
-Configure.ConfigurationContext.srcdir = property(get_srcdir, Utils.nada)
-
-def get_blddir(self):
-	return self.bldnode.abspath()
-Configure.ConfigurationContext.blddir = property(get_blddir, Utils.nada)
-
-Configure.ConfigurationContext.check_message_1 = Configure.ConfigurationContext.start_msg
-Configure.ConfigurationContext.check_message_2 = Configure.ConfigurationContext.end_msg
-
-rev = Context.load_module
-def load_module(path, encoding=None):
-	ret = rev(path, encoding)
-	if 'set_options' in ret.__dict__:
-		if Logs.verbose:
-			Logs.warn('compat: rename "set_options" to "options" (%r)', path)
-		ret.options = ret.set_options
-	if 'srcdir' in ret.__dict__:
-		if Logs.verbose:
-			Logs.warn('compat: rename "srcdir" to "top" (%r)', path)
-		ret.top = ret.srcdir
-	if 'blddir' in ret.__dict__:
-		if Logs.verbose:
-			Logs.warn('compat: rename "blddir" to "out" (%r)', path)
-		ret.out = ret.blddir
-	Utils.g_module = Context.g_module
-	Options.launch_dir = Context.launch_dir
-	return ret
-Context.load_module = load_module
-
-old_post = TaskGen.task_gen.post
-def post(self):
-	self.features = self.to_list(self.features)
-	if 'cc' in self.features:
-		if Logs.verbose:
-			Logs.warn('compat: the feature cc does not exist anymore (use "c")')
-		self.features.remove('cc')
-		self.features.append('c')
-	if 'cstaticlib' in self.features:
-		if Logs.verbose:
-			Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
-		self.features.remove('cstaticlib')
-		self.features.append('cxxstlib' if 'cxx' in self.features else 'cstlib')
-	if getattr(self, 'ccflags', None):
-		if Logs.verbose:
-			Logs.warn('compat: "ccflags" was renamed to "cflags"')
-		self.cflags = self.ccflags
-	return old_post(self)
-TaskGen.task_gen.post = post
-
-def waf_version(*k, **kw):
-	Logs.warn('wrong version (waf_version was removed in waf 1.6)')
-Utils.waf_version = waf_version
-
-
-import os
-@TaskGen.feature('c', 'cxx', 'd')
-@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
-@TaskGen.after('apply_link', 'process_source')
-def apply_uselib_local(self):
-	"""
-	process the uselib_local attribute
-	execute after apply_link because of the execution order set on 'link_task'
-	"""
-	env = self.env
-	from waflib.Tools.ccroot import stlink_task
-
-	# 1. the case of the libs defined in the project (visit ancestors first)
-	# the ancestors external libraries (uselib) will be prepended
-	self.uselib = self.to_list(getattr(self, 'uselib', []))
-	self.includes = self.to_list(getattr(self, 'includes', []))
-	names = self.to_list(getattr(self, 'uselib_local', []))
-	get = self.bld.get_tgen_by_name
-	seen = set()
-	seen_uselib = set()
-	tmp = Utils.deque(names) # consume a copy of the list of names
-	if tmp:
-		if Logs.verbose:
-			Logs.warn('compat: "uselib_local" is deprecated, replace it with "use"')
-	while tmp:
-		lib_name = tmp.popleft()
-		# visit dependencies only once
-		if lib_name in seen:
-			continue
-
-		y = get(lib_name)
-		y.post()
-		seen.add(lib_name)
-
-		# object has ancestors to process (shared libraries): add them to the end of the list
-		if getattr(y, 'uselib_local', None):
-			for x in self.to_list(getattr(y, 'uselib_local', [])):
-				obj = get(x)
-				obj.post()
-				if getattr(obj, 'link_task', None):
-					if not isinstance(obj.link_task, stlink_task):
-						tmp.append(x)
-
-		# link task and flags
-		if getattr(y, 'link_task', None):
-
-			link_name = y.target[y.target.rfind(os.sep) + 1:]
-			if isinstance(y.link_task, stlink_task):
-				env.append_value('STLIB', [link_name])
-			else:
-				# some linkers can link against programs
-				env.append_value('LIB', [link_name])
-
-			# the order
-			self.link_task.set_run_after(y.link_task)
-
-			# for the recompilation
-			self.link_task.dep_nodes += y.link_task.outputs
-
-			# add the link path too
-			tmp_path = y.link_task.outputs[0].parent.bldpath()
-			if tmp_path not in env['LIBPATH']:
-				env.prepend_value('LIBPATH', [tmp_path])
-
-		# add ancestors uselib too - but only propagate those that have no staticlib defined
-		for v in self.to_list(getattr(y, 'uselib', [])):
-			if v not in seen_uselib:
-				seen_uselib.add(v)
-				if not env['STLIB_' + v]:
-					if v not in self.uselib:
-						self.uselib.insert(0, v)
-
-		# if the library task generator provides 'export_includes', add to the include path
-		# the export_includes must be a list of paths relative to the other library
-		if getattr(y, 'export_includes', None):
-			self.includes.extend(y.to_incnodes(y.export_includes))
-
-@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
-@TaskGen.after('apply_link')
-def apply_objdeps(self):
-	"add the .o files produced by some other object files in the same manner as uselib_local"
-	names = getattr(self, 'add_objects', [])
-	if not names:
-		return
-	names = self.to_list(names)
-
-	get = self.bld.get_tgen_by_name
-	seen = []
-	while names:
-		x = names[0]
-
-		# visit dependencies only once
-		if x in seen:
-			names = names[1:]
-			continue
-
-		# object does not exist ?
-		y = get(x)
-
-		# object has ancestors to process first ? update the list of names
-		if getattr(y, 'add_objects', None):
-			added = 0
-			lst = y.to_list(y.add_objects)
-			lst.reverse()
-			for u in lst:
-				if u in seen:
-					continue
-				added = 1
-				names = [u]+names
-			if added:
-				continue # list of names modified, loop
-
-		# safe to process the current object
-		y.post()
-		seen.append(x)
-
-		for t in getattr(y, 'compiled_tasks', []):
-			self.link_task.inputs.extend(t.outputs)
-
-@TaskGen.after('apply_link')
-def process_obj_files(self):
-	if not hasattr(self, 'obj_files'):
-		return
-	for x in self.obj_files:
-		node = self.path.find_resource(x)
-		self.link_task.inputs.append(node)
-
-@TaskGen.taskgen_method
-def add_obj_file(self, file):
-	"""Small example on how to link object files as if they were source
-	obj = bld.create_obj('cc')
-	obj.add_obj_file('foo.o')"""
-	if not hasattr(self, 'obj_files'):
-		self.obj_files = []
-	if 'process_obj_files' not in self.meths:
-		self.meths.append('process_obj_files')
-	self.obj_files.append(file)
-
-
-old_define = Configure.ConfigurationContext.__dict__['define']
-
-@Configure.conf
-def define(self, key, val, quote=True, comment=''):
-	old_define(self, key, val, quote, comment)
-	if key.startswith('HAVE_'):
-		self.env[key] = 1
-
-old_undefine = Configure.ConfigurationContext.__dict__['undefine']
-
-@Configure.conf
-def undefine(self, key, comment=''):
-	old_undefine(self, key, comment)
-	if key.startswith('HAVE_'):
-		self.env[key] = 0
-
-# some people might want to use export_incdirs, but it was renamed
-def set_incdirs(self, val):
-	Logs.warn('compat: replace "export_incdirs" with "export_includes"')
-	self.export_includes = val
-TaskGen.task_gen.export_incdirs = property(None, set_incdirs)
-
-def install_dir(self, path):
-	if not path:
-		return []
-
-	destpath = Utils.subst_vars(path, self.env)
-
-	if self.is_install > 0:
-		Logs.info('* creating %s', destpath)
-		Utils.check_dir(destpath)
-	elif self.is_install < 0:
-		Logs.info('* removing %s', destpath)
-		try:
-			os.remove(destpath)
-		except OSError:
-			pass
-Build.BuildContext.install_dir = install_dir
-
-# before/after names
-repl = {'apply_core': 'process_source',
-	'apply_lib_vars': 'process_source',
-	'apply_obj_vars': 'propagate_uselib_vars',
-	'exec_rule': 'process_rule'
-}
-def after(*k):
-	k = [repl.get(key, key) for key in k]
-	return TaskGen.after_method(*k)
-
-def before(*k):
-	k = [repl.get(key, key) for key in k]
-	return TaskGen.before_method(*k)
-TaskGen.after = after
-TaskGen.before = before
-
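The compat shims above follow one pattern throughout: keep a reference to the
original function, wrap it, and assign the wrapper back so legacy names keep
working. A minimal sketch of that wrap-and-alias idiom outside waf (the
load_module stub and its set_options attribute are stand-ins, not waf's real
loader):

	# Sketch of the wrap-and-alias idiom used by compat15 above.
	import types

	def load_module(path):
		# stand-in loader; returns a module-like object with a legacy attribute
		mod = types.ModuleType(path)
		mod.set_options = lambda opt: None
		return mod

	_original = load_module

	def load_module_compat(path):
		ret = _original(path)
		if hasattr(ret, 'set_options') and not hasattr(ret, 'options'):
			print('compat: rename "set_options" to "options" (%r)' % path)
			ret.options = ret.set_options  # alias the legacy name
		return ret

	load_module = load_module_compat  # patch the wrapper back in, as compat15 does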
diff --git a/waflib/extras/cppcheck.py b/waflib/extras/cppcheck.py
deleted file mode 100644
index 13ff424..0000000
--- a/waflib/extras/cppcheck.py
+++ /dev/null
@@ -1,591 +0,0 @@
-#! /usr/bin/env python
-# -*- encoding: utf-8 -*-
-# Michel Mooij, michel.mooij7@gmail.com
-
-"""
-Tool Description
-================
-This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
-checking tool 'cppcheck'.
-
-See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
-itself.
-Note that many Linux distributions already provide a ready-to-install version
-of cppcheck. On Fedora, for instance, it can be installed using yum:
-
-	'sudo yum install cppcheck'
-
-
-Usage
-=====
-In order to use this waftool simply add it to the 'options' and 'configure'
-functions of your main waf script as shown in the example below:
-
-	def options(opt):
-		opt.load('cppcheck', tooldir='./waftools')
-
-	def configure(conf):
-		conf.load('cppcheck')
-
-Note that the example shown above assumes that the cppcheck waftool is located
-in the subdirectory named 'waftools'.
-
-When configured as shown in the example above, cppcheck will automatically
-perform a source code analysis on all C/C++ build tasks that have been
-defined in your waf build system.
-
-In the example shown below, the sources of the C program will be used as input
-for cppcheck when building the task.
-
-	def build(bld):
-		bld.program(name='foo', source='foobar.c')
-
-The result of the source code analysis will be stored both as xml and html
-files in the build location for the task. Should any error be detected by
-cppcheck, the build will be aborted and a link to the html report will be shown.
-By default, one index.html file is created for each task generator. A global
-index.html file can be obtained by setting the following variable
-in the configuration section:
-
-	conf.env.CPPCHECK_SINGLE_HTML = False
-
-When needed, source code checking by cppcheck can be disabled for a particular
-task, or for a particular detected error or warning within a task. It can also
-be disabled for all tasks.
-
-In order to exclude a task from source code checking add the skip option to the
-task as shown below:
-
-	def build(bld):
-		bld.program(
-				name='foo',
-				source='foobar.c',
-				cppcheck_skip=True
-		)
-
-When needed, problems detected by cppcheck may be suppressed using a file
-containing a list of suppression rules. The relative or absolute path to this
-file can be added to the build task as shown in the example below:
-
-		bld.program(
-				name='bar',
-				source='foobar.c',
-				cppcheck_suppress='bar.suppress'
-		)
-
-A cppcheck suppress file should contain one suppress rule per line. Each of
-these rules will be passed as an '--suppress=<rule>' argument to cppcheck.
-
-Dependencies
-================
-This waftool depends on the Python pygments module, which is used for source
-code syntax highlighting when creating the html reports. See
-http://pygments.org/ for more information on this package.
-
-Remarks
-================
-The generation of the html report is originally based on the
-cppcheck-htmlreport.py script that ships with the cppcheck tool.
-"""
-
-import sys
-import xml.etree.ElementTree as ElementTree
-from waflib import Task, TaskGen, Logs, Context, Options
-
-PYGMENTS_EXC_MSG = '''
-The required module 'pygments' could not be found. Please install it using your
-platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
-see 'http://pygments.org/download/' for installation instructions.
-'''
-
-try:
-	import pygments
-	from pygments import formatters, lexers
-except ImportError as e:
-	Logs.warn(PYGMENTS_EXC_MSG)
-	raise e
-
-
-def options(opt):
-	opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
-		default=False, action='store_true',
-		help='do not check C/C++ sources (default=False)')
-
-	opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
-		default=False, action='store_true',
-		help='continue in case of errors (default=False)')
-
-	opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
-		default='warning,performance,portability,style,unusedFunction', action='store',
-		help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")
-
-	opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
-		default='warning,performance,portability,style', action='store',
-		help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")
-
-	opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
-		default='c99', action='store',
-		help='cppcheck standard to use when checking C (default=c99)')
-
-	opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
-		default='c++03', action='store',
-		help='cppcheck standard to use when checking C++ (default=c++03)')
-
-	opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
-		default=False, action='store_true',
-		help='force a check for missing built-in include files, e.g. stdio.h (default=False)')
-
-	opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
-		default='20', action='store',
-		help='maximum preprocessor (--max-configs) define iterations (default=20)')
-
-	opt.add_option('--cppcheck-jobs', dest='cppcheck_jobs',
-		default='1', action='store',
-		help='number of jobs (-j) to do the checking work (default=1)')
-
-def configure(conf):
-	if conf.options.cppcheck_skip:
-		conf.env.CPPCHECK_SKIP = [True]
-	conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
-	conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
-	conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
-	conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
-	conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
-	conf.env.CPPCHECK_JOBS = conf.options.cppcheck_jobs
-	if conf.options.cppcheck_jobs != '1' and ('unusedFunction' in conf.options.cppcheck_bin_enable or 'unusedFunction' in conf.options.cppcheck_lib_enable or 'all' in conf.options.cppcheck_bin_enable or 'all' in conf.options.cppcheck_lib_enable):
-		Logs.warn('cppcheck: unusedFunction cannot be used with multiple threads; cppcheck will disable it automatically')
-	conf.find_program('cppcheck', var='CPPCHECK')
-
-	# set to True to get a single index.html file
-	conf.env.CPPCHECK_SINGLE_HTML = False
-
-@TaskGen.feature('c')
-@TaskGen.feature('cxx')
-def cppcheck_execute(self):
-	if hasattr(self.bld, 'conf'):
-		return
-	if len(self.env.CPPCHECK_SKIP) or Options.options.cppcheck_skip:
-		return
-	if getattr(self, 'cppcheck_skip', False):
-		return
-	task = self.create_task('cppcheck')
-	task.cmd = _tgen_create_cmd(self)
-	task.fatal = []
-	if not Options.options.cppcheck_err_resume:
-		task.fatal.append('error')
-
-
-def _tgen_create_cmd(self):
-	features = getattr(self, 'features', [])
-	std_c = self.env.CPPCHECK_STD_C
-	std_cxx = self.env.CPPCHECK_STD_CXX
-	max_configs = self.env.CPPCHECK_MAX_CONFIGS
-	bin_enable = self.env.CPPCHECK_BIN_ENABLE
-	lib_enable = self.env.CPPCHECK_LIB_ENABLE
-	jobs = self.env.CPPCHECK_JOBS
-
-	cmd  = self.env.CPPCHECK
-	args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
-	args.append('--max-configs=%s' % max_configs)
-	args.append('-j %s' % jobs)
-
-	if 'cxx' in features:
-		args.append('--language=c++')
-		args.append('--std=%s' % std_cxx)
-	else:
-		args.append('--language=c')
-		args.append('--std=%s' % std_c)
-
-	if Options.options.cppcheck_check_config:
-		args.append('--check-config')
-
-	if set(['cprogram','cxxprogram']) & set(features):
-		args.append('--enable=%s' % bin_enable)
-	else:
-		args.append('--enable=%s' % lib_enable)
-
-	for src in self.to_list(getattr(self, 'source', [])):
-		if not isinstance(src, str):
-			src = repr(src)
-		args.append(src)
-	for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
-		if not isinstance(inc, str):
-			inc = repr(inc)
-		args.append('-I%s' % inc)
-	for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
-		if not isinstance(inc, str):
-			inc = repr(inc)
-		args.append('-I%s' % inc)
-	return cmd + args
-
-
-class cppcheck(Task.Task):
-	quiet = True
-
-	def run(self):
-		stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
-		self._save_xml_report(stderr)
-		defects = self._get_defects(stderr)
-		index = self._create_html_report(defects)
-		self._errors_evaluate(defects, index)
-		return 0
-
-	def _save_xml_report(self, s):
-		'''take the cppcheck xml result string, add the command string used to
-		invoke cppcheck, and save it as an xml file.
-		'''
-		header = '%s\n' % s.splitlines()[0]
-		root = ElementTree.fromstring(s)
-		cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
-		cmd.text = str(self.cmd)
-		body = ElementTree.tostring(root).decode('us-ascii')
-		body_html_name = 'cppcheck-%s.xml' % self.generator.get_name()
-		if self.env.CPPCHECK_SINGLE_HTML:
-			body_html_name = 'cppcheck.xml'
-		node = self.generator.path.get_bld().find_or_declare(body_html_name)
-		node.write(header + body)
-
-	def _get_defects(self, xml_string):
-		'''evaluate the xml string returned by cppcheck (on stderr) and use it to create
-		a list of defects.
-		'''
-		defects = []
-		for error in ElementTree.fromstring(xml_string).iter('error'):
-			defect = {}
-			defect['id'] = error.get('id')
-			defect['severity'] = error.get('severity')
-			defect['msg'] = str(error.get('msg')).replace('<','&lt;')
-			defect['verbose'] = error.get('verbose')
-			for location in error.findall('location'):
-				defect['file'] = location.get('file')
-				defect['line'] = str(int(location.get('line')) - 1)
-			defects.append(defect)
-		return defects
-
-	def _create_html_report(self, defects):
-		files, css_style_defs = self._create_html_files(defects)
-		index = self._create_html_index(files)
-		self._create_css_file(css_style_defs)
-		return index
-
-	def _create_html_files(self, defects):
-		sources = {}
-		defects = [defect for defect in defects if 'file' in defect]
-		for defect in defects:
-			name = defect['file']
-			if not name in sources:
-				sources[name] = [defect]
-			else:
-				sources[name].append(defect)
-
-		files = {}
-		css_style_defs = None
-		bpath = self.generator.path.get_bld().abspath()
-		names = list(sources.keys())
-		for i, name in enumerate(names):
-			if self.env.CPPCHECK_SINGLE_HTML:
-				htmlfile = 'cppcheck/%i.html' % (i)
-			else:
-				htmlfile = 'cppcheck/%s%i.html' % (self.generator.get_name(),i)
-			errors = sources[name]
-			files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
-			css_style_defs = self._create_html_file(name, htmlfile, errors)
-		return files, css_style_defs
-
-	def _create_html_file(self, sourcefile, htmlfile, errors):
-		name = self.generator.get_name()
-		root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
-		title = root.find('head/title')
-		title.text = 'cppcheck - report - %s' % name
-
-		body = root.find('body')
-		for div in body.findall('div'):
-			if div.get('id') == 'page':
-				page = div
-				break
-		for div in page.findall('div'):
-			if div.get('id') == 'header':
-				h1 = div.find('h1')
-				h1.text = 'cppcheck report - %s' % name
-			if div.get('id') == 'menu':
-				indexlink = div.find('a')
-				if self.env.CPPCHECK_SINGLE_HTML:
-					indexlink.attrib['href'] = 'index.html'
-				else:
-					indexlink.attrib['href'] = 'index-%s.html' % name
-			if div.get('id') == 'content':
-				content = div
-				srcnode = self.generator.bld.root.find_node(sourcefile)
-				hl_lines = [e['line'] for e in errors if 'line' in e]
-				formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
-				formatter.errors = [e for e in errors if 'line' in e]
-				css_style_defs = formatter.get_style_defs('.highlight')
-				lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
-				s = pygments.highlight(srcnode.read(), lexer, formatter)
-				table = ElementTree.fromstring(s)
-				content.append(table)
-
-		s = ElementTree.tostring(root, method='html').decode('us-ascii')
-		s = CPPCHECK_HTML_TYPE + s
-		node = self.generator.path.get_bld().find_or_declare(htmlfile)
-		node.write(s)
-		return css_style_defs
-
-	def _create_html_index(self, files):
-		name = self.generator.get_name()
-		root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
-		title = root.find('head/title')
-		title.text = 'cppcheck - report - %s' % name
-
-		body = root.find('body')
-		for div in body.findall('div'):
-			if div.get('id') == 'page':
-				page = div
-				break
-		for div in page.findall('div'):
-			if div.get('id') == 'header':
-				h1 = div.find('h1')
-				h1.text = 'cppcheck report - %s' % name
-			if div.get('id') == 'content':
-				content = div
-				self._create_html_table(content, files)
-			if div.get('id') == 'menu':
-				indexlink = div.find('a')
-				if self.env.CPPCHECK_SINGLE_HTML:
-					indexlink.attrib['href'] = 'index.html'
-				else:
-					indexlink.attrib['href'] = 'index-%s.html' % name
-
-		s = ElementTree.tostring(root, method='html').decode('us-ascii')
-		s = CPPCHECK_HTML_TYPE + s
-		index_html_name = 'cppcheck/index-%s.html' % name
-		if self.env.CPPCHECK_SINGLE_HTML:
-			index_html_name = 'cppcheck/index.html'
-		node = self.generator.path.get_bld().find_or_declare(index_html_name)
-		node.write(s)
-		return node
-
-	def _create_html_table(self, content, files):
-		table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
-		for name, val in files.items():
-			f = val['htmlfile']
-			s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
-			row = ElementTree.fromstring(s)
-			table.append(row)
-
-			errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxsize)
-			for e in errors:
-				if 'line' not in e:
-					s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
-				else:
-					attr = ''
-					if e['severity'] == 'error':
-						attr = 'class="error"'
-					s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
-					s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
-				row = ElementTree.fromstring(s)
-				table.append(row)
-		content.append(table)
-
-	def _create_css_file(self, css_style_defs):
-		css = str(CPPCHECK_CSS_FILE)
-		if css_style_defs:
-			css = "%s\n%s\n" % (css, css_style_defs)
-		node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
-		node.write(css)
-
-	def _errors_evaluate(self, errors, http_index):
-		name = self.generator.get_name()
-		fatal = self.fatal
-		severity = [err['severity'] for err in errors]
-		problems = [err for err in errors if err['severity'] != 'information']
-
-		if set(fatal) & set(severity):
-			exc  = "\n"
-			exc += "\ncppcheck detected fatal error(s) in task '%s', see report for details:" % name
-			exc += "\n    file://%r" % (http_index)
-			exc += "\n"
-			self.generator.bld.fatal(exc)
-
-		elif len(problems):
-			msg =  "\ncppcheck detected (possible) problem(s) in task '%s', see report for details:" % name
-			msg += "\n    file://%r" % http_index
-			msg += "\n"
-			Logs.error(msg)
-
-
-class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
-	errors = []
-
-	def wrap(self, source, outfile):
-		line_no = 1
-		for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
-			# If this is a source code line we want to add a span tag at the end.
-			if i == 1:
-				for error in self.errors:
-					if int(error['line']) == line_no:
-						t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
-				line_no += 1
-			yield i, t
-
-
-CPPCHECK_HTML_TYPE = \
-'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'
-
-CPPCHECK_HTML_FILE = """
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp "&#160;">]>
-<html>
-	<head>
-		<title>cppcheck - report - XXX</title>
-		<link href="style.css" rel="stylesheet" type="text/css" />
-		<style type="text/css">
-		</style>
-	</head>
-	<body class="body">
-		<div id="page-header">&nbsp;</div>
-		<div id="page">
-			<div id="header">
-				<h1>cppcheck report - XXX</h1>
-			</div>
-			<div id="menu">
-				<a href="index.html">Defect list</a>
-			</div>
-			<div id="content">
-			</div>
-			<div id="footer">
-				<div>cppcheck - a tool for static C/C++ code analysis</div>
-				<div>
-				Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
-				Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
-				IRC: #cppcheck at irc.freenode.net
-				</div>
-				&nbsp;
-			</div>
-		&nbsp;
-		</div>
-		<div id="page-footer">&nbsp;</div>
-	</body>
-</html>
-"""
-
-CPPCHECK_HTML_TABLE = """
-<table>
-	<tr>
-		<th>Line</th>
-		<th>Id</th>
-		<th>Severity</th>
-		<th>Message</th>
-	</tr>
-</table>
-"""
-
-CPPCHECK_HTML_ERROR = \
-'<span style="background: #ffaaaa;padding: 3px;">&lt;--- %s</span>\n'
-
-CPPCHECK_CSS_FILE = """
-body.body {
-	font-family: Arial;
-	font-size: 13px;
-	background-color: black;
-	padding: 0px;
-	margin: 0px;
-}
-
-.error {
-	font-family: Arial;
-	font-size: 13px;
-	background-color: #ffb7b7;
-	padding: 0px;
-	margin: 0px;
-}
-
-th, td {
-	min-width: 100px;
-	text-align: left;
-}
-
-#page-header {
-	clear: both;
-	width: 1200px;
-	margin: 20px auto 0px auto;
-	height: 10px;
-	border-bottom-width: 2px;
-	border-bottom-style: solid;
-	border-bottom-color: #aaaaaa;
-}
-
-#page {
-	width: 1160px;
-	margin: auto;
-	border-left-width: 2px;
-	border-left-style: solid;
-	border-left-color: #aaaaaa;
-	border-right-width: 2px;
-	border-right-style: solid;
-	border-right-color: #aaaaaa;
-	background-color: White;
-	padding: 20px;
-}
-
-#page-footer {
-	clear: both;
-	width: 1200px;
-	margin: auto;
-	height: 10px;
-	border-top-width: 2px;
-	border-top-style: solid;
-	border-top-color: #aaaaaa;
-}
-
-#header {
-	width: 100%;
-	height: 70px;
-	background-image: url(logo.png);
-	background-repeat: no-repeat;
-	background-position: left top;
-	border-bottom-style: solid;
-	border-bottom-width: thin;
-	border-bottom-color: #aaaaaa;
-}
-
-#menu {
-	margin-top: 5px;
-	text-align: left;
-	float: left;
-	width: 100px;
-	height: 300px;
-}
-
-#menu > a {
-	margin-left: 10px;
-	display: block;
-}
-
-#content {
-	float: left;
-	width: 1020px;
-	margin: 5px;
-	padding: 0px 10px 10px 10px;
-	border-left-style: solid;
-	border-left-width: thin;
-	border-left-color: #aaaaaa;
-}
-
-#footer {
-	padding-bottom: 5px;
-	padding-top: 5px;
-	border-top-style: solid;
-	border-top-width: thin;
-	border-top-color: #aaaaaa;
-	clear: both;
-	font-size: 10px;
-}
-
-#footer > div {
-	float: left;
-	width: 33%;
-}
-
-"""
-
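The defect extraction in _get_defects() above is plain ElementTree work over
cppcheck's --xml-version=2 output. A self-contained sketch of the same parse,
run against a hand-written XML snippet rather than real cppcheck output:

	# Sketch mirroring _get_defects(); the XML below is a made-up example.
	import xml.etree.ElementTree as ElementTree

	xml_string = '''<results version="2">
	  <cppcheck version="1.86"/>
	  <errors>
	    <error id="nullPointer" severity="error" msg="Null pointer dereference"
	           verbose="Null pointer dereference">
	      <location file="foo.c" line="42"/>
	    </error>
	  </errors>
	</results>'''

	defects = []
	for error in ElementTree.fromstring(xml_string).iter('error'):
		defect = {
			'id': error.get('id'),
			'severity': error.get('severity'),
			'msg': str(error.get('msg')).replace('<', '&lt;'),
		}
		for location in error.findall('location'):
			defect['file'] = location.get('file')
			defect['line'] = str(int(location.get('line')) - 1)  # 0-based, as above
		defects.append(defect)

	print(defects)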
diff --git a/waflib/extras/cpplint.py b/waflib/extras/cpplint.py
deleted file mode 100644
index 8cdd6dd..0000000
--- a/waflib/extras/cpplint.py
+++ /dev/null
@@ -1,209 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-#
-# written by Sylvain Rouquette, 2014
-
-'''
-
-This is an extra tool, not bundled with the default waf binary.
-To add the cpplint tool to the waf file:
-$ ./waf-light --tools=compat15,cpplint
-
-This tool also requires cpplint for Python.
-If you have pip, you can install it like this: pip install cpplint
-
-When using this tool, the wscript will look like:
-
-    def options(opt):
-        opt.load('compiler_cxx cpplint')
-
-    def configure(conf):
-        conf.load('compiler_cxx cpplint')
-        # optional, you can also specify them on the command line
-        conf.env.CPPLINT_FILTERS = ','.join((
-            '-whitespace/newline',      # c++11 lambda
-            '-readability/braces',      # c++11 constructor
-            '-whitespace/braces',       # c++11 constructor
-            '-build/storage_class',     # c++11 for-range
-            '-whitespace/blank_line',   # user pref
-            '-whitespace/labels'        # user pref
-            ))
-
-    def build(bld):
-        bld(features='cpplint', source='main.cpp', target='app')
-        # add include files, because they aren't usually built
-        bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
-'''
-
-from __future__ import absolute_import
-import sys, re
-import logging
-from waflib import Errors, Task, TaskGen, Logs, Options, Node, Utils
-
-
-critical_errors = 0
-CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n'
-RE_EMACS = re.compile(r'(?P<filename>.*):(?P<linenum>\d+):  (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')
-CPPLINT_RE = {
-    'waf': RE_EMACS,
-    'emacs': RE_EMACS,
-    'vs7': re.compile(r'(?P<filename>.*)\((?P<linenum>\d+)\):  (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
-    'eclipse': re.compile(r'(?P<filename>.*):(?P<linenum>\d+): warning: (?P<message>.*)  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
-}
-CPPLINT_STR = ('${CPPLINT} '
-               '--verbose=${CPPLINT_LEVEL} '
-               '--output=${CPPLINT_OUTPUT} '
-               '--filter=${CPPLINT_FILTERS} '
-               '--root=${CPPLINT_ROOT} '
-               '--linelength=${CPPLINT_LINE_LENGTH} ')
-
-
-def options(opt):
-    opt.add_option('--cpplint-filters', type='string',
-                   default='', dest='CPPLINT_FILTERS',
-                   help='add filters to cpplint')
-    opt.add_option('--cpplint-length', type='int',
-                   default=80, dest='CPPLINT_LINE_LENGTH',
-                   help='specify the line length (default: 80)')
-    opt.add_option('--cpplint-level', default=1, type='int', dest='CPPLINT_LEVEL',
-                   help='specify the log level (default: 1)')
-    opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK',
-                   help='break the build if error >= level (default: 5)')
-    opt.add_option('--cpplint-root', type='string',
-                   default='', dest='CPPLINT_ROOT',
-                   help='root directory used to derive header guard')
-    opt.add_option('--cpplint-skip', action='store_true',
-                   default=False, dest='CPPLINT_SKIP',
-                   help='skip cpplint during build')
-    opt.add_option('--cpplint-output', type='string',
-                   default='waf', dest='CPPLINT_OUTPUT',
-                   help='select output format (waf, emacs, vs7, eclipse)')
-
-
-def configure(conf):
-    try:
-        conf.find_program('cpplint', var='CPPLINT')
-    except Errors.ConfigurationError:
-        conf.env.CPPLINT_SKIP = True
-
-
-class cpplint_formatter(Logs.formatter, object):
-    def __init__(self, fmt):
-        logging.Formatter.__init__(self, CPPLINT_FORMAT)
-        self.fmt = fmt
-
-    def format(self, rec):
-        if self.fmt == 'waf':
-            result = CPPLINT_RE[self.fmt].match(rec.msg).groupdict()
-            rec.msg = CPPLINT_FORMAT % result
-        if rec.levelno <= logging.INFO:
-            rec.c1 = Logs.colors.CYAN
-        return super(cpplint_formatter, self).format(rec)
-
-
-class cpplint_handler(Logs.log_handler, object):
-    def __init__(self, stream=sys.stderr, **kw):
-        super(cpplint_handler, self).__init__(stream, **kw)
-        self.stream = stream
-
-    def emit(self, rec):
-        rec.stream = self.stream
-        self.emit_override(rec)
-        self.flush()
-
-
-class cpplint_wrapper(object):
-    def __init__(self, logger, threshold, fmt):
-        self.logger = logger
-        self.threshold = threshold
-        self.fmt = fmt
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        if isinstance(exc_value, Utils.subprocess.CalledProcessError):
-            messages = [m for m in exc_value.output.splitlines() 
-                        if 'Done processing' not in m 
-                        and 'Total errors found' not in m]
-            for message in messages:
-                self.write(message)
-            return True
-
-    def write(self, message):
-        global critical_errors
-        result = CPPLINT_RE[self.fmt].match(message)
-        if not result:
-            return
-        level = int(result.groupdict()['confidence'])
-        if level >= self.threshold:
-            critical_errors += 1
-        if level <= 2:
-            self.logger.info(message)
-        elif level <= 4:
-            self.logger.warning(message)
-        else:
-            self.logger.error(message)
-
-
-cpplint_logger = None
-def get_cpplint_logger(fmt):
-    global cpplint_logger
-    if cpplint_logger:
-        return cpplint_logger
-    cpplint_logger = logging.getLogger('cpplint')
-    hdlr = cpplint_handler()
-    hdlr.setFormatter(cpplint_formatter(fmt))
-    cpplint_logger.addHandler(hdlr)
-    cpplint_logger.setLevel(logging.DEBUG)
-    return cpplint_logger
-
-
-class cpplint(Task.Task):
-    color = 'PINK'
-
-    def __init__(self, *k, **kw):
-        super(cpplint, self).__init__(*k, **kw)
-
-    def run(self):
-        global critical_errors
-        with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
-            params = {key: str(self.env[key]) for key in self.env if 'CPPLINT_' in key}
-            if params['CPPLINT_OUTPUT'] == 'waf':
-                params['CPPLINT_OUTPUT'] = 'emacs'
-            params['CPPLINT'] = self.env.get_flat('CPPLINT')
-            cmd = Utils.subst_vars(CPPLINT_STR, params)
-            env = self.env.env or None
-            Utils.subprocess.check_output(cmd + self.inputs[0].abspath(),
-                                          stderr=Utils.subprocess.STDOUT,
-                                          env=env, shell=True)
-        return critical_errors
-
-@TaskGen.extension('.h', '.hh', '.hpp', '.hxx')
-def cpplint_includes(self, node):
-    pass
-
-@TaskGen.feature('cpplint')
-@TaskGen.before_method('process_source')
-def post_cpplint(self):
-    if not self.env.CPPLINT_INITIALIZED:
-        for key, value in Options.options.__dict__.items():
-            if not key.startswith('CPPLINT_') or self.env[key]:
-                continue
-            self.env[key] = value
-        self.env.CPPLINT_INITIALIZED = True
-
-    if self.env.CPPLINT_SKIP:
-        return
-
-    if self.env.CPPLINT_OUTPUT not in CPPLINT_RE:
-        return
-
-    for src in self.to_list(getattr(self, 'source', [])):
-        if isinstance(src, Node.Node):
-            node = src
-        else:
-            node = self.path.find_or_declare(src)
-        if not node:
-            self.bld.fatal('Could not find %r' % src)
-        self.create_task('cpplint', node)
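The severity routing in cpplint_wrapper.write() above hinges on the
'[category] [confidence]' suffix that cpplint appends in emacs output format.
A standalone sketch of the same regex and threshold logic (the sample message
is invented):

    # Sketch of cpplint_wrapper.write(); the message is a made-up example.
    import re

    RE_EMACS = re.compile(r'(?P<filename>.*):(?P<linenum>\d+):  (?P<message>.*)'
                          r'  \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')

    message = 'src/main.cpp:12:  Missing space before {  [whitespace/braces] [5]'
    threshold = 5  # the --cpplint-break default

    critical_errors = 0
    result = RE_EMACS.match(message)
    if result:
        level = int(result.groupdict()['confidence'])
        if level >= threshold:
            critical_errors += 1  # would eventually fail the build
        if level <= 2:
            print('info:', message)
        elif level <= 4:
            print('warning:', message)
        else:
            print('error:', message)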
diff --git a/waflib/extras/cross_gnu.py b/waflib/extras/cross_gnu.py
deleted file mode 100644
index 309f53b..0000000
--- a/waflib/extras/cross_gnu.py
+++ /dev/null
@@ -1,227 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# vi: ts=4 noexpandtab
-# Tool to provide dedicated variables for cross-compilation
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-This tool allows using environment variables to define cross-compilation
-variables intended for build variants.
-
-The variables are obtained from the environment in 3 ways:
-
-1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
-2. By defining HOST_x
-3. By defining ${CHOST//-/_}_x
-
-Otherwise, one can set ``cfg.env.CHOST`` in ``wscript`` before loading ``cross_gnu``.
-
-Usage:
-
-- In your build script::
-
-	def configure(cfg):
-		...
-		for variant in x_variants:
-			cfg.setenv(variant)
-			cfg.load('cross_gnu')
-			cfg.xcheck_host_envar('POUET')
-			...
-
-
-- Then::
-
-	CHOST=arm-hardfloat-linux-gnueabi waf configure
-	env arm_hardfloat_linux_gnueabi_CC="clang -..." waf configure
-	CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
-	HOST_CC="clang -..." waf configure
-
-This example ``wscript`` compiles to Microchip PIC (xc16-gcc-xyz must be in PATH):
-
-.. code:: python
-
-		from waflib import Configure
-
-		#from https://gist.github.com/rpuntaie/2bddfb5d7b77db26415ee14371289971
-		import waf_variants
-
-		variants='pc fw/variant1 fw/variant2'.split()
-
-		top = "."
-		out = "../build"
-
-		PIC = '33FJ128GP804' #dsPICxxx
-
-		@Configure.conf
-		def gcc_modifier_xc16(cfg):
-				v = cfg.env
-				v.cprogram_PATTERN = '%s.elf'
-				v.LINKFLAGS_cprogram = ','.join(['-Wl','','','--defsym=__MPLAB_BUILD=0','','--script=p'+PIC+'.gld',
-						'--stack=16','--check-sections','--data-init','--pack-data','--handles','--isr','--no-gc-sections',
-						'--fill-upper=0','--stackguard=16','--no-force-link','--smart-io']) #,'--report-mem'])
-				v.CFLAGS_cprogram=['-mcpu='+PIC,'-omf=elf','-mlarge-code','-msmart-io=1',
-						'-msfr-warn=off','-mno-override-inline','-finline','-Winline']
-
-		def configure(cfg):
-				if 'fw' in cfg.variant: #firmware
-						cfg.env.DEST_OS = 'xc16' #cfg.env.CHOST = 'xc16' #works too
-						cfg.load('c cross_gnu') #cfg.env.CHOST becomes ['xc16']
-						...
-				else: #configure for pc SW
-						...
-
-		def build(bld):
-				if 'fw' in bld.variant: #firmware
-						bld.program(source='maintst.c', target='maintst');
-						bld(source='maintst.elf', target='maintst.hex', rule="xc16-bin2hex ${SRC} -a -omf=elf")
-				else: #build for pc SW
-						...
-
-"""
-
-import os
-from waflib import Utils, Configure
-from waflib.Tools import ccroot, gcc
-
-try:
-	from shlex import quote
-except ImportError:
-	from pipes import quote
-
-def get_chost_stuff(conf):
-	"""
-	Get the CHOST environment variable contents
-	"""
-	chost = None
-	chost_envar = None
-	if conf.env.CHOST:
-		chost = conf.env.CHOST[0]
-		chost_envar = chost.replace('-', '_')
-	return chost, chost_envar
-
-
-@Configure.conf
-def xcheck_var(conf, name, wafname=None, cross=False):
-	wafname = wafname or name
-
-	if wafname in conf.env:
-		value = conf.env[wafname]
-		if isinstance(value, str):
-			value = [value]
-	else:
-		envar = os.environ.get(name)
-		if not envar:
-			return
-		value = Utils.to_list(envar)
-
-	conf.env[wafname] = value
-	if cross:
-		pretty = 'cross-compilation %s' % wafname
-	else:
-		pretty = wafname
-	conf.msg('Will use %s' % pretty, " ".join(quote(x) for x in value))
-
-@Configure.conf
-def xcheck_host_prog(conf, name, tool, wafname=None):
-	wafname = wafname or name
-
-	chost, chost_envar = get_chost_stuff(conf)
-
-	specific = None
-	if chost:
-		specific = os.environ.get('%s_%s' % (chost_envar, name))
-
-	if specific:
-		value = Utils.to_list(specific)
-		conf.env[wafname] += value
-		conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name),
-		 " ".join(quote(x) for x in value))
-		return
-	else:
-		envar = os.environ.get('HOST_%s' % name)
-		if envar is not None:
-			value = Utils.to_list(envar)
-			conf.env[wafname] = value
-			conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
-			 " ".join(quote(x) for x in value))
-			return
-
-	if conf.env[wafname]:
-		return
-
-	value = None
-	if chost:
-		value = '%s-%s' % (chost, tool)
-
-	if value:
-		conf.env[wafname] = value
-		conf.msg('Will use cross-compilation %s from CHOST' % wafname, value)
-
-@Configure.conf
-def xcheck_host_envar(conf, name, wafname=None):
-	wafname = wafname or name
-
-	chost, chost_envar = get_chost_stuff(conf)
-
-	specific = None
-	if chost:
-		specific = os.environ.get('%s_%s' % (chost_envar, name))
-
-	if specific:
-		value = Utils.to_list(specific)
-		conf.env[wafname] += value
-		conf.msg('Will use cross-compilation %s from %s_%s' \
-		 % (name, chost_envar, name),
-		 " ".join(quote(x) for x in value))
-		return
-
-
-	envar = os.environ.get('HOST_%s' % name)
-	if envar is None:
-		return
-
-	value = Utils.to_list(envar) if envar != '' else [envar]
-
-	conf.env[wafname] = value
-	conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
-	 " ".join(quote(x) for x in value))
-
-
-@Configure.conf
-def xcheck_host(conf):
-	conf.xcheck_var('CHOST', cross=True)
-	conf.env.CHOST = conf.env.CHOST or [conf.env.DEST_OS]
-	conf.env.DEST_OS = conf.env.CHOST[0].replace('-','_')
-	conf.xcheck_host_prog('CC', 'gcc')
-	conf.xcheck_host_prog('CXX', 'g++')
-	conf.xcheck_host_prog('LINK_CC', 'gcc')
-	conf.xcheck_host_prog('LINK_CXX', 'g++')
-	conf.xcheck_host_prog('AR', 'ar')
-	conf.xcheck_host_prog('AS', 'as')
-	conf.xcheck_host_prog('LD', 'ld')
-	conf.xcheck_host_envar('CFLAGS')
-	conf.xcheck_host_envar('CXXFLAGS')
-	conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
-	conf.xcheck_host_envar('LIB')
-	conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
-	conf.xcheck_host_envar('PKG_CONFIG_PATH')
-
-	if not conf.env.env:
-		conf.env.env = {}
-		conf.env.env.update(os.environ)
-	if conf.env.PKG_CONFIG_LIBDIR:
-		conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
-	if conf.env.PKG_CONFIG_PATH:
-		conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
-
-def configure(conf):
-	"""
-	Configuration example for gcc; it will not work for g++/clang/clang++
-	"""
-	conf.xcheck_host()
-	conf.gcc_common_flags()
-	conf.gcc_modifier_platform()
-	conf.cc_load_tools()
-	conf.cc_add_flags()
-	conf.link_add_flags()
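xcheck_host_prog() above resolves each tool in a fixed order: the
CHOST-specific variable first, then HOST_x, then the ${CHOST}-tool fallback.
A minimal standalone sketch of that precedence (the function name
resolve_tool is illustrative, not part of this tool):

	# Sketch of the three-way lookup order in xcheck_host_prog(), without waf.
	import os

	def resolve_tool(name, tool, chost=None):
		chost_envar = chost.replace('-', '_') if chost else None
		# 1. ${CHOST//-/_}_<name>, e.g. arm_hardfloat_linux_gnueabi_CC
		if chost_envar:
			specific = os.environ.get('%s_%s' % (chost_envar, name))
			if specific:
				return specific.split()
		# 2. HOST_<name>, e.g. HOST_CC
		envar = os.environ.get('HOST_%s' % name)
		if envar is not None:
			return envar.split()
		# 3. derive ${CHOST}-<tool>, e.g. arm-hardfloat-linux-gnueabi-gcc
		if chost:
			return ['%s-%s' % (chost, tool)]
		return None

	print(resolve_tool('CC', 'gcc', chost='arm-hardfloat-linux-gnueabi'))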
diff --git a/waflib/extras/cython.py b/waflib/extras/cython.py
deleted file mode 100644
index 591c274..0000000
--- a/waflib/extras/cython.py
+++ /dev/null
@@ -1,147 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010-2015
-
-import re
-from waflib import Task, Logs
-from waflib.TaskGen import extension
-
-cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*')
-re_cyt = re.compile(r"""
-	^\s*                           # must begin with some whitespace characters
-	(?:from\s+(\w+)(?:\.\w+)*\s+)? # optionally match "from foo(.baz)" and capture foo
-	c?import\s(\w+|[*])            # require "import bar" and capture bar
-	""", re.M | re.VERBOSE)
-
-@extension('.pyx')
-def add_cython_file(self, node):
-	"""
-	Process a *.pyx* file given in the list of source files. No additional
-	feature is required::
-
-		def build(bld):
-			bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')
-	"""
-	ext = '.c'
-	if 'cxx' in self.features:
-		self.env.append_unique('CYTHONFLAGS', '--cplus')
-		ext = '.cc'
-
-	for x in getattr(self, 'cython_includes', []):
-		# TODO re-use these nodes in "scan" below
-		d = self.path.find_dir(x)
-		if d:
-			self.env.append_unique('CYTHONFLAGS', '-I%s' % d.abspath())
-
-	tsk = self.create_task('cython', node, node.change_ext(ext))
-	self.source += tsk.outputs
-
-class cython(Task.Task):
-	run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}'
-	color   = 'GREEN'
-
-	vars    = ['INCLUDES']
-	"""
-	Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS will be appended
-	by the metaclass.
-	"""
-
-	ext_out = ['.h']
-	"""
-	The creation of a .h file is known only after the build has begun, so it is not
-	possible to compute a build order just by looking at the task inputs/outputs.
-	"""
-
-	def runnable_status(self):
-		"""
-		Perform a double-check to add the headers created by cython
-		to the output nodes. The scanner is executed only when the cython task
-		must be executed (optimization).
-		"""
-		ret = super(cython, self).runnable_status()
-		if ret == Task.ASK_LATER:
-			return ret
-		for x in self.generator.bld.raw_deps[self.uid()]:
-			if x.startswith('header:'):
-				self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', '')))
-		return super(cython, self).runnable_status()
-
-	def post_run(self):
-		for x in self.outputs:
-			if x.name.endswith('.h'):
-				if not x.exists():
-					if Logs.verbose:
-						Logs.warn('Expected %r', x.abspath())
-					x.write('')
-		return Task.Task.post_run(self)
-
-	def scan(self):
-		"""
-		Return the dependent files (.pxd) by looking in the include folders.
-		Put the headers to generate in the custom list "bld.raw_deps".
-		To inspect the scanner results use::
-
-			$ waf clean build --zones=deps
-		"""
-		node = self.inputs[0]
-		txt = node.read()
-
-		mods = set()
-		for m in re_cyt.finditer(txt):
-			if m.group(1):  # matches "from foo import bar"
-				mods.add(m.group(1))
-			else:
-				mods.add(m.group(2))
-
-		Logs.debug('cython: mods %r', mods)
-		incs = getattr(self.generator, 'cython_includes', [])
-		incs = [self.generator.path.find_dir(x) for x in incs]
-		incs.append(node.parent)
-
-		found = []
-		missing = []
-		for x in sorted(mods):
-			for y in incs:
-				k = y.find_resource(x + '.pxd')
-				if k:
-					found.append(k)
-					break
-			else:
-				missing.append(x)
-
-		# the cython file implicitly depends on a pxd file that might be present
-		implicit = node.parent.find_resource(node.name[:-3] + 'pxd')
-		if implicit:
-			found.append(implicit)
-
-		Logs.debug('cython: found %r', found)
-
-		# Now the .h files to be created - store them in bld.raw_deps for later use
-		has_api = False
-		has_public = False
-		for l in txt.splitlines():
-			if cy_api_pat.match(l):
-				if ' api ' in l:
-					has_api = True
-				if ' public ' in l:
-					has_public = True
-		name = node.name.replace('.pyx', '')
-		if has_api:
-			missing.append('header:%s_api.h' % name)
-		if has_public:
-			missing.append('header:%s.h' % name)
-
-		return (found, missing)
-
-def options(ctx):
-	ctx.add_option('--cython-flags', action='store', default='', help='space separated list of flags to pass to cython')
-
-def configure(ctx):
-	if not ctx.env.CC and not ctx.env.CXX:
-		ctx.fatal('Load a C/C++ compiler first')
-	if not ctx.env.PYTHON:
-		ctx.fatal('Load the python tool first!')
-	ctx.find_program('cython', var='CYTHON')
-	if hasattr(ctx.options, 'cython_flags'):
-		ctx.env.CYTHONFLAGS = ctx.options.cython_flags
-
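The re_cyt pattern above drives the whole dependency scan: it collects module
names from import/cimport lines so that matching .pxd files can be looked up
in the include folders. A quick standalone check of what it captures (the
.pyx snippet is invented):

	# Sketch of what re_cyt extracts; the .pyx snippet is a made-up example.
	import re

	re_cyt = re.compile(r"""
		^\s*                           # must begin with some whitespace characters
		(?:from\s+(\w+)(?:\.\w+)*\s+)? # optionally match "from foo(.baz)" and capture foo
		c?import\s(\w+|[*])            # require "import bar" and capture bar
		""", re.M | re.VERBOSE)

	txt = '''
	cimport numpy
	from libc.stdlib cimport malloc
	import os
	'''

	mods = set()
	for m in re_cyt.finditer(txt):
		mods.add(m.group(1) or m.group(2))

	print(sorted(mods))  # ['libc', 'numpy', 'os']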
diff --git a/waflib/extras/dcc.py b/waflib/extras/dcc.py
deleted file mode 100644
index c1a57c0..0000000
--- a/waflib/extras/dcc.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Jérôme Carretero, 2011 (zougloub)
-
-from waflib import Options
-from waflib.Tools import ccroot
-from waflib.Configure import conf
-
-@conf
-def find_dcc(conf):
-	conf.find_program(['dcc'], var='CC', path_list=getattr(Options.options, 'diabbindir', ""))
-	conf.env.CC_NAME = 'dcc'
-
-@conf
-def find_dld(conf):
-	conf.find_program(['dld'], var='LINK_CC', path_list=getattr(Options.options, 'diabbindir', ""))
-	conf.env.LINK_CC_NAME = 'dld'
-
-@conf
-def find_dar(conf):
-	conf.find_program(['dar'], var='AR', path_list=getattr(Options.options, 'diabbindir', ""))
-	conf.env.AR_NAME = 'dar'
-	conf.env.ARFLAGS = 'rcs'
-
-@conf
-def find_ddump(conf):
-	conf.find_program(['ddump'], var='DDUMP', path_list=getattr(Options.options, 'diabbindir', ""))
-
-@conf
-def dcc_common_flags(conf):
-	v = conf.env
-	v['CC_SRC_F']            = []
-	v['CC_TGT_F']            = ['-c', '-o']
-
-	# linker
-	if not v['LINK_CC']:
-		v['LINK_CC'] = v['CC']
-	v['CCLNK_SRC_F']         = []
-	v['CCLNK_TGT_F']         = ['-o']
-	v['CPPPATH_ST']          = '-I%s'
-	v['DEFINES_ST']          = '-D%s'
-
-	v['LIB_ST']              = '-l:%s' # template for adding libs
-	v['LIBPATH_ST']          = '-L%s' # template for adding libpaths
-	v['STLIB_ST']            = '-l:%s'
-	v['STLIBPATH_ST']        = '-L%s'
-	v['RPATH_ST']            = '-Wl,-rpath,%s'
-	#v['STLIB_MARKER']        = '-Wl,-Bstatic'
-
-	# program
-	v['cprogram_PATTERN']    = '%s.elf'
-
-	# static lib
-	v['LINKFLAGS_cstlib']    = ['-Wl,-Bstatic']
-	v['cstlib_PATTERN']      = 'lib%s.a'
-
-def configure(conf):
-	conf.find_dcc()
-	conf.find_dar()
-	conf.find_dld()
-	conf.find_ddump()
-	conf.dcc_common_flags()
-	conf.cc_load_tools()
-	conf.cc_add_flags()
-	conf.link_add_flags()
-
-def options(opt):
-	"""
-	Add the ``--with-diab-bindir`` command-line option.
-	"""
-	opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
-
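Wiring the tool above into a project is the usual waf two-step: load it in
options() so --with-diab-bindir is registered, then again in configure() to
locate the programs. A minimal wscript sketch (the bin folder path and the
target name are illustrative):

	# Hypothetical wscript using the dcc tool; adjust the path for your setup.
	def options(opt):
		opt.load('dcc')

	def configure(conf):
		# e.g. waf configure --with-diab-bindir=/opt/diab/5.9/LINUX386/bin
		conf.load('dcc')

	def build(bld):
		# produces main.elf via the cprogram_PATTERN set in dcc_common_flags()
		bld.program(source='main.c', target='main')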
diff --git a/waflib/extras/distnet.py b/waflib/extras/distnet.py
deleted file mode 100644
index ff3ed8e..0000000
--- a/waflib/extras/distnet.py
+++ /dev/null
@@ -1,430 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-waf-powered distributed network builds, with a network cache.
-
-Caching files from a server has advantages over an NFS/Samba shared folder:
-
-- builds are much faster because they use local files
-- builds just continue to work in case of a network glitch
-- permissions are much simpler to manage
-"""
-
-import os, urllib, tarfile, re, shutil, tempfile, sys
-from collections import OrderedDict
-from waflib import Context, Utils, Logs
-
-try:
-	from urllib.parse import urlencode
-except ImportError:
-	urlencode = urllib.urlencode
-
-def safe_urlencode(data):
-	x = urlencode(data)
-	try:
-		x = x.encode('utf-8')
-	except Exception:
-		pass
-	return x
-
-try:
-	from urllib.error import URLError
-except ImportError:
-	from urllib2 import URLError
-
-try:
-	from urllib.request import Request, urlopen
-except ImportError:
-	from urllib2 import Request, urlopen
-
-DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
-DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
-TARFORMAT = 'w:bz2'
-TIMEOUT = 60
-REQUIRES = 'requires.txt'
-
-re_com = re.compile(r'\s*#.*', re.M)
-
-def total_version_order(num):
-	lst = num.split('.')
-	template = '%10s' * len(lst)
-	ret = template % tuple(lst)
-	return ret
-
-def get_distnet_cache():
-	return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)
-
-def get_server_url():
-	return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
-
-def get_download_url():
-	return '%s/download.py' % get_server_url()
-
-def get_upload_url():
-	return '%s/upload.py' % get_server_url()
-
-def get_resolve_url():
-	return '%s/resolve.py' % get_server_url()
-
-def send_package_name():
-	out = getattr(Context.g_module, 'out', 'build')
-	pkgfile = '%s/package_to_upload.tarfile' % out
-	return pkgfile
-
-class package(Context.Context):
-	fun = 'package'
-	cmd = 'package'
-
-	def execute(self):
-		try:
-			files = self.files
-		except AttributeError:
-			files = self.files = []
-
-		Context.Context.execute(self)
-		pkgfile = send_package_name()
-		if pkgfile not in files:
-			if REQUIRES not in files:
-				files.append(REQUIRES)
-			self.make_tarfile(pkgfile, files, add_to_package=False)
-
-	def make_tarfile(self, filename, files, **kw):
-		if kw.get('add_to_package', True):
-			self.files.append(filename)
-
-		with tarfile.open(filename, TARFORMAT) as tar:
-			endname = os.path.split(filename)[-1]
-			endname = endname.split('.')[0] + '/'
-			for x in files:
-				tarinfo = tar.gettarinfo(x, x)
-				tarinfo.uid   = tarinfo.gid   = 0
-				tarinfo.uname = tarinfo.gname = 'root'
-				tarinfo.size = os.stat(x).st_size
-
-				# TODO - more archive creation options?
-				if kw.get('bare', True):
-					tarinfo.name = os.path.split(x)[1]
-				else:
-					tarinfo.name = endname + x # todo, if tuple, then..
-				Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
-				with open(x, 'rb') as f:
-					tar.addfile(tarinfo, f)
-		Logs.info('Created %s', filename)
-
-class publish(Context.Context):
-	fun = 'publish'
-	cmd = 'publish'
-	def execute(self):
-		if hasattr(Context.g_module, 'publish'):
-			Context.Context.execute(self)
-		mod = Context.g_module
-
-		rfile = getattr(self, 'rfile', send_package_name())
-		if not os.path.isfile(rfile):
-			self.fatal('Create the release file with "waf package" first! %r' % rfile)
-
-		fdata = Utils.readf(rfile, m='rb')
-		data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])
-
-		req = Request(get_upload_url(), data)
-		response = urlopen(req, timeout=TIMEOUT)
-		data = response.read().strip()
-
-		if sys.hexversion>0x300000f:
-			data = data.decode('utf-8')
-
-		if data != 'ok':
-			self.fatal('Could not publish the package %r' % data)
-
-class constraint(object):
-	def __init__(self, line=''):
-		self.required_line = line
-		self.info = []
-
-		line = line.strip()
-		if not line:
-			return
-
-		lst = line.split(',')
-		if lst:
-			self.pkgname = lst[0]
-			self.required_version = lst[1]
-			for k in lst:
-				a, b, c = k.partition('=')
-				if a and c:
-					self.info.append((a, c))
-	def __str__(self):
-		buf = []
-		buf.append(self.pkgname)
-		buf.append(self.required_version)
-		for k in self.info:
-			buf.append('%s=%s' % k)
-		return ','.join(buf)
-
-	def __repr__(self):
-		return "requires %s-%s" % (self.pkgname, self.required_version)
-
-	def human_display(self, pkgname, pkgver):
-		return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)
-
-	def why(self):
-		ret = []
-		for x in self.info:
-			if x[0] == 'reason':
-				ret.append(x[1])
-		return ret
-
-	def add_reason(self, reason):
-		self.info.append(('reason', reason))
-
-def parse_constraints(text):
-	assert(text is not None)
-	constraints = []
-	text = re.sub(re_com, '', text)
-	lines = text.splitlines()
-	for line in lines:
-		line = line.strip()
-		if not line:
-			continue
-		constraints.append(constraint(line))
-	return constraints
-
-def list_package_versions(cachedir, pkgname):
-	pkgdir = os.path.join(cachedir, pkgname)
-	try:
-		versions = os.listdir(pkgdir)
-	except OSError:
-		return []
-	versions.sort(key=total_version_order)
-	versions.reverse()
-	return versions
-
-class package_reader(Context.Context):
-	cmd = 'solver'
-	fun = 'solver'
-
-	def __init__(self, **kw):
-		Context.Context.__init__(self, **kw)
-
-		self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
-		self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
-		self.cache_constraints = {}
-		self.constraints = []
-
-	def compute_dependencies(self, filename=REQUIRES):
-		text = Utils.readf(filename)
-		data = safe_urlencode([('text', text)])
-
-		if '--offline' in sys.argv:
-			self.constraints = self.local_resolve(text)
-		else:
-			req = Request(get_resolve_url(), data)
-			try:
-				response = urlopen(req, timeout=TIMEOUT)
-			except URLError as e:
-				Logs.warn('The package server is down! %r', e)
-				self.constraints = self.local_resolve(text)
-			else:
-				ret = response.read()
-				try:
-					ret = ret.decode('utf-8')
-				except Exception:
-					pass
-				self.trace(ret)
-				self.constraints = parse_constraints(ret)
-		self.check_errors()
-
-	def check_errors(self):
-		errors = False
-		for c in self.constraints:
-			if not c.required_version:
-				errors = True
-
-				reasons = c.why()
-				if len(reasons) == 1:
-					Logs.error('%s but no matching package could be found in this repository', reasons[0])
-				else:
-					Logs.error('Conflicts on package %r:', c.pkgname)
-					for r in reasons:
-						Logs.error('  %s', r)
-		if errors:
-			self.fatal('The package requirements cannot be satisfied!')
-
-	def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
-		try:
-			return self.cache_constraints[(pkgname, pkgver)]
-		except KeyError:
-			text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
-			ret = parse_constraints(text)
-			self.cache_constraints[(pkgname, pkgver)] = ret
-			return ret
-
-	def apply_constraint(self, domain, constraint):
-		vname = constraint.required_version.replace('*', '.*')
-		rev = re.compile(vname, re.M)
-		ret = [x for x in domain if rev.match(x)]
-		return ret
-
-	def trace(self, *k):
-		if getattr(self, 'debug', None):
-			Logs.error(*k)
-
-	def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
-		# breadth first search
-		n_packages_to_versions = dict(packages_to_versions)
-		n_packages_to_constraints = dict(packages_to_constraints)
-
-		self.trace("calling solve with %r    %r %r" % (packages_to_versions, todo, done))
-		done = done + [pkgname]
-
-		constraints = self.load_constraints(pkgname, pkgver)
-		self.trace("constraints %r" % constraints)
-
-		for k in constraints:
-			try:
-				domain = n_packages_to_versions[k.pkgname]
-			except KeyError:
-				domain = list_package_versions(get_distnet_cache(), k.pkgname)
-
-
-			self.trace("constraints?")
-			if not k.pkgname in done:
-				todo = todo + [k.pkgname]
-
-			self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))
-
-			# apply the constraint
-			domain = self.apply_constraint(domain, k)
-
-			self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))
-
-			n_packages_to_versions[k.pkgname] = domain
-
-			# then store the constraint applied
-			constraints = list(packages_to_constraints.get(k.pkgname, []))
-			constraints.append((pkgname, pkgver, k))
-			n_packages_to_constraints[k.pkgname] = constraints
-
-			if not domain:
-				self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
-				return (n_packages_to_versions, n_packages_to_constraints)
-
-		# next package on the todo list
-		if not todo:
-			return (n_packages_to_versions, n_packages_to_constraints)
-
-		n_pkgname = todo[0]
-		n_pkgver = n_packages_to_versions[n_pkgname][0]
-		tmp = dict(n_packages_to_versions)
-		tmp[n_pkgname] = [n_pkgver]
-
-		self.trace("fixed point %s" % n_pkgname)
-
-		return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)
-
-	def get_results(self):
-		return '\n'.join([str(c) for c in self.constraints])
-
-	def solution_to_constraints(self, versions, constraints):
-		solution = []
-		for p in versions:
-			c = constraint()
-			solution.append(c)
-
-			c.pkgname = p
-			if versions[p]:
-				c.required_version = versions[p][0]
-			else:
-				c.required_version = ''
-			for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
-				c.add_reason(c2.human_display(from_pkgname, from_pkgver))
-		return solution
-
-	def local_resolve(self, text):
-		self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
-		p2v = OrderedDict({self.myproject: [self.myversion]})
-		(versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
-		return self.solution_to_constraints(versions, constraints)
-
-	def download_to_file(self, pkgname, pkgver, subdir, tmp):
-		data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
-		req = urlopen(get_download_url(), data, timeout=TIMEOUT)
-		with open(tmp, 'wb') as f:
-			while True:
-				buf = req.read(8192)
-				if not buf:
-					break
-				f.write(buf)
-
-	def extract_tar(self, subdir, pkgdir, tmpfile):
-		with tarfile.open(tmpfile) as f:
-			temp = tempfile.mkdtemp(dir=pkgdir)
-			try:
-				f.extractall(temp)
-				os.rename(temp, os.path.join(pkgdir, subdir))
-			finally:
-				try:
-					shutil.rmtree(temp)
-				except Exception:
-					pass
-
-	def get_pkg_dir(self, pkgname, pkgver, subdir):
-		pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
-		if not os.path.isdir(pkgdir):
-			os.makedirs(pkgdir)
-
-		target = os.path.join(pkgdir, subdir)
-
-		if os.path.exists(target):
-			return target
-
-		(fd, tmp) = tempfile.mkstemp(dir=pkgdir)
-		try:
-			os.close(fd)
-			self.download_to_file(pkgname, pkgver, subdir, tmp)
-			if subdir == REQUIRES:
-				os.rename(tmp, target)
-			else:
-				self.extract_tar(subdir, pkgdir, tmp)
-		finally:
-			try:
-				os.remove(tmp)
-			except OSError:
-				pass
-
-		return target
-
-	def __iter__(self):
-		if not self.constraints:
-			self.compute_dependencies()
-		for x in self.constraints:
-			if x.pkgname == self.myproject:
-				continue
-			yield x
-
-	def execute(self):
-		self.compute_dependencies()
-
-packages = package_reader()
-
-def load_tools(ctx, extra):
-	global packages
-	for c in packages:
-		packages.get_pkg_dir(c.pkgname, c.required_version, extra)
-		noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
-		for x in os.listdir(noarchdir):
-			if x.startswith('waf_') and x.endswith('.py'):
-				# note: slice off the '.py' suffix; rstrip('.py') would also strip trailing 'p'/'y' characters
-				ctx.load([x[:-3]], tooldir=[noarchdir])
-
-def options(opt):
-	opt.add_option('--offline', action='store_true')
-	packages.execute()
-	load_tools(opt, REQUIRES)
-
-def configure(conf):
-	load_tools(conf, conf.variant)
-
-def build(bld):
-	load_tools(bld, bld.variant)
-
diff --git a/waflib/extras/doxygen.py b/waflib/extras/doxygen.py
deleted file mode 100644
index 28f56e9..0000000
--- a/waflib/extras/doxygen.py
+++ /dev/null
@@ -1,227 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Thomas Nagy 2008-2010 (ita)
-
-"""
-
-Doxygen support
-
-Variables passed to bld():
-* doxyfile -- the Doxyfile to use
-* doxy_tar -- destination archive for generated documentation (if desired)
-* install_path -- where to install the documentation
-* pars -- dictionary overriding doxygen configuration settings
-
-When using this tool, the wscript will look like:
-
-	def options(opt):
-		opt.load('doxygen')
-
-	def configure(conf):
-		conf.load('doxygen')
-		# check conf.env.DOXYGEN, if it is mandatory
-
-	def build(bld):
-		if bld.env.DOXYGEN:
-			bld(features="doxygen", doxyfile='Doxyfile', ...)
-"""
-
-import os, os.path, re
-from waflib import Task, Utils, Node
-from waflib.TaskGen import feature
-
-DOXY_STR = '"${DOXYGEN}" - ' # the trailing '-' makes doxygen read its configuration from stdin
-DOXY_FMTS = 'html latex man rtf xml'.split()
-DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
-c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
-inc m mm py f90
-'''.split())
-
-re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
-re_nl = re.compile('\r*\n', re.M)
-def parse_doxy(txt):
-	tbl = {}
-	txt   = re_rl.sub('', txt)
-	lines = re_nl.split(txt)
-	for x in lines:
-		x = x.strip()
-		if not x or x.startswith('#') or x.find('=') < 0:
-			continue
-		if x.find('+=') >= 0:
-			tmp = x.split('+=')
-			key = tmp[0].strip()
-			if key in tbl:
-				tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
-			else:
-				tbl[key] = '+='.join(tmp[1:]).strip()
-		else:
-			tmp = x.split('=')
-			tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
-	return tbl
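-
-# For illustration (hypothetical input), '+=' lines accumulate onto the key:
-#   parse_doxy('INPUT = src\nINPUT += include\n') == {'INPUT': 'src include'}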
-
-class doxygen(Task.Task):
-	vars  = ['DOXYGEN', 'DOXYFLAGS']
-	color = 'BLUE'
-
-	def runnable_status(self):
-		'''
-		self.pars is populated in runnable_status, because this function runs
-		*before* both of its consumers: scan() and run()
-
-		also sets output_dir (node) for the output
-		'''
-
-		for x in self.run_after:
-			if not x.hasrun:
-				return Task.ASK_LATER
-
-		if not getattr(self, 'pars', None):
-			txt = self.inputs[0].read()
-			self.pars = parse_doxy(txt)
-			if self.pars.get('OUTPUT_DIRECTORY'):
-				# Use the path parsed from the Doxyfile as an absolute path
-				output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY'])
-			else:
-				# If no OUTPUT_DIRECTORY was specified in the Doxyfile, build the path from the Doxyfile name + '.doxy'
-				output_node = self.inputs[0].parent.get_bld().make_node(self.inputs[0].name + '.doxy')
-			output_node.mkdir()
-			self.pars['OUTPUT_DIRECTORY'] = output_node.abspath()
-
-			# Override with any parameters passed to the task generator
-			if getattr(self.generator, 'pars', None):
-				for k, v in self.generator.pars.items():
-					self.pars[k] = v
-
-			self.doxy_inputs = getattr(self, 'doxy_inputs', [])
-			if not self.pars.get('INPUT'):
-				self.doxy_inputs.append(self.inputs[0].parent)
-			else:
-				for i in self.pars.get('INPUT').split():
-					if os.path.isabs(i):
-						node = self.generator.bld.root.find_node(i)
-					else:
-						node = self.inputs[0].parent.find_node(i)
-					if not node:
-						self.generator.bld.fatal('Could not find the doxygen input %r' % i)
-					self.doxy_inputs.append(node)
-
-		if not getattr(self, 'output_dir', None):
-			bld = self.generator.bld
-			# Output path is always an absolute path as it was transformed above.
-			self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
-
-		self.signature()
-		ret = Task.Task.runnable_status(self)
-		if ret == Task.SKIP_ME:
-			# in case the files were removed
-			self.add_install()
-		return ret
-
-	def scan(self):
-		exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
-		exclude_patterns = [pattern.replace('*/', '**/') for pattern in exclude_patterns]
-		file_patterns = self.pars.get('FILE_PATTERNS','').split()
-		if not file_patterns:
-			file_patterns = DOXY_FILE_PATTERNS.split()
-		if self.pars.get('RECURSIVE') == 'YES':
-			file_patterns = ["**/%s" % pattern for pattern in file_patterns]
-		nodes = []
-		names = []
-		for node in self.doxy_inputs:
-			if os.path.isdir(node.abspath()):
-				for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
-					nodes.append(m)
-			else:
-				nodes.append(node)
-		return (nodes, names)
-
-	def run(self):
-		dct = self.pars.copy()
-		code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
-		code = code.encode() # for python 3
-		cmd = Utils.subst_vars(DOXY_STR, self.env)
-		env = self.env.env or None
-		proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.inputs[0].parent.abspath())
-		proc.communicate(code)
-		return proc.returncode
-
-	def post_run(self):
-		nodes = self.output_dir.ant_glob('**/*', quiet=True)
-		for x in nodes:
-			self.generator.bld.node_sigs[x] = self.uid()
-		self.add_install()
-		return Task.Task.post_run(self)
-
-	def add_install(self):
-		nodes = self.output_dir.ant_glob('**/*', quiet=True)
-		self.outputs += nodes
-		if getattr(self.generator, 'install_path', None):
-			if not getattr(self.generator, 'doxy_tar', None):
-				self.generator.add_install_files(install_to=self.generator.install_path,
-					install_from=self.outputs,
-					postpone=False,
-					cwd=self.output_dir,
-					relative_trick=True)
-
-class tar(Task.Task):
-	"quick tar creation"
-	run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
-	color   = 'RED'
-	after   = ['doxygen']
-	def runnable_status(self):
-		for x in getattr(self, 'input_tasks', []):
-			if not x.hasrun:
-				return Task.ASK_LATER
-
-		if not getattr(self, 'tar_done_adding', None):
-			# execute this only once
-			self.tar_done_adding = True
-			for x in getattr(self, 'input_tasks', []):
-				self.set_inputs(x.outputs)
-			if not self.inputs:
-				return Task.SKIP_ME
-		return Task.Task.runnable_status(self)
-
-	def __str__(self):
-		tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
-		return '%s: %s\n' % (self.__class__.__name__, tgt_str)
-
-@feature('doxygen')
-def process_doxy(self):
-	if not getattr(self, 'doxyfile', None):
-		self.bld.fatal('no doxyfile variable specified??')
-
-	node = self.doxyfile
-	if not isinstance(node, Node.Node):
-		node = self.path.find_resource(node)
-	if not node:
-		self.bld.fatal('doxygen file %s not found' % self.doxyfile)
-
-	# the task instance
-	dsk = self.create_task('doxygen', node)
-
-	if getattr(self, 'doxy_tar', None):
-		tsk = self.create_task('tar')
-		tsk.input_tasks = [dsk]
-		tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
-		if self.doxy_tar.endswith('bz2'):
-			tsk.env['TAROPTS'] = ['cjf']
-		elif self.doxy_tar.endswith('gz'):
-			tsk.env['TAROPTS'] = ['czf']
-		else:
-			tsk.env['TAROPTS'] = ['cf']
-		if getattr(self, 'install_path', None):
-			self.add_install_files(install_to=self.install_path, install_from=tsk.outputs)
-
-def configure(conf):
-	'''
-	Check if doxygen and tar commands are present in the system
-
-	If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
-	variables will be set. Detection can be controlled by setting DOXYGEN and
-	TAR environmental variables.
-	'''
-
-	conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
-	conf.find_program('tar', var='TAR', mandatory=False)
diff --git a/waflib/extras/dpapi.py b/waflib/extras/dpapi.py
deleted file mode 100644
index b94d482..0000000
--- a/waflib/extras/dpapi.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Matt Clarkson, 2012
-
-'''
-DPAPI access library (http://msdn.microsoft.com/en-us/library/ms995355.aspx)
-This file uses code originally created by Crusher Joe:
-http://article.gmane.org/gmane.comp.python.ctypes/420
-And modified by Wayne Koorts:
-http://stackoverflow.com/questions/463832/using-dpapi-with-python
-'''
-
-from ctypes import windll, byref, cdll, Structure, POINTER, c_char, c_buffer
-from ctypes.wintypes import DWORD
-from waflib.Configure import conf
-
-LocalFree = windll.kernel32.LocalFree
-memcpy = cdll.msvcrt.memcpy
-CryptProtectData = windll.crypt32.CryptProtectData
-CryptUnprotectData = windll.crypt32.CryptUnprotectData
-CRYPTPROTECT_UI_FORBIDDEN = 0x01
-try:
-	extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'.encode('ascii')
-except AttributeError:
-	extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'
-
-class DATA_BLOB(Structure):
-	_fields_ = [
-		('cbData', DWORD),
-		('pbData', POINTER(c_char))
-	]
-
-def get_data(blob_out):
-	cbData = int(blob_out.cbData)
-	pbData = blob_out.pbData
-	buffer = c_buffer(cbData)
-	memcpy(buffer, pbData, cbData)
-	LocalFree(pbData)
-	return buffer.raw
-
-@conf
-def dpapi_encrypt_data(self, input_bytes, entropy = extra_entropy):
-	'''
-	Encrypts data and returns byte string
-
-	:param input_bytes: The data to be encrypted
-	:type input_bytes: String or Bytes
-	:param entropy: Extra entropy to add to the encryption process (optional)
-	:type entropy: String or Bytes
-	'''
-	if not isinstance(input_bytes, bytes) or not isinstance(entropy, bytes):
-		self.fatal('The inputs to dpapi must be bytes')
-	buffer_in      = c_buffer(input_bytes, len(input_bytes))
-	buffer_entropy = c_buffer(entropy, len(entropy))
-	blob_in        = DATA_BLOB(len(input_bytes), buffer_in)
-	blob_entropy   = DATA_BLOB(len(entropy), buffer_entropy)
-	blob_out       = DATA_BLOB()
-
-	if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy), 
-		None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
-		return get_data(blob_out)
-	else:
-		self.fatal('Failed to encrypt data')
-
-@conf
-def dpapi_decrypt_data(self, encrypted_bytes, entropy = extra_entropy):
-	'''
-	Decrypts data and returns byte string
-
-	:param encrypted_bytes: The encrypted data
-	:type encrypted_bytes: Bytes
-	:param entropy: Extra entropy to add to the encryption process (optional)
-	:type entropy: String or Bytes
-	'''
-	if not isinstance(encrypted_bytes, bytes) or not isinstance(entropy, bytes):
-		self.fatal('The inputs to dpapi must be bytes')
-	buffer_in      = c_buffer(encrypted_bytes, len(encrypted_bytes))
-	buffer_entropy = c_buffer(entropy, len(entropy))
-	blob_in        = DATA_BLOB(len(encrypted_bytes), buffer_in)
-	blob_entropy   = DATA_BLOB(len(entropy), buffer_entropy)
-	blob_out       = DATA_BLOB()
-	if CryptUnprotectData(byref(blob_in), None, byref(blob_entropy), None,
-		None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
-		return get_data(blob_out)
-	else:
-		self.fatal('Failed to decrypt data')
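-
-# A minimal round-trip sketch (hypothetical wscript; Windows-only, since DPAPI
-# is a Windows facility):
-#
-#	def configure(conf):
-#		blob = conf.dpapi_encrypt_data('secret'.encode('ascii'))
-#		assert conf.dpapi_decrypt_data(blob) == 'secret'.encode('ascii')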
-
diff --git a/waflib/extras/eclipse.py b/waflib/extras/eclipse.py
deleted file mode 100644
index bb78741..0000000
--- a/waflib/extras/eclipse.py
+++ /dev/null
@@ -1,431 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Eclipse CDT 5.0 generator for Waf
-# Richard Quirk 2009-2011 (New BSD License)
-# Thomas Nagy 2011 (ported to Waf 1.6)
-
-"""
-Usage:
-
-def options(opt):
-	opt.load('eclipse')
-
-$ waf configure eclipse
-"""
-
-import sys, os
-from waflib import Utils, Logs, Context, Build, TaskGen, Scripting, Errors, Node
-from xml.dom.minidom import Document
-
-STANDARD_INCLUDES = [ '/usr/local/include', '/usr/include' ]
-
-oe_cdt = 'org.eclipse.cdt'
-cdt_mk = oe_cdt + '.make.core'
-cdt_core = oe_cdt + '.core'
-cdt_bld = oe_cdt + '.build.core'
-extbuilder_dir = '.externalToolBuilders'
-extbuilder_name = 'Waf_Builder.launch'
-
-class eclipse(Build.BuildContext):
-	cmd = 'eclipse'
-	fun = Scripting.default_cmd
-
-	def execute(self):
-		"""
-		Entry point
-		"""
-		self.restore()
-		if not self.all_envs:
-			self.load_envs()
-		self.recurse([self.run_dir])
-
-		appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
-		self.create_cproject(appname, pythonpath=self.env['ECLIPSE_PYTHON_PATH'])
-
-	# Helper to dump the XML document content to XML with UTF-8 encoding
-	def write_conf_to_xml(self, filename, document):
-		self.srcnode.make_node(filename).write(document.toprettyxml(encoding='UTF-8'), flags='wb')
-
-	def create_cproject(self, appname, workspace_includes=[], pythonpath=[]):
-		"""
-		Create the Eclipse CDT .project and .cproject files
-		@param appname The name that will appear in the Project Explorer
-		@param workspace_includes Optional project includes to prevent
-			  "Unresolved Inclusion" errors in the Eclipse editor
-		@param pythonpath Optional project specific python paths
-		"""
-		hasc = hasjava = haspython = False
-		source_dirs = []
-		cpppath = self.env['CPPPATH']
-		javasrcpath = []
-		javalibpath = []
-		includes = STANDARD_INCLUDES
-		if sys.platform != 'win32':
-			cc = self.env.CC or self.env.CXX
-			if cc:
-				cmd = cc + ['-xc++', '-E', '-Wp,-v', '-']
-				try:
-					gccout = self.cmd_and_log(cmd, output=Context.STDERR, quiet=Context.BOTH, input='\n'.encode()).splitlines()
-				except Errors.WafError:
-					pass
-				else:
-					includes = []
-					for ipath in gccout:
-						if ipath.startswith(' /'):
-							includes.append(ipath[1:])
-			cpppath += includes
-		Logs.warn('Generating Eclipse CDT project files')
-
-		for g in self.groups:
-			for tg in g:
-				if not isinstance(tg, TaskGen.task_gen):
-					continue
-
-				tg.post()
-
-				# Add local Python modules paths to configuration so object resolving will work in IDE
-				# This may also contain generated files (ie. pyqt5 or protoc) that get picked from build
-				if 'py' in tg.features:
-					pypath = tg.path.relpath()
-					py_installfrom = getattr(tg, 'install_from', None)
-					if isinstance(py_installfrom, Node.Node):
-						pypath = py_installfrom.path_from(self.root.make_node(self.top_dir))
-					if pypath not in pythonpath:
-						pythonpath.append(pypath)
-					haspython = True
-
-				# Add Java source directories so object resolving works in IDE
-				# This may also contain generated files (ie. protoc) that get picked from build
-				if 'javac' in tg.features:
-					java_src = tg.path.relpath()
-					java_srcdir = getattr(tg.javac_task, 'srcdir', None)
-					if java_srcdir:
-						if isinstance(java_srcdir, Node.Node):
-							java_srcdir = [java_srcdir]
-						for x in Utils.to_list(java_srcdir):
-							x = x.path_from(self.root.make_node(self.top_dir))
-							if x not in javasrcpath:
-								javasrcpath.append(x)
-					else:
-						if java_src not in javasrcpath:
-							javasrcpath.append(java_src)
-					hasjava = True
-
-					# Check if there are external dependencies and add them as external jar so they will be resolved by Eclipse
-					usedlibs=getattr(tg, 'use', [])
-					for x in Utils.to_list(usedlibs):
-						for cl in Utils.to_list(tg.env['CLASSPATH_'+x]):
-							if cl not in javalibpath:
-								javalibpath.append(cl)
-
-				if not getattr(tg, 'link_task', None):
-					continue
-
-				features = Utils.to_list(getattr(tg, 'features', ''))
-
-				is_cc = 'c' in features or 'cxx' in features
-
-				incnodes = tg.to_incnodes(tg.to_list(getattr(tg, 'includes', [])) + tg.env['INCLUDES'])
-				for p in incnodes:
-					path = p.path_from(self.srcnode)
-
-					if path.startswith('/'):
-						cpppath.append(path)
-					else:
-						workspace_includes.append(path)
-
-					if is_cc and path not in source_dirs:
-						source_dirs.append(path)
-
-					hasc = True
-
-		waf_executable = os.path.abspath(sys.argv[0])
-		project = self.impl_create_project(sys.executable, appname, hasc, hasjava, haspython, waf_executable)
-		self.write_conf_to_xml('.project', project)
-
-		if hasc:
-			project = self.impl_create_cproject(sys.executable, waf_executable, appname, workspace_includes, cpppath, source_dirs)
-			self.write_conf_to_xml('.cproject', project)
-
-		if haspython:
-			project = self.impl_create_pydevproject(sys.path, pythonpath)
-			self.write_conf_to_xml('.pydevproject', project)
-
-		if hasjava:
-			project = self.impl_create_javaproject(javasrcpath, javalibpath)
-			self.write_conf_to_xml('.classpath', project)
-
-	def impl_create_project(self, executable, appname, hasc, hasjava, haspython, waf_executable):
-		doc = Document()
-		projectDescription = doc.createElement('projectDescription')
-		self.add(doc, projectDescription, 'name', appname)
-		self.add(doc, projectDescription, 'comment')
-		self.add(doc, projectDescription, 'projects')
-		buildSpec = self.add(doc, projectDescription, 'buildSpec')
-		buildCommand = self.add(doc, buildSpec, 'buildCommand')
-		self.add(doc, buildCommand, 'triggers', 'clean,full,incremental,')
-		arguments = self.add(doc, buildCommand, 'arguments')
-		dictionaries = {}
-
-		# If CDT is present, instruct this one to call waf as it is more flexible (separate build/clean ...)
-		if hasc:
-			self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder')
-			# the default make-style targets are overwritten by the .cproject values
-			dictionaries = {
-					cdt_mk + '.contents': cdt_mk + '.activeConfigSettings',
-					cdt_mk + '.enableAutoBuild': 'false',
-					cdt_mk + '.enableCleanBuild': 'true',
-					cdt_mk + '.enableFullBuild': 'true',
-					}
-		else:
-			# Otherwise for Java/Python an external builder tool is created that will call waf build
-			self.add(doc, buildCommand, 'name', 'org.eclipse.ui.externaltools.ExternalToolBuilder')
-			dictionaries = {
-					'LaunchConfigHandle': '<project>/%s/%s'%(extbuilder_dir, extbuilder_name),
-					}
-			# The definition is in a separate directory XML file
-			try:
-				os.mkdir(extbuilder_dir)
-			except OSError:
-				pass	# Ignore error if already exists
-
-			# Populate here the external builder XML calling waf
-			builder = Document()
-			launchConfiguration = doc.createElement('launchConfiguration')
-			launchConfiguration.setAttribute('type', 'org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType')
-			self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND', 'value': 'false'})
-			self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED', 'value': 'true'})
-			self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_LOCATION', 'value': waf_executable})
-			self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_RUN_BUILD_KINDS', 'value': 'full,incremental,'})
-			self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS', 'value': 'build'})
-			self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_WORKING_DIRECTORY', 'value': '${project_loc}'})
-			builder.appendChild(launchConfiguration)
-			# And write the XML to the file references before
-			self.write_conf_to_xml('%s%s%s'%(extbuilder_dir, os.path.sep, extbuilder_name), builder)
-
-
-		for k, v in dictionaries.items():
-			self.addDictionary(doc, arguments, k, v)
-
-		natures = self.add(doc, projectDescription, 'natures')
-
-		if hasc:
-			nature_list = """
-				core.ccnature
-				managedbuilder.core.ScannerConfigNature
-				managedbuilder.core.managedBuildNature
-				core.cnature
-			""".split()
-			for n in nature_list:
-				self.add(doc, natures, 'nature', oe_cdt + '.' + n)
-
-		if haspython:
-			self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature')
-		if hasjava:
-			self.add(doc, natures, 'nature', 'org.eclipse.jdt.core.javanature')
-
-		doc.appendChild(projectDescription)
-		return doc
-
-	def impl_create_cproject(self, executable, waf_executable, appname, workspace_includes, cpppath, source_dirs=[]):
-		doc = Document()
-		doc.appendChild(doc.createProcessingInstruction('fileVersion', '4.0.0'))
-		cconf_id = cdt_core + '.default.config.1'
-		cproject = doc.createElement('cproject')
-		storageModule = self.add(doc, cproject, 'storageModule',
-				{'moduleId': cdt_core + '.settings'})
-		cconf = self.add(doc, storageModule, 'cconfiguration', {'id':cconf_id})
-
-		storageModule = self.add(doc, cconf, 'storageModule',
-				{'buildSystemId': oe_cdt + '.managedbuilder.core.configurationDataProvider',
-				 'id': cconf_id,
-				 'moduleId': cdt_core + '.settings',
-				 'name': 'Default'})
-
-		self.add(doc, storageModule, 'externalSettings')
-
-		extensions = self.add(doc, storageModule, 'extensions')
-		extension_list = """
-			VCErrorParser
-			MakeErrorParser
-			GCCErrorParser
-			GASErrorParser
-			GLDErrorParser
-		""".split()
-		self.add(doc, extensions, 'extension', {'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'})
-		for e in extension_list:
-			self.add(doc, extensions, 'extension', {'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'})
-
-		storageModule = self.add(doc, cconf, 'storageModule',
-				{'moduleId': 'cdtBuildSystem', 'version': '4.0.0'})
-		config = self.add(doc, storageModule, 'configuration',
-					{'artifactName': appname,
-					 'id': cconf_id,
-					 'name': 'Default',
-					 'parent': cdt_bld + '.prefbase.cfg'})
-		folderInfo = self.add(doc, config, 'folderInfo',
-							{'id': cconf_id+'.', 'name': '/', 'resourcePath': ''})
-
-		toolChain = self.add(doc, folderInfo, 'toolChain',
-				{'id': cdt_bld + '.prefbase.toolchain.1',
-				 'name': 'No ToolChain',
-				 'resourceTypeBasedDiscovery': 'false',
-				 'superClass': cdt_bld + '.prefbase.toolchain'})
-
-		self.add(doc, toolChain, 'targetPlatform', {'binaryParser': 'org.eclipse.cdt.core.ELF', 'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''})
-
-		waf_build = '"%s" %s'%(waf_executable, eclipse.fun)
-		waf_clean = '"%s" clean'%(waf_executable)
-		self.add(doc, toolChain, 'builder',
-					{'autoBuildTarget': waf_build,
-					 'command': executable,
-					 'enableAutoBuild': 'false',
-					 'cleanBuildTarget': waf_clean,
-					 'enableIncrementalBuild': 'true',
-					 'id': cdt_bld + '.settings.default.builder.1',
-					 'incrementalBuildTarget': waf_build,
-					 'managedBuildOn': 'false',
-					 'name': 'Gnu Make Builder',
-					 'superClass': cdt_bld + '.settings.default.builder'})
-
-		tool_index = 1
-		for tool_name in ("Assembly", "GNU C++", "GNU C"):
-			tool = self.add(doc, toolChain, 'tool',
-					{'id': cdt_bld + '.settings.holder.' + str(tool_index),
-					 'name': tool_name,
-					 'superClass': cdt_bld + '.settings.holder'})
-			if cpppath or workspace_includes:
-				incpaths = cdt_bld + '.settings.holder.incpaths'
-				option = self.add(doc, tool, 'option',
-						{'id': incpaths + '.' +  str(tool_index),
-						 'name': 'Include Paths',
-						 'superClass': incpaths,
-						 'valueType': 'includePath'})
-				for i in workspace_includes:
-					self.add(doc, option, 'listOptionValue',
-								{'builtIn': 'false',
-								'value': '"${workspace_loc:/%s/%s}"'%(appname, i)})
-				for i in cpppath:
-					self.add(doc, option, 'listOptionValue',
-								{'builtIn': 'false',
-								'value': '"%s"'%(i)})
-			if tool_name == "GNU C++" or tool_name == "GNU C":
-				self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.' + str(tool_index), \
-					'languageId':'org.eclipse.cdt.core.gcc' if tool_name == "GNU C" else 'org.eclipse.cdt.core.g++','languageName':tool_name, \
-					'sourceContentType':'org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader', \
-					'superClass':'org.eclipse.cdt.build.core.settings.holder.inType' })
-			tool_index += 1
-
-		if source_dirs:
-			sourceEntries = self.add(doc, config, 'sourceEntries')
-			for i in source_dirs:
-				 self.add(doc, sourceEntries, 'entry',
-							{'excluding': i,
-							'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
-							'kind': 'sourcePath',
-							'name': ''})
-				 self.add(doc, sourceEntries, 'entry',
-							{
-							'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
-							'kind': 'sourcePath',
-							'name': i})
-
-		storageModule = self.add(doc, cconf, 'storageModule',
-							{'moduleId': cdt_mk + '.buildtargets'})
-		buildTargets = self.add(doc, storageModule, 'buildTargets')
-		def addTargetWrap(name, runAll):
-			return self.addTarget(doc, buildTargets, executable, name,
-								'"%s" %s'%(waf_executable, name), runAll)
-		addTargetWrap('configure', True)
-		addTargetWrap('dist', False)
-		addTargetWrap('install', False)
-		addTargetWrap('check', False)
-
-		storageModule = self.add(doc, cproject, 'storageModule',
-							{'moduleId': 'cdtBuildSystem',
-							 'version': '4.0.0'})
-
-		self.add(doc, storageModule, 'project', {'id': '%s.null.1'%appname, 'name': appname})
-
-		doc.appendChild(cproject)
-		return doc
-
-	def impl_create_pydevproject(self, system_path, user_path):
-		# create a pydevproject file
-		doc = Document()
-		doc.appendChild(doc.createProcessingInstruction('eclipse-pydev', 'version="1.0"'))
-		pydevproject = doc.createElement('pydev_project')
-		prop = self.add(doc, pydevproject,
-					   'pydev_property',
-					   'python %d.%d'%(sys.version_info[0], sys.version_info[1]))
-		prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_VERSION')
-		prop = self.add(doc, pydevproject, 'pydev_property', 'Default')
-		prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_INTERPRETER')
-		# add waf's paths
-		wafadmin = [p for p in system_path if p.find('wafadmin') != -1]
-		if wafadmin:
-			prop = self.add(doc, pydevproject, 'pydev_pathproperty',
-					{'name':'org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH'})
-			for i in wafadmin:
-				self.add(doc, prop, 'path', i)
-		if user_path:
-			prop = self.add(doc, pydevproject, 'pydev_pathproperty',
-					{'name':'org.python.pydev.PROJECT_SOURCE_PATH'})
-			for i in user_path:
-				self.add(doc, prop, 'path', '/${PROJECT_DIR_NAME}/'+i)
-
-		doc.appendChild(pydevproject)
-		return doc
-
-	def impl_create_javaproject(self, javasrcpath, javalibpath):
-		# create a .classpath file for java usage
-		doc = Document()
-		javaproject = doc.createElement('classpath')
-		if javasrcpath:
-			for i in javasrcpath:
-				self.add(doc, javaproject, 'classpathentry',
-					{'kind': 'src', 'path': i})
-
-		if javalibpath:
-			for i in javalibpath:
-				self.add(doc, javaproject, 'classpathentry',
-					{'kind': 'lib', 'path': i})
-
-		self.add(doc, javaproject, 'classpathentry', {'kind': 'con', 'path': 'org.eclipse.jdt.launching.JRE_CONTAINER'})
-		self.add(doc, javaproject, 'classpathentry', {'kind': 'output', 'path': self.bldnode.name })
-		doc.appendChild(javaproject)
-		return doc
-
-	def addDictionary(self, doc, parent, k, v):
-		dictionary = self.add(doc, parent, 'dictionary')
-		self.add(doc, dictionary, 'key', k)
-		self.add(doc, dictionary, 'value', v)
-		return dictionary
-
-	def addTarget(self, doc, buildTargets, executable, name, buildTarget, runAllBuilders=True):
-		target = self.add(doc, buildTargets, 'target',
-						{'name': name,
-						 'path': '',
-						 'targetID': oe_cdt + '.build.MakeTargetBuilder'})
-		self.add(doc, target, 'buildCommand', executable)
-		self.add(doc, target, 'buildArguments', None)
-		self.add(doc, target, 'buildTarget', buildTarget)
-		self.add(doc, target, 'stopOnError', 'true')
-		self.add(doc, target, 'useDefaultCommand', 'false')
-		self.add(doc, target, 'runAllBuilders', str(runAllBuilders).lower())
-
-	def add(self, doc, parent, tag, value = None):
-		el = doc.createElement(tag)
-		if value:
-			if isinstance(value, str):
-				el.appendChild(doc.createTextNode(value))
-			elif isinstance(value, dict):
-				self.setAttributes(el, value)
-		parent.appendChild(el)
-		return el
-
-	def setAttributes(self, node, attrs):
-		for k, v in attrs.items():
-			node.setAttribute(k, v)
-
diff --git a/waflib/extras/erlang.py b/waflib/extras/erlang.py
deleted file mode 100644
index 0b93d9a..0000000
--- a/waflib/extras/erlang.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010 (ita)
-# Przemyslaw Rzepecki, 2016
-
-"""
-Erlang support
-"""
-
-import re
-from waflib import Task, TaskGen
-from waflib.TaskGen import feature, after_method, before_method
-# import ccroot to make the task generator method "to_incnodes" (used below) available
-from waflib.Tools import ccroot
-
-# These flags are required by the Erlang VM to execute/evaluate code in
-# non-interactive mode. They are used in this tool to generate Erlang module
-# documentation and to run unit tests. The user can pass additional arguments to
-# the 'erl' command with the ERL_FLAGS environment variable.
-EXEC_NON_INTERACTIVE = ['-noshell', '-noinput', '-eval']
-
-def configure(conf):
-	conf.find_program('erlc', var='ERLC')
-	conf.find_program('erl', var='ERL')
-	conf.add_os_flags('ERLC_FLAGS')
-	conf.add_os_flags('ERL_FLAGS')
-	conf.env.ERLC_DEF_PATTERN = '-D%s'
-	conf.env.ERLC_INC_PATTERN = '-I%s'
-
-@TaskGen.extension('.erl')
-def process_erl_node(self, node):
-	tsk = self.create_task('erl', node, node.change_ext('.beam'))
-	tsk.erlc_incnodes = [tsk.outputs[0].parent] + self.to_incnodes(self.includes)
-	tsk.env.append_value('ERLC_INCPATHS', [x.abspath() for x in tsk.erlc_incnodes])
-	tsk.env.append_value('ERLC_DEFINES', self.to_list(getattr(self, 'defines', [])))
-	tsk.env.append_value('ERLC_FLAGS', self.to_list(getattr(self, 'flags', [])))
-	tsk.cwd = tsk.outputs[0].parent
-
-class erl(Task.Task):
-	color = 'GREEN'
-	run_str = '${ERLC} ${ERL_FLAGS} ${ERLC_INC_PATTERN:ERLC_INCPATHS} ${ERLC_DEF_PATTERN:ERLC_DEFINES} ${SRC}'
-
-	def scan(task):
-		node = task.inputs[0]
-
-		deps = []
-		scanned = set([])
-		nodes_to_scan = [node]
-
-		for n in nodes_to_scan:
-			if n.abspath() in scanned:
-				continue
-
-			for i in re.findall(r'-include\("(.*)"\)\.', n.read()):
-				for d in task.erlc_incnodes:
-					r = d.find_node(i)
-					if r:
-						deps.append(r)
-						nodes_to_scan.append(r)
-						break
-			scanned.add(n.abspath())
-
-		return (deps, [])
-
-@TaskGen.extension('.beam')
-def process(self, node):
-	pass
-
-
-class erl_test(Task.Task):
-	color = 'BLUE'
-	run_str = '${ERL} ${ERL_FLAGS} ${ERL_TEST_FLAGS}'
-
-@feature('eunit')
-@after_method('process_source')
-def add_erl_test_run(self):
-	test_modules = [t.outputs[0] for t in self.tasks]
-	test_task = self.create_task('erl_test')
-	test_task.set_inputs(self.source + test_modules)
-	test_task.cwd = test_modules[0].parent
-
-	test_task.env.append_value('ERL_FLAGS', self.to_list(getattr(self, 'flags', [])))
-
-	test_list = ", ".join([m.change_ext("").path_from(test_task.cwd)+":test()" for m in test_modules])
-	test_flag = 'halt(case lists:all(fun(Elem) -> Elem == ok end, [%s]) of true -> 0; false -> 1 end).' % test_list
-	test_task.env.append_value('ERL_TEST_FLAGS', EXEC_NON_INTERACTIVE)
-	test_task.env.append_value('ERL_TEST_FLAGS', test_flag)
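-	# For two modules a.beam and b.beam in the task cwd, test_flag is roughly:
-	#   halt(case lists:all(fun(Elem) -> Elem == ok end, [a:test(), b:test()]) of true -> 0; false -> 1 end).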
-
-
-class edoc(Task.Task):
-	color = 'BLUE'
-	run_str = "${ERL} ${ERL_FLAGS} ${ERL_DOC_FLAGS}"
-	def keyword(self):
-		return 'Generating edoc'
-
-@feature('edoc')
-@before_method('process_source')
-def add_edoc_task(self):
-	# do not process the source, it would create a duplicate erl->beam task
-	self.meths.remove('process_source')
-	e = self.path.find_resource(self.source)
-	t = e.change_ext('.html')
-	png = t.parent.make_node('erlang.png')
-	css = t.parent.make_node('stylesheet.css')
-	tsk = self.create_task('edoc', e, [t, png, css])
-	tsk.cwd = tsk.outputs[0].parent
-	tsk.env.append_value('ERL_DOC_FLAGS', EXEC_NON_INTERACTIVE)
-	tsk.env.append_value('ERL_DOC_FLAGS', 'edoc:files(["%s"]), halt(0).' % tsk.inputs[0].abspath())
-	# TODO the above can break if a file path contains '"'
-
diff --git a/waflib/extras/fast_partial.py b/waflib/extras/fast_partial.py
deleted file mode 100644
index d5b6144..0000000
--- a/waflib/extras/fast_partial.py
+++ /dev/null
@@ -1,518 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2017-2018 (ita)
-
-"""
-A system for fast partial rebuilds
-
-Creating a large number of task objects up front can take some time.
-By making a few assumptions, it is possible to avoid creating
-task objects for targets that are already up-to-date.
-
-On a silly benchmark the gain observed for 1M tasks can be 5m->10s
-for a single file change.
-
-Usage::
-
-	def options(opt):
-		opt.load('fast_partial')
-
-Assumptions:
-* Mostly for C/C++/Fortran targets with link tasks (object-only targets are not handled)
-* For full project builds: no --targets and no pruning from subfolders
-* The installation phase is ignored
-* `use=` dependencies are specified up front even across build groups
-* Task generator source files are not obtained from globs
-
-Implementation details:
-* The first layer obtains file timestamps to recalculate file hashes only
-  when necessary (similar to md5_tstamp); the timestamps are then stored
-  in a dedicated pickle file
-* A second layer associates each task generator to a file set to help
-  detecting changes. Task generators are to create their tasks only when
-  the related files have been modified. A specific db file is created
-  to store such data (5m -> 1m10)
-* A third layer binds build context proxies onto task generators, replacing
-  the default context. While loading data for the full build uses more memory
-  (4GB -> 9GB), partial builds are then much faster (1m10 -> 13s)
-* A fourth layer enables a 2-level cache on file signatures to
-  reduce the size of the main pickle file (13s -> 10s)
-"""
-
-import os
-from waflib import Build, Context, Errors, Logs, Task, TaskGen, Utils
-from waflib.TaskGen import feature, after_method, taskgen_method
-import waflib.Node
-
-DONE = 0	# nothing to rebuild for this task generator
-DIRTY = 1	# something changed, tasks must be created
-NEEDED = 2	# brought in through 'use' propagation only
-
-SKIPPABLE = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib', 'cprogram', 'cxxprogram']
-
-TSTAMP_DB = '.wafpickle_tstamp_db_file'
-
-SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
-
-class bld_proxy(object):
-	def __init__(self, bld):
-		object.__setattr__(self, 'bld', bld)
-
-		object.__setattr__(self, 'node_class', type('Nod3', (waflib.Node.Node,), {}))
-		self.node_class.__module__ = 'waflib.Node'
-		self.node_class.ctx = self
-
-		object.__setattr__(self, 'root', self.node_class('', None))
-		for x in SAVED_ATTRS:
-			if x != 'root':
-				object.__setattr__(self, x, {})
-
-		self.fix_nodes()
-
-	def __setattr__(self, name, value):
-		bld = object.__getattribute__(self, 'bld')
-		setattr(bld, name, value)
-
-	def __delattr__(self, name):
-		bld = object.__getattribute__(self, 'bld')
-		delattr(bld, name)
-
-	def __getattribute__(self, name):
-		try:
-			return object.__getattribute__(self, name)
-		except AttributeError:
-			bld = object.__getattribute__(self, 'bld')
-			return getattr(bld, name)
-
-	def __call__(self, *k, **kw):
-		return self.bld(*k, **kw)
-
-	def fix_nodes(self):
-		for x in ('srcnode', 'path', 'bldnode'):
-			node = self.root.find_dir(getattr(self.bld, x).abspath())
-			object.__setattr__(self, x, node)
-
-	def set_key(self, store_key):
-		object.__setattr__(self, 'store_key', store_key)
-
-	def fix_tg_path(self, *tgs):
-		# changing Node objects on task generators is possible
-		# yet, all Node objects must belong to the same parent
-		for tg in tgs:
-			tg.path = self.root.make_node(tg.path.abspath())
-
-	def restore(self):
-		dbfn = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
-		Logs.debug('rev_use: reading %s', dbfn)
-		try:
-			data = Utils.readf(dbfn, 'rb')
-		except (EnvironmentError, EOFError):
-			# handle missing file/empty file
-			Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
-		else:
-			try:
-				waflib.Node.pickle_lock.acquire()
-				waflib.Node.Nod3 = self.node_class
-				try:
-					data = Build.cPickle.loads(data)
-				except Exception as e:
-					Logs.debug('rev_use: Could not unpickle the build cache %s: %r', dbfn, e)
-				else:
-					for x in SAVED_ATTRS:
-						object.__setattr__(self, x, data.get(x, {}))
-			finally:
-				waflib.Node.pickle_lock.release()
-		self.fix_nodes()
-
-	def store(self):
-		data = {}
-		for x in Build.SAVED_ATTRS:
-			data[x] = getattr(self, x)
-		db = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
-
-		try:
-			waflib.Node.pickle_lock.acquire()
-			waflib.Node.Nod3 = self.node_class
-			x = Build.cPickle.dumps(data, Build.PROTOCOL)
-		finally:
-			waflib.Node.pickle_lock.release()
-
-		Logs.debug('rev_use: storing %s', db)
-		Utils.writef(db + '.tmp', x, m='wb')
-		try:
-			st = os.stat(db)
-			os.remove(db)
-			if not Utils.is_win32:
-				os.chown(db + '.tmp', st.st_uid, st.st_gid)
-		except (AttributeError, OSError):
-			pass
-		os.rename(db + '.tmp', db)
-
-class bld(Build.BuildContext):
-	def __init__(self, **kw):
-		super(bld, self).__init__(**kw)
-		self.hashes_md5_tstamp = {}
-
-	def __call__(self, *k, **kw):
-		# this is one way of doing it, one could use a task generator method too
-		bld = kw['bld'] = bld_proxy(self)
-		ret = TaskGen.task_gen(*k, **kw)
-		self.task_gen_cache_names = {}
-		self.add_to_group(ret, group=kw.get('group'))
-		ret.bld = bld
-		bld.set_key(ret.path.abspath().replace(os.sep, '') + str(ret.idx))
-		return ret
-
-	def is_dirty(self):
-		return True
-
-	def store_tstamps(self):
-		# Called after a build is finished
-		# For each task generator, record all files involved in task objects
-		# optimization: done only if there was something built
-		do_store = False
-		try:
-			f_deps = self.f_deps
-		except AttributeError:
-			f_deps = self.f_deps = {}
-			self.f_tstamps = {}
-
-		allfiles = set()
-		for g in self.groups:
-			for tg in g:
-				try:
-					staleness = tg.staleness
-				except AttributeError:
-					staleness = DIRTY
-
-				if staleness != DIRTY:
-					# DONE case: there was nothing built
-					# NEEDED case: the tg was brought in because of 'use' propagation
-					# but nothing really changed for them, there may be incomplete
-					# tasks (object files) and in this case it is best to let the next build
-					# figure out if an input/output file changed
-					continue
-
-				do_cache = False
-				for tsk in tg.tasks:
-					if tsk.hasrun == Task.SUCCESS:
-						do_cache = True
-					elif tsk.hasrun == Task.SKIPPED:
-						pass
-					else:
-						# one failed task, clear the cache for this tg
-						try:
-							del f_deps[(tg.path.abspath(), tg.idx)]
-						except KeyError:
-							pass
-						else:
-							# just store the new state because there is a change
-							do_store = True
-
-						# skip the rest because there is no valid cache possible
-						break
-				else:
-					if not do_cache:
-						# all skipped, but is there anything in cache?
-						try:
-							f_deps[(tg.path.abspath(), tg.idx)]
-						except KeyError:
-							# probably cleared because a wscript file changed
-							# store it
-							do_cache = True
-
-					if do_cache:
-
-						# there was a rebuild, store the data structure too
-						tg.bld.store()
-
-						# all tasks skipped but no cache
-						# or a successful task build
-						do_store = True
-						st = set()
-						for tsk in tg.tasks:
-							st.update(tsk.inputs)
-							st.update(self.node_deps.get(tsk.uid(), []))
-
-						# TODO do last/when loading the tgs?
-						lst = []
-						for k in ('wscript', 'wscript_build'):
-							n = tg.path.find_node(k)
-							if n:
-								n.get_bld_sig()
-								lst.append(n.abspath())
-
-						lst.extend(sorted(x.abspath() for x in st))
-						allfiles.update(lst)
-						f_deps[(tg.path.abspath(), tg.idx)] = lst
-
-		for x in allfiles:
-			# f_tstamps has everything, while md5_tstamp can be relatively empty on partial builds
-			self.f_tstamps[x] = self.hashes_md5_tstamp[x][0]
-
-		if do_store:
-			dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
-			Logs.debug('rev_use: storing %s', dbfn)
-			dbfn_tmp = dbfn + '.tmp'
-			x = Build.cPickle.dumps([self.f_tstamps, f_deps], Build.PROTOCOL)
-			Utils.writef(dbfn_tmp, x, m='wb')
-			os.rename(dbfn_tmp, dbfn)
-			Logs.debug('rev_use: stored %s', dbfn)
-
-	def store(self):
-		self.store_tstamps()
-		if self.producer.dirty:
-			Build.BuildContext.store(self)
-
-	def compute_needed_tgs(self):
-		# assume the 'use' keys are not modified during the build phase
-
-		dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
-		Logs.debug('rev_use: Loading %s', dbfn)
-		try:
-			data = Utils.readf(dbfn, 'rb')
-		except (EnvironmentError, EOFError):
-			Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
-			self.f_deps = {}
-			self.f_tstamps = {}
-		else:
-			try:
-				self.f_tstamps, self.f_deps = Build.cPickle.loads(data)
-			except Exception as e:
-				Logs.debug('rev_use: Could not unpickle the build cache %s: %r', dbfn, e)
-				self.f_deps = {}
-				self.f_tstamps = {}
-			else:
-				Logs.debug('rev_use: Loaded %s', dbfn)
-
-
-		# 1. obtain task generators that contain rebuilds
-		# 2. obtain the 'use' graph and its dual
-		stales = set()
-		reverse_use_map = Utils.defaultdict(list)
-		use_map = Utils.defaultdict(list)
-
-		for g in self.groups:
-			for tg in g:
-				if tg.is_stale():
-					stales.add(tg)
-
-				try:
-					lst = tg.use = Utils.to_list(tg.use)
-				except AttributeError:
-					pass
-				else:
-					for x in lst:
-						try:
-							xtg = self.get_tgen_by_name(x)
-						except Errors.WafError:
-							pass
-						else:
-							use_map[tg].append(xtg)
-							reverse_use_map[xtg].append(tg)
-
-		Logs.debug('rev_use: found %r stale tgs', len(stales))
-
-		# 3. dfs to post downstream tg as stale
-		visited = set()
-		def mark_down(tg):
-			if tg in visited:
-				return
-			visited.add(tg)
-			Logs.debug('rev_use: marking down %r as stale', tg.name)
-			tg.staleness = DIRTY
-			for x in reverse_use_map[tg]:
-				mark_down(x)
-		for tg in stales:
-			mark_down(tg)
-
-		# 4. dfs to find ancestors tg to mark as needed
-		self.needed_tgs = needed_tgs = set()
-		def mark_needed(tg):
-			if tg in needed_tgs:
-				return
-			needed_tgs.add(tg)
-			if tg.staleness == DONE:
-				Logs.debug('rev_use: marking up %r as needed', tg.name)
-				tg.staleness = NEEDED
-			for x in use_map[tg]:
-				mark_needed(x)
-		for xx in visited:
-			mark_needed(xx)
-
-		# so we have the whole tg trees to post in the set "needed"
-		# load their build trees
-		for tg in needed_tgs:
-			tg.bld.restore()
-			tg.bld.fix_tg_path(tg)
-
-		# the stale ones should be fully built, while the needed ones
-		# may skip a few tasks, see create_compiled_task and apply_link_after below
-		Logs.debug('rev_use: amount of needed task gens: %r', len(needed_tgs))
-
-	def post_group(self):
-		# assumption: we can ignore the folder/subfolders cuts
-		def tgpost(tg):
-			try:
-				f = tg.post
-			except AttributeError:
-				pass
-			else:
-				f()
-
-		if not self.targets or self.targets == '*':
-			for tg in self.groups[self.current_group]:
-				# this can cut quite a lot of tg objects
-				if tg in self.needed_tgs:
-					tgpost(tg)
-		else:
-			# default implementation
-			return Build.BuildContext.post_group(self)
-
-	def get_build_iterator(self):
-		if not self.targets or self.targets == '*':
-			self.compute_needed_tgs()
-		return Build.BuildContext.get_build_iterator(self)
-
-@taskgen_method
-def is_stale(self):
-	# assume no globs
-	self.staleness = DIRTY
-
-	# 1. the case of always stale targets
-	if getattr(self, 'always_stale', False):
-		return True
-
-	# 2. check if the db file exists
-	db = os.path.join(self.bld.variant_dir, Context.DBFILE)
-	try:
-		dbstat = os.stat(db).st_mtime
-	except OSError:
-		Logs.debug('rev_use: must post %r because this is a clean build', self.name)
-		return True
-
-	# 3. check if the configuration changed
-	if os.stat(self.bld.bldnode.find_node('c4che/build.config.py').abspath()).st_mtime > dbstat:
-		Logs.debug('rev_use: must post %r because the configuration has changed', self.name)
-		return True
-
-	# 3.a any tstamp data?
-	try:
-		f_deps = self.bld.f_deps
-	except AttributeError:
-		Logs.debug('rev_use: must post %r because there is no f_deps', self.name)
-		return True
-
-	# 4. check if this is the first build (no cache)
-	try:
-		lst = f_deps[(self.path.abspath(), self.idx)]
-	except KeyError:
-		Logs.debug('rev_use: must post %r because it has no cached data', self.name)
-		return True
-
-	try:
-		cache = self.bld.cache_tstamp_rev_use
-	except AttributeError:
-		cache = self.bld.cache_tstamp_rev_use = {}
-
-	# 5. check the timestamp of each dependency files listed is unchanged
-	f_tstamps = self.bld.f_tstamps
-	for x in lst:
-		try:
-			old_ts = f_tstamps[x]
-		except KeyError:
-			Logs.debug('rev_use: must post %r because %r is not in cache', self.name, x)
-			return True
-
-		try:
-			try:
-				ts = cache[x]
-			except KeyError:
-				ts = cache[x] = os.stat(x).st_mtime
-		except OSError:
-			del f_deps[(self.path.abspath(), self.idx)]
-			Logs.debug('rev_use: must post %r because %r does not exist anymore', self.name, x)
-			return True
-		else:
-			if ts != old_ts:
-				Logs.debug('rev_use: must post %r because the timestamp on %r changed %r %r', self.name, x, old_ts, ts)
-				return True
-
-	self.staleness = DONE
-	return False
-
-@taskgen_method
-def create_compiled_task(self, name, node):
-	# skip the creation of object files
-	# assumption: object-only targets are not skippable
-	if self.staleness == NEEDED:
-		# only libraries/programs can skip object files
-		for x in SKIPPABLE:
-			if x in self.features:
-				return None
-
-	out = '%s.%d.o' % (node.name, self.idx)
-	task = self.create_task(name, node, node.parent.find_or_declare(out))
-	try:
-		self.compiled_tasks.append(task)
-	except AttributeError:
-		self.compiled_tasks = [task]
-	return task
-
-@feature(*SKIPPABLE)
-@after_method('apply_link')
-def apply_link_after(self):
-	# cprogram/cxxprogram might be unnecessary
-	if self.staleness != NEEDED:
-		return
-	for tsk in self.tasks:
-		tsk.hasrun = Task.SKIPPED
-
-def path_from(self, node):
-	# handle nodes of distinct types
-	if node.ctx is not self.ctx:
-		node = self.ctx.root.make_node(node.abspath())
-	return self.default_path_from(node)
-waflib.Node.Node.default_path_from = waflib.Node.Node.path_from
-waflib.Node.Node.path_from = path_from
-
-def h_file(self):
-	# similar to md5_tstamp.py, but with 2-layer cache
-	# global_cache for the build context common for all task generators
-	# local_cache for the build context proxy (one by task generator)
-	#
-	# the global cache is not persistent
-	# the local cache is persistent and meant for partial builds
-	#
-	# assume all calls are made from a single thread
-	#
-	filename = self.abspath()
-	st = os.stat(filename)
-
-	global_cache = self.ctx.bld.hashes_md5_tstamp
-	local_cache = self.ctx.hashes_md5_tstamp
-
-	if filename in global_cache:
-		# value already calculated in this build
-		cval = global_cache[filename]
-
-		# the value in global cache is assumed to be calculated once
-		# reverifying it could cause task generators
-		# to get distinct tstamp values, thus missing rebuilds
-		local_cache[filename] = cval
-		return cval[1]
-
-	if filename in local_cache:
-		cval = local_cache[filename]
-		if cval[0] == st.st_mtime:
-			# correct value from a previous build
-			# put it in the global cache
-			global_cache[filename] = cval
-			return cval[1]
-
-	ret = Utils.h_file(filename)
-	local_cache[filename] = global_cache[filename] = (st.st_mtime, ret)
-	return ret
-waflib.Node.Node.h_file = h_file
-
diff --git a/waflib/extras/fc_bgxlf.py b/waflib/extras/fc_bgxlf.py
deleted file mode 100644
index cca1810..0000000
--- a/waflib/extras/fc_bgxlf.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-from waflib.Tools import fc, fc_config, fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].insert(0, 'fc_bgxlf')
-
-@conf
-def find_bgxlf(conf):
-	fc = conf.find_program(['bgxlf2003_r','bgxlf2003'], var='FC')
-	conf.get_xlf_version(fc)
-	conf.env.FC_NAME = 'BGXLF'
-
-@conf
-def bg_flags(self):
-	self.env.SONAME_ST		 = ''
-	self.env.FCSHLIB_MARKER	= ''
-	self.env.FCSTLIB_MARKER	= ''
-	self.env.FCFLAGS_fcshlib   = ['-fPIC']
-	self.env.LINKFLAGS_fcshlib = ['-G', '-Wl,-bexpfull']
-
-def configure(conf):
-	conf.find_bgxlf()
-	conf.find_ar()
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.xlf_flags()
-	conf.bg_flags()
-
diff --git a/waflib/extras/fc_cray.py b/waflib/extras/fc_cray.py
deleted file mode 100644
index da733fa..0000000
--- a/waflib/extras/fc_cray.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib.Tools import fc, fc_config, fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].append('fc_cray')
-
-@conf
-def find_crayftn(conf):
-	"""Find the Cray fortran compiler (will look in the environment variable 'FC')"""
-	fc = conf.find_program(['crayftn'], var='FC')
-	conf.get_crayftn_version(fc)
-	conf.env.FC_NAME = 'CRAY'
-	conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
-
-@conf
-def crayftn_flags(conf):
-	v = conf.env
-	v['_FCMODOUTFLAGS']  = ['-em', '-J.'] # enable module files and put them in the current directory
-	v['FCFLAGS_DEBUG'] = ['-m1'] # more verbose compiler warnings
-	v['FCFLAGS_fcshlib']   = ['-h pic']
-	v['LINKFLAGS_fcshlib'] = ['-h shared']
-
-	v['FCSTLIB_MARKER'] = '-h static'
-	v['FCSHLIB_MARKER'] = '-h dynamic'
-
-@conf
-def get_crayftn_version(conf, fc):
-		version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
-		cmd = fc + ['-V']
-		out,err = fc_config.getoutput(conf, cmd, stdin=False)
-		if out:
-			match = version_re(out)
-		else:
-			match = version_re(err)
-		if not match:
-				conf.fatal('Could not determine the Cray Fortran compiler version.')
-		k = match.groupdict()
-		conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
-	conf.find_crayftn()
-	conf.find_ar()
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.crayftn_flags()
-
diff --git a/waflib/extras/fc_nag.py b/waflib/extras/fc_nag.py
deleted file mode 100644
index edcb218..0000000
--- a/waflib/extras/fc_nag.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].insert(0, 'fc_nag')
-
-@conf
-def find_nag(conf):
-	"""Find the NAG Fortran Compiler (will look in the environment variable 'FC')"""
-
-	fc = conf.find_program(['nagfor'], var='FC')
-	conf.get_nag_version(fc)
-	conf.env.FC_NAME = 'NAG'
-	conf.env.FC_MOD_CAPITALIZATION = 'lower'
-
-@conf
-def nag_flags(conf):
-	v = conf.env
-	v.FCFLAGS_DEBUG = ['-C=all']
-	v.FCLNK_TGT_F = ['-o', '']
-	v.FC_TGT_F = ['-c', '-o', '']
-
-@conf
-def nag_modifier_platform(conf):
-	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
-	nag_modifier_func = getattr(conf, 'nag_modifier_' + dest_os, None)
-	if nag_modifier_func:
-		nag_modifier_func()
-
-@conf
-def get_nag_version(conf, fc):
-	"""Get the NAG compiler version"""
-
-	version_re = re.compile(r"^NAG Fortran Compiler *Release *(?P<major>\d*)\.(?P<minor>\d*)", re.M).search
-	cmd = fc + ['-V']
-
-	out, err = fc_config.getoutput(conf,cmd,stdin=False)
-	if out:
-		match = version_re(out)
-		if not match:
-			match = version_re(err)
-	else: match = version_re(err)
-	if not match:
-		conf.fatal('Could not determine the NAG version.')
-	k = match.groupdict()
-	conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
-	conf.find_nag()
-	conf.find_ar()
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.nag_flags()
-	conf.nag_modifier_platform()
-
diff --git a/waflib/extras/fc_nec.py b/waflib/extras/fc_nec.py
deleted file mode 100644
index 67c8680..0000000
--- a/waflib/extras/fc_nec.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib.Tools import fc, fc_config, fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].append('fc_nec')
-
-@conf
-def find_sxfc(conf):
-	"""Find the NEC fortran compiler (will look in the environment variable 'FC')"""
-	fc = conf.find_program(['sxf90','sxf03'], var='FC')
-	conf.get_sxfc_version(fc)
-	conf.env.FC_NAME = 'NEC'
-	conf.env.FC_MOD_CAPITALIZATION = 'lower'
-
-@conf
-def sxfc_flags(conf):
-	v = conf.env
-	v['_FCMODOUTFLAGS']  = [] # no extra flags needed for module files
-	v['FCFLAGS_DEBUG'] = [] # no extra flags needed for verbose warnings
-	v['FCFLAGS_fcshlib']   = []
-	v['LINKFLAGS_fcshlib'] = []
-
-	v['FCSTLIB_MARKER'] = ''
-	v['FCSHLIB_MARKER'] = ''
-
-@conf
-def get_sxfc_version(conf, fc):
-	version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
-	cmd = fc + ['-V']
-	out,err = fc_config.getoutput(conf, cmd, stdin=False)
-	if out:
-		match = version_re(out)
-	else:
-		match = version_re(err)
-	if not match:
-		version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P<major>\S*)\s*\(c\)\s*(?P<minor>\d*)",re.I).search
-		if out:
-			match = version_re(out)
-		else:
-			match = version_re(err)
-		if not match:
-			conf.fatal('Could not determine the NEC Fortran compiler version.')
-	k = match.groupdict()
-	conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
-	conf.find_sxfc()
-	conf.find_program('sxar',var='AR')
-	conf.add_os_flags('ARFLAGS')
-	if not conf.env.ARFLAGS:
-		conf.env.ARFLAGS=['rcs']
-
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.sxfc_flags()
diff --git a/waflib/extras/fc_nfort.py b/waflib/extras/fc_nfort.py
deleted file mode 100644
index c25886b..0000000
--- a/waflib/extras/fc_nfort.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Detection of the NEC Fortran compiler for Aurora Tsubasa
-
-import re
-from waflib.Tools import fc,fc_config,fc_scan
-from waflib.Configure import conf
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].append('fc_nfort')
-
-@conf
-def find_nfort(conf):
-	fc=conf.find_program(['nfort'],var='FC')
-	conf.get_nfort_version(fc)
-	conf.env.FC_NAME='NFORT'
-	conf.env.FC_MOD_CAPITALIZATION='lower'
-
-@conf
-def nfort_flags(conf):
-	v=conf.env
-	v['_FCMODOUTFLAGS']=[]
-	v['FCFLAGS_DEBUG']=[]
-	v['FCFLAGS_fcshlib']=[]
-	v['LINKFLAGS_fcshlib']=[]
-	v['FCSTLIB_MARKER']=''
-	v['FCSHLIB_MARKER']=''
-
-@conf
-def get_nfort_version(conf,fc):
-	version_re=re.compile(r"nfort\s*\(NFORT\)\s*(?P<major>\d+)\.(?P<minor>\d+)\.",re.I).search
-	cmd=fc+['--version']
-	out,err=fc_config.getoutput(conf,cmd,stdin=False)
-	if out:
-		match=version_re(out)
-	else:
-		match=version_re(err)
-	if not match:
-		conf.fatal('Could not determine the NEC NFORT Fortran compiler version.')
-	else:
-		k=match.groupdict()
-		conf.env['FC_VERSION']=(k['major'],k['minor'])
-
-def configure(conf):
-	conf.find_nfort()
-	conf.find_program('nar',var='AR')
-	conf.add_os_flags('ARFLAGS')
-	if not conf.env.ARFLAGS:
-		conf.env.ARFLAGS=['rcs']
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.nfort_flags()
diff --git a/waflib/extras/fc_open64.py b/waflib/extras/fc_open64.py
deleted file mode 100644
index 413719f..0000000
--- a/waflib/extras/fc_open64.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].insert(0, 'fc_open64')
-
-@conf
-def find_openf95(conf):
-	"""Find the Open64 Fortran Compiler (will look in the environment variable 'FC')"""
-
-	fc = conf.find_program(['openf95', 'openf90'], var='FC')
-	conf.get_open64_version(fc)
-	conf.env.FC_NAME = 'OPEN64'
-	conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
-
-@conf
-def openf95_flags(conf):
-	v = conf.env
-	v['FCFLAGS_DEBUG'] = ['-fullwarn']
-
-@conf
-def openf95_modifier_platform(conf):
-	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
-	openf95_modifier_func = getattr(conf, 'openf95_modifier_' + dest_os, None)
-	if openf95_modifier_func:
-		openf95_modifier_func()
-
-@conf
-def get_open64_version(conf, fc):
-	"""Get the Open64 compiler version"""
-
-	version_re = re.compile(r"Open64 Compiler Suite: *Version *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
-	cmd = fc + ['-version']
-
-	out, err = fc_config.getoutput(conf,cmd,stdin=False)
-	if out:
-		match = version_re(out)
-	else:
-		match = version_re(err)
-	if not match:
-		conf.fatal('Could not determine the Open64 version.')
-	k = match.groupdict()
-	conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
-	conf.find_openf95()
-	conf.find_ar()
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.openf95_flags()
-	conf.openf95_modifier_platform()
-
diff --git a/waflib/extras/fc_pgfortran.py b/waflib/extras/fc_pgfortran.py
deleted file mode 100644
index afb2817..0000000
--- a/waflib/extras/fc_pgfortran.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib.Tools import fc, fc_config, fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].append('fc_pgfortran')
-
-@conf
-def find_pgfortran(conf):
-	"""Find the PGI fortran compiler (will look in the environment variable 'FC')"""
-	fc = conf.find_program(['pgfortran', 'pgf95', 'pgf90'], var='FC')
-	conf.get_pgfortran_version(fc)
-	conf.env.FC_NAME = 'PGFC'
-
-@conf
-def pgfortran_flags(conf):
-	v = conf.env
-	v['FCFLAGS_fcshlib']   = ['-shared']
-	v['FCFLAGS_DEBUG'] = ['-Minform=inform', '-Mstandard'] # why not
-	v['FCSTLIB_MARKER'] = '-Bstatic'
-	v['FCSHLIB_MARKER'] = '-Bdynamic'
-	v['SONAME_ST']	  = '-soname %s'
-
-@conf
-def get_pgfortran_version(conf, fc):
-	version_re = re.compile(r"The Portland Group", re.I).search
-	cmd = fc + ['-V']
-	out, err = fc_config.getoutput(conf, cmd, stdin=False)
-	if out:
-		match = version_re(out)
-	else:
-		match = version_re(err)
-	if not match:
-		conf.fatal('Could not verify PGI signature')
-	cmd = fc + ['-help=variable']
-	out, err = fc_config.getoutput(conf, cmd, stdin=False)
-	if out.find('COMPVER') < 0:
-		conf.fatal('Could not determine the compiler type')
-	# parse the "name = value" listing printed by -help=variable
-	k = {}
-	prevk = ''
-	for line in out.splitlines():
-		lst = line.partition('=')
-		if lst[1] == '=':
-			key = lst[0].rstrip()
-			if key == '':
-				key = prevk
-			val = lst[2].rstrip()
-			k[key] = val
-		else:
-			prevk = line.partition(' ')[0]
-	def isD(var):
-		return var in k
-	def isT(var):
-		return var in k and k[var] != '0'
-	conf.env['FC_VERSION'] = tuple(k['COMPVER'].split('.'))
-
-def configure(conf):
-	conf.find_pgfortran()
-	conf.find_ar()
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.pgfortran_flags()
-
diff --git a/waflib/extras/fc_solstudio.py b/waflib/extras/fc_solstudio.py
deleted file mode 100644
index 53766df..0000000
--- a/waflib/extras/fc_solstudio.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].append('fc_solstudio')
-
-@conf
-def find_solstudio(conf):
-	"""Find the Solaris Studio compiler (will look in the environment variable 'FC')"""
-
-	fc = conf.find_program(['sunf95', 'f95', 'sunf90', 'f90'], var='FC')
-	conf.get_solstudio_version(fc)
-	conf.env.FC_NAME = 'SOL'
-
-@conf
-def solstudio_flags(conf):
-	v = conf.env
-	v['FCFLAGS_fcshlib'] = ['-Kpic']
-	v['FCFLAGS_DEBUG'] = ['-w3']
-	v['LINKFLAGS_fcshlib'] = ['-G']
-	v['FCSTLIB_MARKER'] = '-Bstatic'
-	v['FCSHLIB_MARKER'] = '-Bdynamic'
-	v['SONAME_ST']      = '-h %s'
-
-@conf
-def solstudio_modifier_platform(conf):
-	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
-	solstudio_modifier_func = getattr(conf, 'solstudio_modifier_' + dest_os, None)
-	if solstudio_modifier_func:
-		solstudio_modifier_func()
-
-@conf
-def get_solstudio_version(conf, fc):
-	"""Get the compiler version"""
-
-	version_re = re.compile(r"Sun Fortran 95 *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
-	cmd = fc + ['-V']
-
-	out, err = fc_config.getoutput(conf,cmd,stdin=False)
-	if out:
-		match = version_re(out)
-	else:
-		match = version_re(err)
-	if not match:
-		conf.fatal('Could not determine the Sun Studio Fortran version.')
-	k = match.groupdict()
-	conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
-	conf.find_solstudio()
-	conf.find_ar()
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.solstudio_flags()
-	conf.solstudio_modifier_platform()
-
diff --git a/waflib/extras/fc_xlf.py b/waflib/extras/fc_xlf.py
deleted file mode 100644
index 5a3da03..0000000
--- a/waflib/extras/fc_xlf.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib import Utils,Errors
-from waflib.Tools import fc,fc_config,fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['aix'].insert(0, 'fc_xlf')
-
-@conf
-def find_xlf(conf):
-	"""Find the xlf program (will look in the environment variable 'FC')"""
-
-	fc = conf.find_program(['xlf2003_r', 'xlf2003', 'xlf95_r', 'xlf95', 'xlf90_r', 'xlf90', 'xlf_r', 'xlf'], var='FC')
-	conf.get_xlf_version(fc)
-	conf.env.FC_NAME='XLF'
-
-@conf
-def xlf_flags(conf):
-	v = conf.env
-	v['FCDEFINES_ST'] = '-WF,-D%s'
-	v['FCFLAGS_fcshlib'] = ['-qpic=small']
-	v['FCFLAGS_DEBUG'] = ['-qhalt=w']
-	v['LINKFLAGS_fcshlib'] = ['-Wl,-shared']
-
-@conf
-def xlf_modifier_platform(conf):
-	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
-	xlf_modifier_func = getattr(conf, 'xlf_modifier_' + dest_os, None)
-	if xlf_modifier_func:
-		xlf_modifier_func()
-
-@conf
-def get_xlf_version(conf, fc):
-	"""Get the compiler version"""
-
-	cmd = fc + ['-qversion']
-	try:
-		out, err = conf.cmd_and_log(cmd, output=0)
-	except Errors.WafError:
-		conf.fatal('Could not find xlf %r' % cmd)
-
-	for v in (r"IBM XL Fortran.* V(?P<major>\d*)\.(?P<minor>\d*)",):
-		version_re = re.compile(v, re.I).search
-		match = version_re(out or err)
-		if match:
-			k = match.groupdict()
-			conf.env['FC_VERSION'] = (k['major'], k['minor'])
-			break
-	else:
-		conf.fatal('Could not determine the XLF version.')
-
-def configure(conf):
-	conf.find_xlf()
-	conf.find_ar()
-	conf.fc_flags()
-	conf.fc_add_flags()
-	conf.xlf_flags()
-	conf.xlf_modifier_platform()
-
diff --git a/waflib/extras/file_to_object.py b/waflib/extras/file_to_object.py
deleted file mode 100644
index 1393b51..0000000
--- a/waflib/extras/file_to_object.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# Tool to embed file into objects
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-
-This tool allows embedding file contents in object files (.o).
-It is not exactly portable, and the embedded contents are only
-reachable in various non-portable ways.
-The goal here is to provide a functional interface to the embedding
-of file data in objects.
-See the ``playground/embedded_resources`` example for an example.
-
-Usage::
-
-   bld(
-    name='pipeline',
-     # ^ Reference this in use="..." for things using the generated code
-    features='file_to_object',
-    source='some.file',
-     # ^ Name of the file to embed in binary section.
-   )
-
-Known issues:
-
-- The destination is named after the source, with the extension renamed
-  to .o, e.g. some.file -> some.o
-
-"""
-
-import os
-from waflib import Task, TaskGen, Errors
-
-def filename_c_escape(x):
-	return x.replace("\\", "\\\\")
-
-class file_to_object_s(Task.Task):
-	color = 'CYAN'
-	vars = ['DEST_CPU', 'DEST_BINFMT']
-
-	def run(self):
-		name = []
-		for i, x in enumerate(self.inputs[0].name):
-			if x.isalnum():
-				name.append(x)
-			else:
-				name.append('_')
-		file = self.inputs[0].abspath()
-		size = os.path.getsize(file)
-		if self.env.DEST_CPU in ('x86_64', 'ia', 'aarch64'):
-			unit = 'quad'
-			align = 8
-		elif self.env.DEST_CPU in ('x86','arm', 'thumb', 'm68k'):
-			unit = 'long'
-			align = 4
-		else:
-			raise Errors.WafError("Unsupported DEST_CPU, please report bug!")
-
-		file = filename_c_escape(file)
-		name = "_binary_" + "".join(name)
-		rodata = ".section .rodata"
-		if self.env.DEST_BINFMT == "mac-o":
-			name = "_" + name
-			rodata = ".section __TEXT,__const"
-
-		with open(self.outputs[0].abspath(), 'w') as f:
-			f.write(\
-"""
-	.global %(name)s_start
-	.global %(name)s_end
-	.global %(name)s_size
-	%(rodata)s
-%(name)s_start:
-	.incbin "%(file)s"
-%(name)s_end:
-	.align %(align)d
-%(name)s_size:
-	.%(unit)s 0x%(size)x
-""" % locals())
-
-class file_to_object_c(Task.Task):
-	color = 'CYAN'
-	def run(self):
-		name = []
-		for i, x in enumerate(self.inputs[0].name):
-			if x.isalnum():
-				name.append(x)
-			else:
-				name.append('_')
-		file = self.inputs[0].abspath()
-		size = os.path.getsize(file)
-
-		name = "_binary_" + "".join(name)
-
-		data = self.inputs[0].read('rb')
-		lines, line = [], []
-		for idx_byte, byte in enumerate(data):
-			line.append(byte)
-			if len(line) > 15 or idx_byte == size-1:
-				lines.append(", ".join(("0x%02x" % ord(x)) for x in line))
-				line = []
-		data = ",\n ".join(lines)
-
-		self.outputs[0].write(\
-"""
-unsigned long %(name)s_size = %(size)dL;
-char const %(name)s_start[] = {
- %(data)s
-};
-char const %(name)s_end[] = {};
-""" % locals())
-
-@TaskGen.feature('file_to_object')
-@TaskGen.before_method('process_source')
-def tg_file_to_object(self):
-	bld = self.bld
-	sources = self.to_nodes(self.source)
-	targets = []
-	for src in sources:
-		if bld.env.F2O_METHOD == ["asm"]:
-			tgt = src.parent.find_or_declare(src.name + '.f2o.s')
-			tsk = self.create_task('file_to_object_s', src, tgt)
-			tsk.cwd = src.parent.abspath() # verify
-		else:
-			tgt = src.parent.find_or_declare(src.name + '.f2o.c')
-			tsk = self.create_task('file_to_object_c', src, tgt)
-			tsk.cwd = src.parent.abspath() # verify
-		targets.append(tgt)
-	self.source = targets
-
-def configure(conf):
-	conf.load('gas')
-	conf.env.F2O_METHOD = ["c"]
-
diff --git a/waflib/extras/fluid.py b/waflib/extras/fluid.py
deleted file mode 100644
index 4814a35..0000000
--- a/waflib/extras/fluid.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/python
-# encoding: utf-8
-# Grygoriy Fuchedzhy 2009
-
-"""
-Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
-"""
-
-from waflib import Task
-from waflib.TaskGen import extension
-
-class fluid(Task.Task):
-	color   = 'BLUE'
-	ext_out = ['.h']
-	run_str = '${FLUID} -c -o ${TGT[0].abspath()} -h ${TGT[1].abspath()} ${SRC}'
-
-@extension('.fl')
-def process_fluid(self, node):
-	"""add the .fl to the source list; the cxx file generated will be compiled when possible"""
-	cpp = node.change_ext('.cpp')
-	hpp = node.change_ext('.hpp')
-	self.create_task('fluid', node, [cpp, hpp])
-
-	if 'cxx' in self.features:
-		self.source.append(cpp)
-
-def configure(conf):
-	conf.find_program('fluid', var='FLUID')
-	conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
-
diff --git a/waflib/extras/freeimage.py b/waflib/extras/freeimage.py
deleted file mode 100644
index f27e525..0000000
--- a/waflib/extras/freeimage.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-#
-# written by Sylvain Rouquette, 2011
-
-'''
-To add the freeimage tool to the waf file:
-$ ./waf-light --tools=compat15,freeimage
-	or, if you have waf >= 1.6.2
-$ ./waf update --files=freeimage
-
-The wscript will look like:
-
-def options(opt):
-	opt.load('compiler_cxx freeimage')
-
-def configure(conf):
-	conf.load('compiler_cxx freeimage')
-
-	# you can call check_freeimage with some parameters.
-	# It's optional on Linux, it's 'mandatory' on Windows if
-	# you didn't use --fi-path on the command-line
-
-	# conf.check_freeimage(path='FreeImage/Dist', fip=True)
-
-def build(bld):
-	bld(source='main.cpp', target='app', use='FREEIMAGE')
-'''
-
-from waflib import Utils
-from waflib.Configure import conf
-
-
-def options(opt):
-	opt.add_option('--fi-path', type='string', default='', dest='fi_path',
-				   help='''path to the FreeImage directory \
-						where the files are e.g. /FreeImage/Dist''')
-	opt.add_option('--fip', action='store_true', default=False, dest='fip',
-				   help='link with FreeImagePlus')
-	opt.add_option('--fi-static', action='store_true',
-				   default=False, dest='fi_static',
-				   help="link as shared libraries")
-
-
-@conf
-def check_freeimage(self, path=None, fip=False):
-	self.start_msg('Checking FreeImage')
-	if not self.env['CXX']:
-		self.fatal('you must load compiler_cxx before loading freeimage')
-	prefix = self.options.fi_static and 'ST' or ''
-	platform = Utils.unversioned_sys_platform()
-	if platform == 'win32':
-		if not path:
-			self.fatal('you must specify the path to FreeImage. \
-					   use --fi-path=/FreeImage/Dist')
-		else:
-			self.env['INCLUDES_FREEIMAGE'] = path
-			self.env['%sLIBPATH_FREEIMAGE' % prefix] = path
-	libs = ['FreeImage']
-	if self.options.fip:
-		libs.append('FreeImagePlus')
-	if platform == 'win32':
-		self.env['%sLIB_FREEIMAGE' % prefix] = libs
-	else:
-		self.env['%sLIB_FREEIMAGE' % prefix] = [i.lower() for i in libs]
-	self.end_msg('ok')
-
-
-def configure(conf):
-	platform = Utils.unversioned_sys_platform()
-	if platform == 'win32' and not conf.options.fi_path:
-		return
-	conf.check_freeimage(conf.options.fi_path, conf.options.fip)
-
diff --git a/waflib/extras/fsb.py b/waflib/extras/fsb.py
deleted file mode 100644
index 1b8f398..0000000
--- a/waflib/extras/fsb.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-"""
-Fully sequential builds
-
-Tasks from previously declared task generators are re-processed on each call,
-which may lead to speed issues. Yet, if you are using this, speed is probably
-a minor concern.
-"""
-
-from waflib import Build
-
-def options(opt):
-	pass
-
-def configure(conf):
-	pass
-
-class FSBContext(Build.BuildContext):
-	def __call__(self, *k, **kw):
-		ret = Build.BuildContext.__call__(self, *k, **kw)
-
-		# evaluate the results immediately
-		Build.BuildContext.compile(self)
-
-		return ret
-
-	def compile(self):
-		pass
-
diff --git a/waflib/extras/fsc.py b/waflib/extras/fsc.py
deleted file mode 100644
index c67e70b..0000000
--- a/waflib/extras/fsc.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-"""
-Experimental F# stuff
-
-FSC="mono /path/to/fsc.exe" waf configure build
-"""
-
-from waflib import Utils, Task
-from waflib.TaskGen import before_method, after_method, feature
-from waflib.Tools import ccroot, cs
-
-ccroot.USELIB_VARS['fsc'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
-
-@feature('fs')
-@before_method('process_source')
-def apply_fsc(self):
-	cs_nodes = []
-	no_nodes = []
-	for x in self.to_nodes(self.source):
-		if x.name.endswith('.fs'):
-			cs_nodes.append(x)
-		else:
-			no_nodes.append(x)
-	self.source = no_nodes
-
-	bintype = getattr(self, 'type', self.gen.endswith('.dll') and 'library' or 'exe')
-	self.cs_task = tsk = self.create_task('fsc', cs_nodes, self.path.find_or_declare(self.gen))
-	tsk.env.CSTYPE = '/target:%s' % bintype
-	tsk.env.OUT    = '/out:%s' % tsk.outputs[0].abspath()
-
-	inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
-	if inst_to:
-		# note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
-		mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
-		self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
-
-feature('fs')(cs.use_cs)
-after_method('apply_fsc')(cs.use_cs)
-
-feature('fs')(cs.debug_cs)
-after_method('apply_fsc', 'use_cs')(cs.debug_cs)
-
-class fsc(Task.Task):
-	"""
-	Compile F# files
-	"""
-	color   = 'YELLOW'
-	run_str = '${FSC} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
-
-def configure(conf):
-	"""
-	Find a F# compiler, set the variable FSC for the compiler and FS_NAME (mono or fsc)
-	"""
-	conf.find_program(['fsc.exe', 'fsharpc'], var='FSC')
-	conf.env.ASS_ST = '/r:%s'
-	conf.env.RES_ST = '/resource:%s'
-
-	conf.env.FS_NAME = 'fsc'
-	if str(conf.env.FSC).lower().find('fsharpc') > -1:
-		conf.env.FS_NAME = 'mono'
-
diff --git a/waflib/extras/gccdeps.py b/waflib/extras/gccdeps.py
deleted file mode 100644
index bfabe72..0000000
--- a/waflib/extras/gccdeps.py
+++ /dev/null
@@ -1,214 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008-2010 (ita)
-
-"""
-Execute the tasks with gcc -MD, read the dependencies from the .d file
-and prepare the dependency calculation for the next run.
-This affects the cxx class, so make sure to load Qt5 after this tool.
-
-Usage::
-
-	def options(opt):
-		opt.load('compiler_cxx')
-	def configure(conf):
-		conf.load('compiler_cxx gccdeps')
-"""
-
-import os, re, threading
-from waflib import Task, Logs, Utils, Errors
-from waflib.Tools import c_preproc
-from waflib.TaskGen import before_method, feature
-
-lock = threading.Lock()
-
-gccdeps_flags = ['-MD']
-if not c_preproc.go_absolute:
-	gccdeps_flags = ['-MMD']
-
-# Third-party tools are allowed to add extra names in here with append()
-supported_compilers = ['gcc', 'icc', 'clang']
-
-def scan(self):
-	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
-		return super(self.derived_gccdeps, self).scan()
-	nodes = self.generator.bld.node_deps.get(self.uid(), [])
-	names = []
-	return (nodes, names)
-
-re_o = re.compile(r"\.o$")
-re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped
-
-def remove_makefile_rule_lhs(line):
-	# Splitting on a plain colon would accidentally match inside a
-	# Windows absolute-path filename, so we must search for a colon
-	# followed by whitespace to find the divider between LHS and RHS
-	# of the Makefile rule.
-	rulesep = ': '
-
-	sep_idx = line.find(rulesep)
-	if sep_idx >= 0:
-		return line[sep_idx + 2:]
-	else:
-		return line
-
-def path_to_node(base_node, path, cached_nodes):
-	# Take the base node and the path and return a node
-	# Results are cached because searching the node tree is expensive
-	# The following code is executed by threads; it is not thread-safe by itself, so a lock is needed
-	if getattr(path, '__hash__'):
-		node_lookup_key = (base_node, path)
-	else:
-		# Not hashable, assume it is a list and join into a string
-		node_lookup_key = (base_node, os.path.sep.join(path))
-	try:
-		lock.acquire()
-		node = cached_nodes[node_lookup_key]
-	except KeyError:
-		node = base_node.find_resource(path)
-		cached_nodes[node_lookup_key] = node
-	finally:
-		lock.release()
-	return node
-
-def post_run(self):
-	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
-		return super(self.derived_gccdeps, self).post_run()
-
-	name = self.outputs[0].abspath()
-	name = re_o.sub('.d', name)
-	try:
-		txt = Utils.readf(name)
-	except EnvironmentError:
-		Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
-		raise
-	#os.remove(name)
-
-	# Compilers have the choice to either output the file's dependencies
-	# as one large Makefile rule:
-	#
-	#   /path/to/file.o: /path/to/dep1.h \
-	#                    /path/to/dep2.h \
-	#                    /path/to/dep3.h \
-	#                    ...
-	#
-	# or as many individual rules:
-	#
-	#   /path/to/file.o: /path/to/dep1.h
-	#   /path/to/file.o: /path/to/dep2.h
-	#   /path/to/file.o: /path/to/dep3.h
-	#   ...
-	#
-	# So the first step is to sanitize the input by stripping out the left-
-	# hand side of all these lines. After that, whatever remains are the
-	# implicit dependencies of task.outputs[0]
-	txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])
-
-	# Now join all the lines together
-	txt = txt.replace('\\\n', '')
-
-	val = txt.strip()
-	val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]
-
-	nodes = []
-	bld = self.generator.bld
-
-	# Dynamically bind to the cache
-	try:
-		cached_nodes = bld.cached_nodes
-	except AttributeError:
-		cached_nodes = bld.cached_nodes = {}
-
-	for x in val:
-
-		node = None
-		if os.path.isabs(x):
-			node = path_to_node(bld.root, x, cached_nodes)
-		else:
-			# TODO waf 1.9 - single cwd value
-			path = getattr(bld, 'cwdx', bld.bldnode)
-			# when calling find_resource, make sure the path does not contain '..'
-			x = [k for k in Utils.split_path(x) if k and k != '.']
-			while '..' in x:
-				idx = x.index('..')
-				if idx == 0:
-					x = x[1:]
-					path = path.parent
-				else:
-					del x[idx]
-					del x[idx-1]
-
-			node = path_to_node(path, x, cached_nodes)
-
-		if not node:
-			raise ValueError('could not find %r for %r' % (x, self))
-		if id(node) == id(self.inputs[0]):
-			# ignore the source file, it is already in the dependencies
-			# this way, successful config tests may be retrieved from the cache
-			continue
-		nodes.append(node)
-
-	Logs.debug('deps: gccdeps for %s returned %s', self, nodes)
-
-	bld.node_deps[self.uid()] = nodes
-	bld.raw_deps[self.uid()] = []
-
-	try:
-		del self.cache_sig
-	except AttributeError:
-		pass
-
-	Task.Task.post_run(self)
-
-def sig_implicit_deps(self):
-	if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
-		return super(self.derived_gccdeps, self).sig_implicit_deps()
-	try:
-		return Task.Task.sig_implicit_deps(self)
-	except Errors.WafError:
-		return Utils.SIG_NIL
-
-def wrap_compiled_task(classname):
-	derived_class = type(classname, (Task.classes[classname],), {})
-	derived_class.derived_gccdeps = derived_class
-	derived_class.post_run = post_run
-	derived_class.scan = scan
-	derived_class.sig_implicit_deps = sig_implicit_deps
-
-for k in ('c', 'cxx'):
-	if k in Task.classes:
-		wrap_compiled_task(k)
-
-@before_method('process_source')
-@feature('force_gccdeps')
-def force_gccdeps(self):
-	self.env.ENABLE_GCCDEPS = ['c', 'cxx']
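-
-# A hedged sketch: the feature above can also be set on a regular task
-# generator to force .d-based scanning regardless of the configure checks:
-#
-#	def build(bld):
-#		bld.program(source='main.c', target='app', features='force_gccdeps')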
-
-def configure(conf):
-	# in case someone provides a --enable-gccdeps command-line option
-	if not getattr(conf.options, 'enable_gccdeps', True):
-		return
-
-	global gccdeps_flags
-	flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
-	if conf.env.CC_NAME in supported_compilers:
-		try:
-			conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ' '.join(flags))
-		except Errors.ConfigurationError:
-			pass
-		else:
-			conf.env.append_value('CFLAGS', flags)
-			conf.env.append_unique('ENABLE_GCCDEPS', 'c')
-
-	if conf.env.CXX_NAME in supported_compilers:
-		try:
-			conf.check(fragment='int main() { return 0; }', features='cxx force_gccdeps', cxxflags=flags, msg='Checking for cxx flags %r' % ' '.join(flags))
-		except Errors.ConfigurationError:
-			pass
-		else:
-			conf.env.append_value('CXXFLAGS', flags)
-			conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')
-
-def options(opt):
-	raise ValueError('Do not load gccdeps options')
-
diff --git a/waflib/extras/gdbus.py b/waflib/extras/gdbus.py
deleted file mode 100644
index 0e0476e..0000000
--- a/waflib/extras/gdbus.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Copyright Garmin International or its subsidiaries, 2018
-#
-# Heavily based on dbus.py
-
-"""
-Compiles dbus files with **gdbus-codegen**
-Typical usage::
-	def options(opt):
-		opt.load('compiler_c gdbus')
-	def configure(conf):
-		conf.load('compiler_c gdbus')
-	def build(bld):
-		tg = bld.program(
-			includes = '.',
-			source = bld.path.ant_glob('*.c'),
-			target = 'gnome-hello')
-		tg.add_gdbus_file('test.xml', 'com.example.example.', 'Example')
-"""
-
-from waflib import Task, Errors, Utils
-from waflib.TaskGen import taskgen_method, before_method
-
-@taskgen_method
-def add_gdbus_file(self, filename, prefix, namespace, export=False):
-	"""
-	Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.
-	:param filename: xml file to compile
-	:type filename: string
-	:param prefix: interface prefix (--interface-prefix=prefix)
-	:type prefix: string
-	:param namespace: C namespace (--c-namespace=namespace)
-	:type namespace: string
-	:param export: Export Headers?
-	:type export: boolean
-	"""
-	if not hasattr(self, 'gdbus_lst'):
-		self.gdbus_lst = []
-	if not 'process_gdbus' in self.meths:
-		self.meths.append('process_gdbus')
-	self.gdbus_lst.append([filename, prefix, namespace, export])
-
-@before_method('process_source')
-def process_gdbus(self):
-	"""
-	Processes the dbus files stored in the attribute *gdbus_lst* to create :py:class:`gdbus_binding_tool` instances.
-	"""
-	output_node = self.path.get_bld().make_node(['gdbus', self.get_name()])
-	sources = []
-
-	for filename, prefix, namespace, export in getattr(self, 'gdbus_lst', []):
-		node = self.path.find_resource(filename)
-		if not node:
-			raise Errors.WafError('file not found ' + filename)
-		c_file = output_node.find_or_declare(node.change_ext('.c').name)
-		h_file = output_node.find_or_declare(node.change_ext('.h').name)
-		tsk = self.create_task('gdbus_binding_tool', node, [c_file, h_file])
-		tsk.cwd = output_node.abspath()
-
-		tsk.env.GDBUS_CODEGEN_INTERFACE_PREFIX = prefix
-		tsk.env.GDBUS_CODEGEN_NAMESPACE = namespace
-		tsk.env.GDBUS_CODEGEN_OUTPUT = node.change_ext('').name
-		sources.append(c_file)
-
-	if sources:
-		output_node.mkdir()
-		self.source = Utils.to_list(self.source) + sources
-		self.includes = [output_node] + self.to_incnodes(getattr(self, 'includes', []))
-		if export:
-			self.export_includes = [output_node] + self.to_incnodes(getattr(self, 'export_includes', []))
-
-class gdbus_binding_tool(Task.Task):
-	"""
-	Compiles a dbus file
-	"""
-	color   = 'BLUE'
-	ext_out = ['.h', '.c']
-	run_str = '${GDBUS_CODEGEN} --interface-prefix ${GDBUS_CODEGEN_INTERFACE_PREFIX} --generate-c-code ${GDBUS_CODEGEN_OUTPUT} --c-namespace ${GDBUS_CODEGEN_NAMESPACE} --c-generate-object-manager ${SRC[0].abspath()}'
-	shell = True
-
-def configure(conf):
-	"""
-	Detects the program gdbus-codegen and sets ``conf.env.GDBUS_CODEGEN``
-	"""
-	conf.find_program('gdbus-codegen', var='GDBUS_CODEGEN')
-
diff --git a/waflib/extras/gob2.py b/waflib/extras/gob2.py
deleted file mode 100644
index b4fa3b9..0000000
--- a/waflib/extras/gob2.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-from waflib import TaskGen
-
-TaskGen.declare_chain(
-	name = 'gob2',
-	rule = '${GOB2} -o ${TGT[0].bld_dir()} ${GOB2FLAGS} ${SRC}',
-	ext_in = '.gob',
-	ext_out = '.c'
-)
-
-def configure(conf):
-	conf.find_program('gob2', var='GOB2')
-	conf.env['GOB2FLAGS'] = ''
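-
-# A minimal usage sketch ('hello.gob' is a placeholder name): .gob sources
-# go through the chain above and the generated .c files are compiled:
-#
-#	def configure(conf):
-#		conf.load('compiler_c gob2')
-#
-#	def build(bld):
-#		bld.program(source='hello.gob', target='hello')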
-
diff --git a/waflib/extras/halide.py b/waflib/extras/halide.py
deleted file mode 100644
index 6078e38..0000000
--- a/waflib/extras/halide.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# Halide code generation tool
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-
-Tool to run `Halide <http://halide-lang.org>`_ code generators.
-
-Usage::
-
-   bld(
-    name='pipeline',
-     # ^ Reference this in use="..." for things using the generated code
-    #target=['pipeline.o', 'pipeline.h']
-     # ^ by default, name.{o,h} is added, but you can set the outputs here
-    features='halide',
-    halide_env="HL_TRACE=1 HL_TARGET=host-opencl-gpu_debug",
-     # ^ Environment passed to the generator,
-     # can be a dict, k/v list, or string.
-    args=[],
-     # ^ Command-line arguments to the generator (optional),
-     # eg. to give parameters to the scheduling
-    source='pipeline_gen',
-     # ^ Name of the source executable
-   )
-
-
-Known issues:
-
-
-- Currently only supports Linux (no ".exe")
-
-- Doesn't rerun on input modification when input is part of a build
-  chain, and has been modified externally.
-
-"""
-
-import os
-from waflib import Task, Utils, Options, TaskGen, Errors
-
-class run_halide_gen(Task.Task):
-	color = 'CYAN'
-	vars = ['HALIDE_ENV', 'HALIDE_ARGS']
-	run_str = "${SRC[0].abspath()} ${HALIDE_ARGS}"
-	def __str__(self):
-		stuff = "halide"
-		stuff += ("[%s]" % (",".join(
-		 ('%s=%s' % (k,v)) for k, v in sorted(self.env.env.items()))))
-		return Task.Task.__str__(self).replace(self.__class__.__name__,
-		 stuff)
-
-@TaskGen.feature('halide')
-@TaskGen.before_method('process_source')
-def halide(self):
-	Utils.def_attrs(self,
-	 args=[],
-	 halide_env={},
-	)
-
-	bld = self.bld
-
-	env = self.halide_env
-	try:
-		if isinstance(env, str):
-			env = dict(x.split('=') for x in env.split())
-		elif isinstance(env, list):
-			env = dict(x.split('=') for x in env)
-		assert isinstance(env, dict)
-	except Exception as e:
-		if not isinstance(e, ValueError) \
-		 and not isinstance(e, AssertionError):
-			raise
-		raise Errors.WafError(
-		 "halide_env must be under the form" \
-		 " {'HL_x':'a', 'HL_y':'b'}" \
-		 " or ['HL_x=y', 'HL_y=b']" \
-		 " or 'HL_x=y HL_y=b'")
-
-	src = self.to_nodes(self.source)
-	assert len(src) == 1, "Only one source expected"
-	src = src[0]
-
-	args = Utils.to_list(self.args)
-
-	def change_ext(src, ext):
-		# Return a node with a new extension, in an appropriate folder
-		name = src.name
-		xpos = src.name.rfind('.')
-		if xpos == -1:
-			xpos = len(src.name)
-		newname = name[:xpos] + ext
-		if src.is_child_of(bld.bldnode):
-			node = src.get_src().parent.find_or_declare(newname)
-		else:
-			node = bld.bldnode.find_or_declare(newname)
-		return node
-
-	def to_nodes(self, lst, path=None):
-		tmp = []
-		path = path or self.path
-		find = path.find_or_declare
-
-		if isinstance(lst, self.path.__class__):
-			lst = [lst]
-
-		for x in Utils.to_list(lst):
-			if isinstance(x, str):
-				node = find(x)
-			else:
-				node = x
-			tmp.append(node)
-		return tmp
-
-	tgt = to_nodes(self, self.target)
-	if not tgt:
-		tgt = [change_ext(src, '.o'), change_ext(src, '.h')]
-	cwd = tgt[0].parent.abspath()
-	task = self.create_task('run_halide_gen', src, tgt, cwd=cwd)
-	task.env.append_unique('HALIDE_ARGS', args)
-	if task.env.env == []:
-		task.env.env = {}
-	task.env.env.update(env)
-	task.env.HALIDE_ENV = " ".join(("%s=%s" % (k,v)) for (k,v) in sorted(env.items()))
-	task.env.HALIDE_ARGS = args
-
-	try:
-		self.compiled_tasks.append(task)
-	except AttributeError:
-		self.compiled_tasks = [task]
-	self.source = []
-
-def configure(conf):
-	if Options.options.halide_root is None:
-		conf.check_cfg(package='Halide', args='--cflags --libs')
-	else:
-		halide_root = Options.options.halide_root
-		conf.env.INCLUDES_HALIDE = [ os.path.join(halide_root, "include") ]
-		conf.env.LIBPATH_HALIDE = [ os.path.join(halide_root, "lib") ]
-		conf.env.LIB_HALIDE = ["Halide"]
-
-		# You might want to add this until upstream fixes it
-		#conf.env.LIB_HALIDE += ['ncurses', 'dl', 'pthread']
-
-def options(opt):
-	opt.add_option('--halide-root',
-	 help="path to Halide include and lib files",
-	)
-
diff --git a/waflib/extras/javatest.py b/waflib/extras/javatest.py
deleted file mode 100755
index 979b8d8..0000000
--- a/waflib/extras/javatest.py
+++ /dev/null
@@ -1,118 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Federico Pellegrin, 2017 (fedepell)
-
-"""
-Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
-task via the **javatest** feature.
-
-This makes it possible to run unit tests and have them integrated into the
-standard waf unit test environment. It has been tested with TestNG and JUnit
-but should be easily expandable to other frameworks given the flexibility of
-ut_str provided by the standard waf unit test environment.
-
-Example usage:
-
-def options(opt):
-	opt.load('java waf_unit_test javatest')
-
-def configure(conf):
-	conf.load('java javatest')
-
-def build(bld):
-	
-	[ ... mainprog is built here ... ]
-
-	bld(features = 'javac javatest',
-		srcdir     = 'test/', 
-		outdir     = 'test', 
-		sourcepath = ['test'],
-		classpath  = [ 'src' ], 
-		basedir    = 'test', 
-		use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/
-		ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}',
-		jtest_source = bld.path.ant_glob('test/*.xml'),
-	)
-
-
-At the command line, specify the CLASSPATH where the testing environment and
-the test runner (TestNG by default) can be found; these are then seen in the
-environment as CLASSPATH_JAVATEST (used via *use*) and JTRUNNER, and can be
-used for dependencies and for ut_str generation.
-
-Example configure for TestNG:
-	waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar --jtrunner=org.testng.TestNG
-		 or as default runner is TestNG:
-	waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar
-
-Example configure for JUnit:
-	waf configure --jtpath=/tmp/junit.jar --jtrunner=org.junit.runner.JUnitCore
-
-The runner class presence on the system is checked for at configuration stage.
-
-"""
-
-import os
-from waflib import Task, TaskGen, Options
-
-@TaskGen.feature('javatest')
-@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath')
-def make_javatest(self):
-	"""
-	Creates a ``utest`` task with a populated environment for Java Unit test execution
-
-	"""
-	tsk = self.create_task('utest')
-	tsk.set_run_after(self.javac_task)
-
-	# Put test input files as waf_unit_test relies on that for some prints and log generation
-	# If jtest_source is there, this is specially useful for passing XML for TestNG
-	# that contain test specification, use that as inputs, otherwise test sources
-	if getattr(self, 'jtest_source', None):
-		tsk.inputs = self.to_nodes(self.jtest_source)
-	else:
-		if self.javac_task.srcdir[0].exists():
-			tsk.inputs = self.javac_task.srcdir[0].ant_glob('**/*.java', remove=False)
-
-	if getattr(self, 'ut_str', None):
-		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
-		tsk.vars = lst + tsk.vars
-
-	if getattr(self, 'ut_cwd', None):
-		if isinstance(self.ut_cwd, str):
-			# we want a Node instance
-			if os.path.isabs(self.ut_cwd):
-				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
-			else:
-				self.ut_cwd = self.path.make_node(self.ut_cwd)
-	else:
-		self.ut_cwd = self.bld.bldnode
-
-	# Get parent CLASSPATH and add output dir of test, we run from wscript dir
-	# We have to change it from list to the standard java -cp format (: separated)
-	tsk.env.CLASSPATH = ':'.join(self.env.CLASSPATH) + ':' + self.outdir.abspath()
-
-	if not self.ut_cwd.exists():
-		self.ut_cwd.mkdir()
-
-	if not hasattr(self, 'ut_env'):
-		self.ut_env = dict(os.environ)
-
-def configure(ctx):
-	cp = ctx.env.CLASSPATH or '.'
-	if getattr(Options.options, 'jtpath', None):
-		ctx.env.CLASSPATH_JAVATEST = getattr(Options.options, 'jtpath').split(':')
-		cp += ':' + getattr(Options.options, 'jtpath')
-
-	if getattr(Options.options, 'jtrunner', None):
-		ctx.env.JTRUNNER = getattr(Options.options, 'jtrunner')
-
-	if ctx.check_java_class(ctx.env.JTRUNNER, with_classpath=cp):
-		ctx.fatal('Could not run test class %r' % ctx.env.JTRUNNER)
-
-def options(opt):
-	opt.add_option('--jtpath', action='store', default='', dest='jtpath',
-		help='Path to jar(s) needed for javatest execution, colon separated, if not in the system CLASSPATH')
-	opt.add_option('--jtrunner', action='store', default='org.testng.TestNG', dest='jtrunner',
-		help='Class to run javatest test [default: org.testng.TestNG]')
-
diff --git a/waflib/extras/kde4.py b/waflib/extras/kde4.py
deleted file mode 100644
index aed9bfb..0000000
--- a/waflib/extras/kde4.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"""
-Support for the KDE4 libraries and msgfmt
-"""
-
-import os, re
-from waflib import Task, Utils
-from waflib.TaskGen import feature
-
-@feature('msgfmt')
-def apply_msgfmt(self):
-	"""
-	Process all languages to create .mo files and to install them::
-
-		def build(bld):
-			bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
-	"""
-	for lang in self.to_list(self.langs):
-		node = self.path.find_resource(lang+'.po')
-		task = self.create_task('msgfmt', node, node.change_ext('.mo'))
-
-		langname = lang.split('/')
-		langname = langname[-1]
-
-		inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')
-
-		self.add_install_as(
-			inst_to = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
-			inst_from = task.outputs[0],
-			chmod = getattr(self, 'chmod', Utils.O644))
-
-class msgfmt(Task.Task):
-	"""
-	Transform .po files into .mo files
-	"""
-	color   = 'BLUE'
-	run_str = '${MSGFMT} ${SRC} -o ${TGT}'
-
-def configure(self):
-	"""
-	Detect kde4-config and set various variables for the *use* system::
-
-		def options(opt):
-			opt.load('compiler_cxx kde4')
-		def configure(conf):
-			conf.load('compiler_cxx kde4')
-		def build(bld):
-			bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
-	"""
-	kdeconfig = self.find_program('kde4-config')
-	prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
-	fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
-	try:
-		os.stat(fname)
-	except OSError:
-		fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
-		try:
-			os.stat(fname)
-		except OSError:
-			self.fatal('could not open %s' % fname)
-
-	try:
-		txt = Utils.readf(fname)
-	except EnvironmentError:
-		self.fatal('could not read %s' % fname)
-
-	txt = txt.replace('\\\n', '\n')
-	fu = re.compile('#(.*)\n')
-	txt = fu.sub('', txt)
-
-	setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
-	found = setregexp.findall(txt)
-
-	for (_, key, val) in found:
-		#print key, val
-		self.env[key] = val
-
-	# well well, i could just write an interpreter for cmake files
-	self.env['LIB_KDECORE']= ['kdecore']
-	self.env['LIB_KDEUI']  = ['kdeui']
-	self.env['LIB_KIO']    = ['kio']
-	self.env['LIB_KHTML']  = ['khtml']
-	self.env['LIB_KPARTS'] = ['kparts']
-
-	self.env['LIBPATH_KDECORE']  = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
-	self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
-	self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])
-
-	self.find_program('msgfmt', var='MSGFMT')
-
diff --git a/waflib/extras/local_rpath.py b/waflib/extras/local_rpath.py
deleted file mode 100644
index b2507e1..0000000
--- a/waflib/extras/local_rpath.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-from waflib.TaskGen import after_method, feature
-
-@after_method('propagate_uselib_vars')
-@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib')
-def add_rpath_stuff(self):
-	all = self.to_list(getattr(self, 'use', []))
-	while all:
-		name = all.pop()
-		try:
-			tg = self.bld.get_tgen_by_name(name)
-		except Exception:
-			continue
-		self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
-		all.extend(self.to_list(getattr(tg, 'use', [])))
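-
-# A hedged sketch: with this tool loaded, linking 'app' against the locally
-# built 'mylib' also records the library's output directory in RPATH:
-#
-#	def build(bld):
-#		bld.shlib(source='lib.c', target='mylib')
-#		bld.program(source='main.c', target='app', use='mylib')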
-
diff --git a/waflib/extras/lv2.py b/waflib/extras/lv2.py
deleted file mode 100644
index ffcb2e7..0000000
--- a/waflib/extras/lv2.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import os
-import sys
-
-from waflib import Logs
-from waflib import Options
-
-def options(opt):
-    conf_opts = opt.get_option_group('Configuration options')
-    conf_opts.add_option('--lv2-user', action='store_true', default=False, dest='lv2_user',
-                         help='install LV2 bundles to user location')
-    conf_opts.add_option('--lv2dir', type='string',
-                         help='LV2 bundles [Default: LIBDIR/lv2]')
-
-def register_lv2_path(conf, path):
-    """Return the default LV2_PATH to use for this system"""
-    if 'LV2_PATH' not in conf.run_env and 'LV2_PATH' not in os.environ:
-        conf.run_env['LV2_PATH'] = [conf.env['LV2DIR']]
-
-    conf.run_env.append_unique('LV2_PATH', path)
-
-def default_lv2_path(conf):
-    """Return the default LV2_PATH for the build target as a list"""
-    if conf.env.DEST_OS == 'darwin':
-        return ['~/Library/Audio/Plug-Ins/LV2',
-                '~/.lv2',
-                '/usr/local/lib/lv2',
-                '/usr/lib/lv2',
-                '/Library/Audio/Plug-Ins/LV2']
-    elif conf.env.DEST_OS == 'haiku':
-        return ['~/.lv2',
-                '/boot/common/add-ons/lv2']
-    elif conf.env.DEST_OS == 'win32':
-        return ['%APPDATA%\\\\LV2',
-                '%COMMONPROGRAMFILES%\\\\LV2']
-    else:
-        libdirname = os.path.basename(conf.env.LIBDIR)
-        return ['~/.lv2',
-                '/usr/%s/lv2' % libdirname,
-                '/usr/local/%s/lv2' % libdirname]
-
-def configure(conf):
-    def env_path(parent_dir_var, name):
-        parent = os.getenv(parent_dir_var)
-        if parent:
-            return os.path.join(parent, name)
-        else:
-            Logs.warn('Environment variable %s unset, using LIBDIR\n' % parent_dir_var)
-            return os.path.join(conf.env['LIBDIR'], name)
-
-    def normpath(path):
-        if sys.platform == 'win32':
-            return os.path.normpath(path).replace('\\', '/')
-        else:
-            return os.path.normpath(path)
-
-    if Options.options.lv2dir:
-        conf.env['LV2DIR'] = Options.options.lv2dir
-    elif Options.options.lv2_user:
-        if conf.env.DEST_OS == 'darwin':
-            conf.env['LV2DIR'] = env_path('HOME', 'Library/Audio/Plug-Ins/LV2')
-        elif conf.env.DEST_OS == 'win32':
-            conf.env['LV2DIR'] = env_path('APPDATA', 'LV2')
-        else:
-            conf.env['LV2DIR'] = env_path('HOME', '.lv2')
-    else:
-        if conf.env.DEST_OS == 'darwin':
-            conf.env['LV2DIR'] = '/Library/Audio/Plug-Ins/LV2'
-        elif conf.env.DEST_OS == 'win32':
-            conf.env['LV2DIR'] = env_path('COMMONPROGRAMFILES', 'LV2')
-        else:
-            conf.env['LV2DIR'] = os.path.join(conf.env['LIBDIR'], 'lv2')
-
-    # Add default LV2_PATH to runtime environment for tests that use plugins
-    if 'LV2_PATH' not in os.environ:
-        conf.run_env['LV2_PATH'] = default_lv2_path(conf)
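-
-# A hedged usage sketch ('plugin.c' and 'foo' are placeholder names):
-# installing a plugin bundle into the configured LV2DIR:
-#
-#     def build(bld):
-#         bld.shlib(source='plugin.c', target='foo', install_path='${LV2DIR}/foo.lv2')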
diff --git a/waflib/extras/make.py b/waflib/extras/make.py
deleted file mode 100644
index 933d9ca..0000000
--- a/waflib/extras/make.py
+++ /dev/null
@@ -1,142 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-"""
-A make-like way of executing the build, following the relationships between inputs/outputs
-
-This algorithm will lead to slower builds, will not be as flexible as "waf build", but
-it might be useful for building data files (?)
-
-It is likely to break in the following cases:
-- files are created dynamically (no inputs or outputs)
-- headers
-- building two files from different groups
-"""
-
-import re
-from waflib import Options, Task
-from waflib.Build import BuildContext
-
-class MakeContext(BuildContext):
-	'''executes tasks in a step-by-step manner, following dependencies between inputs/outputs'''
-	cmd = 'make'
-	fun = 'build'
-
-	def __init__(self, **kw):
-		super(MakeContext, self).__init__(**kw)
-		self.files = Options.options.files
-
-	def get_build_iterator(self):
-		if not self.files:
-			# delegate to the default iterator and relay its task batches
-			# (yielding the iterator object itself would execute nothing)
-			for batch in super(MakeContext, self).get_build_iterator():
-				yield batch
-
-		for g in self.groups:
-			for tg in g:
-				try:
-					f = tg.post
-				except AttributeError:
-					pass
-				else:
-					f()
-
-			provides = {}
-			uses = {}
-			all_tasks = []
-			tasks = []
-			for pat in self.files.split(','):
-				matcher = self.get_matcher(pat)
-				for tg in g:
-					if isinstance(tg, Task.Task):
-						lst = [tg]
-					else:
-						lst = tg.tasks
-					for tsk in lst:
-						all_tasks.append(tsk)
-
-						do_exec = False
-						for node in tsk.inputs:
-							try:
-								uses[node].append(tsk)
-							except KeyError:
-								uses[node] = [tsk]
-
-							if matcher(node, output=False):
-								do_exec = True
-								break
-
-						for node in tsk.outputs:
-							try:
-								provides[node].append(tsk)
-							except KeyError:
-								provides[node] = [tsk]
-
-							if matcher(node, output=True):
-								do_exec = True
-								break
-						if do_exec:
-							tasks.append(tsk)
-
-			# so we have the tasks that we need to process, the list of all tasks,
-			# the map of the tasks providing nodes, and the map of tasks using nodes
-
-			if not tasks:
-				# if there are no tasks matching, return everything in the current group
-				result = all_tasks
-			else:
-				# this is like a big filter...
-				result = set()
-				seen = set()
-				cur = set(tasks)
-				while cur:
-					result |= cur
-					tosee = set()
-					for tsk in cur:
-						for node in tsk.inputs:
-							if node in seen:
-								continue
-							seen.add(node)
-							tosee |= set(provides.get(node, []))
-					cur = tosee
-				result = list(result)
-
-			Task.set_file_constraints(result)
-			Task.set_precedence_constraints(result)
-			yield result
-
-		while 1:
-			yield []
-
-	def get_matcher(self, pat):
-		# this returns a function
-		inn = True
-		out = True
-		if pat.startswith('in:'):
-			out = False
-			pat = pat.replace('in:', '')
-		elif pat.startswith('out:'):
-			inn = False
-			pat = pat.replace('out:', '')
-
-		anode = self.root.find_node(pat)
-		pattern = None
-		if not anode:
-			if not pat.startswith('^'):
-				pat = '^.+?%s' % pat
-			if not pat.endswith('$'):
-				pat = '%s$' % pat
-			pattern = re.compile(pat)
-
-		def match(node, output):
-			if output and not out:
-				return False
-			if not output and not inn:
-				return False
-
-			if anode:
-				return anode == node
-			else:
-				return pattern.match(node.abspath())
-		return match
-
diff --git a/waflib/extras/midl.py b/waflib/extras/midl.py
deleted file mode 100644
index 43e6cf9..0000000
--- a/waflib/extras/midl.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python
-# Issue 1185 ultrix gmail com
-
-"""
-Microsoft Interface Definition Language support. Given ComObject.idl, this tool
-will generate ComObject.tlb, ComObject_i.h, ComObject_i.c, ComObject_p.c and dlldata.c
-
-To declare targets using midl::
-
-	def configure(conf):
-		conf.load('msvc')
-		conf.load('midl')
-
-	def build(bld):
-		bld(
-			features='c cshlib',
-			# Note: ComObject_i.c is generated from ComObject.idl
-			source = 'main.c ComObject.idl ComObject_i.c',
-			target = 'ComObject.dll')
-"""
-
-from waflib import Task, Utils
-from waflib.TaskGen import feature, before_method
-import os
-
-def configure(conf):
-	conf.find_program(['midl'], var='MIDL')
-
-	conf.env.MIDLFLAGS = [
-		'/nologo',
-		'/D',
-		'_DEBUG',
-		'/W1',
-		'/char',
-		'signed',
-		'/Oicf',
-	]
-
-@feature('c', 'cxx')
-@before_method('process_source')
-def idl_file(self):
-	# Do this before process_source so that the generated header can be resolved
-	# when scanning source dependencies.
-	idl_nodes = []
-	src_nodes = []
-	for node in Utils.to_list(self.source):
-		if str(node).endswith('.idl'):
-			idl_nodes.append(node)
-		else:
-			src_nodes.append(node)
-
-	for node in self.to_nodes(idl_nodes):
-		t = node.change_ext('.tlb')
-		h = node.change_ext('_i.h')
-		c = node.change_ext('_i.c')
-		p = node.change_ext('_p.c')
-		d = node.parent.find_or_declare('dlldata.c')
-		self.create_task('midl', node, [t, h, c, p, d])
-
-	self.source = src_nodes
-
-class midl(Task.Task):
-	"""
-	Compile idl files
-	"""
-	color   = 'YELLOW'
-	run_str = '${MIDL} ${MIDLFLAGS} ${CPPPATH_ST:INCLUDES} /tlb ${TGT[0].bldpath()} /header ${TGT[1].bldpath()} /iid ${TGT[2].bldpath()} /proxy ${TGT[3].bldpath()} /dlldata ${TGT[4].bldpath()} ${SRC}'
-	before  = ['winrc']
-
diff --git a/waflib/extras/msvcdeps.py b/waflib/extras/msvcdeps.py
deleted file mode 100644
index fc1ecd4..0000000
--- a/waflib/extras/msvcdeps.py
+++ /dev/null
@@ -1,256 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Copyright Garmin International or its subsidiaries, 2012-2013
-
-'''
-Off-load dependency scanning from Python code to MSVC compiler
-
-This tool is safe to load in any environment; it will only activate the
-MSVC-specific behaviour when it finds that a particular taskgen uses
-MSVC to compile.
-
-Empirical testing shows about a 10% execution time savings from using
-this tool as compared to c_preproc.
-
-The technique of gutting scan() and pushing the dependency calculation
-down to post_run() is cribbed from gccdeps.py.
-
-This affects the cxx class, so make sure to load Qt5 after this tool.
-
-Usage::
-
-	def options(opt):
-		opt.load('compiler_cxx')
-	def configure(conf):
-		conf.load('compiler_cxx msvcdeps')
-'''
-
-import os, sys, tempfile, threading
-
-from waflib import Context, Errors, Logs, Task, Utils
-from waflib.Tools import c_preproc, c, cxx, msvc
-from waflib.TaskGen import feature, before_method
-
-lock = threading.Lock()
-nodes = {} # Cache the path -> Node lookup
-
-PREPROCESSOR_FLAG = '/showIncludes'
-INCLUDE_PATTERN = 'Note: including file:'
-
-# Extensible by outside tools
-supported_compilers = ['msvc']
-
-@feature('c', 'cxx')
-@before_method('process_source')
-def apply_msvcdeps_flags(taskgen):
-	if taskgen.env.CC_NAME not in supported_compilers:
-		return
-
-	for flag in ('CFLAGS', 'CXXFLAGS'):
-		if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
-			taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
-
-	# Figure out what casing conventions the user's shell used when
-	# launching Waf
-	(drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath())
-	taskgen.msvcdeps_drive_lowercase = drive == drive.lower()
-
-def path_to_node(base_node, path, cached_nodes):
-	# Take the base node and the path and return a node
-	# Results are cached because searching the node tree is expensive
-	# The following code is executed by threads; it is not thread-safe by itself, so a lock is needed
-	if getattr(path, '__hash__'):
-		node_lookup_key = (base_node, path)
-	else:
-		# Not hashable, assume it is a list and join into a string
-		node_lookup_key = (base_node, os.path.sep.join(path))
-	try:
-		lock.acquire()
-		node = cached_nodes[node_lookup_key]
-	except KeyError:
-		node = base_node.find_resource(path)
-		cached_nodes[node_lookup_key] = node
-	finally:
-		lock.release()
-	return node
-
-def post_run(self):
-	if self.env.CC_NAME not in supported_compilers:
-		return super(self.derived_msvcdeps, self).post_run()
-
-	# TODO this is unlikely to work with netcache
-	if getattr(self, 'cached', None):
-		return Task.Task.post_run(self)
-
-	bld = self.generator.bld
-	unresolved_names = []
-	resolved_nodes = []
-
-	lowercase = self.generator.msvcdeps_drive_lowercase
-	correct_case_path = bld.path.abspath()
-	correct_case_path_len = len(correct_case_path)
-	correct_case_path_norm = os.path.normcase(correct_case_path)
-
-	# Dynamically bind to the cache
-	try:
-		cached_nodes = bld.cached_nodes
-	except AttributeError:
-		cached_nodes = bld.cached_nodes = {}
-
-	for path in self.msvcdeps_paths:
-		node = None
-		if os.path.isabs(path):
-			# Force drive letter to match conventions of main source tree
-			drive, tail = os.path.splitdrive(path)
-
-			if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
-				# Path is in the sandbox, force it to be correct.  MSVC sometimes returns a lowercase path.
-				path = correct_case_path + path[correct_case_path_len:]
-			else:
-				# Check the drive letter
-				if lowercase and (drive != drive.lower()):
-					path = drive.lower() + tail
-				elif (not lowercase) and (drive != drive.upper()):
-					path = drive.upper() + tail
-			node = path_to_node(bld.root, path, cached_nodes)
-		else:
-			base_node = bld.bldnode
-			# when calling find_resource, make sure the path does not begin by '..'
-			path = [k for k in Utils.split_path(path) if k and k != '.']
-			while path[0] == '..':
-				path = path[1:]
-				base_node = base_node.parent
-
-			node = path_to_node(base_node, path, cached_nodes)
-
-		if not node:
-			raise ValueError('could not find %r for %r' % (path, self))
-		else:
-			if not c_preproc.go_absolute:
-				if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
-					# System library
-					Logs.debug('msvcdeps: Ignoring system include %r', node)
-					continue
-
-			if id(node) == id(self.inputs[0]):
-				# Self-dependency
-				continue
-
-			resolved_nodes.append(node)
-
-	bld.node_deps[self.uid()] = resolved_nodes
-	bld.raw_deps[self.uid()] = unresolved_names
-
-	try:
-		del self.cache_sig
-	except AttributeError:
-		pass
-
-	Task.Task.post_run(self)
-
-def scan(self):
-	if self.env.CC_NAME not in supported_compilers:
-		return super(self.derived_msvcdeps, self).scan()
-
-	resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
-	unresolved_names = []
-	return (resolved_nodes, unresolved_names)
-
-def sig_implicit_deps(self):
-	if self.env.CC_NAME not in supported_compilers:
-		return super(self.derived_msvcdeps, self).sig_implicit_deps()
-
-	try:
-		return Task.Task.sig_implicit_deps(self)
-	except Errors.WafError:
-		return Utils.SIG_NIL
-
-def exec_command(self, cmd, **kw):
-	if self.env.CC_NAME not in supported_compilers:
-		return super(self.derived_msvcdeps, self).exec_command(cmd, **kw)
-
-	if not 'cwd' in kw:
-		kw['cwd'] = self.get_cwd()
-
-	if self.env.PATH:
-		env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
-		env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
-
-	# The Visual Studio IDE adds an environment variable that causes
-	# the MS compiler to send its textual output directly to the
-	# debugging window rather than normal stdout/stderr.
-	#
-	# This is unrecoverably bad for this tool because it will cause
-	# all the dependency scanning to see an empty stdout stream and
-	# assume that the file being compiled uses no headers.
-	#
-	# See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
-	#
-	# Attempting to repair the situation by deleting the offending
-	# envvar at this point in tool execution will not be good enough--
-	# its presence poisons the 'waf configure' step earlier. We just
-	# want to put a sanity check here in order to help developers
-	# quickly diagnose the issue if an otherwise-good Waf tree
-	# is then executed inside the MSVS IDE.
-	assert 'VS_UNICODE_OUTPUT' not in kw['env']
-
-	cmd, args = self.split_argfile(cmd)
-	try:
-		(fd, tmp) = tempfile.mkstemp()
-		os.write(fd, '\r\n'.join(args).encode())
-		os.close(fd)
-
-		self.msvcdeps_paths = []
-		kw['env'] = kw.get('env', os.environ.copy())
-		kw['cwd'] = kw.get('cwd', os.getcwd())
-		kw['quiet'] = Context.STDOUT
-		kw['output'] = Context.STDOUT
-
-		out = []
-		if Logs.verbose:
-			Logs.debug('argfile: @%r -> %r', tmp, args)
-		try:
-			raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw)
-			ret = 0
-		except Errors.WafError as e:
-			raw_out = e.stdout
-			ret = e.returncode
-
-		for line in raw_out.splitlines():
-			if line.startswith(INCLUDE_PATTERN):
-				inc_path = line[len(INCLUDE_PATTERN):].strip()
-				Logs.debug('msvcdeps: Regex matched %s', inc_path)
-				self.msvcdeps_paths.append(inc_path)
-			else:
-				out.append(line)
-
-		# Pipe through the remaining stdout content (not related to /showIncludes)
-		if self.generator.bld.logger:
-			self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
-		else:
-			sys.stdout.write(os.linesep.join(out) + os.linesep)
-
-		return ret
-	finally:
-		try:
-			os.remove(tmp)
-		except OSError:
-			# anti-virus and indexers can keep files open -_-
-			pass
-
-
-def wrap_compiled_task(classname):
-	derived_class = type(classname, (Task.classes[classname],), {})
-	derived_class.derived_msvcdeps = derived_class
-	derived_class.post_run = post_run
-	derived_class.scan = scan
-	derived_class.sig_implicit_deps = sig_implicit_deps
-	derived_class.exec_command = exec_command
-
-for k in ('c', 'cxx'):
-	if k in Task.classes:
-		wrap_compiled_task(k)
-
-def options(opt):
-	raise ValueError('Do not load msvcdeps options')
-
diff --git a/waflib/extras/msvs.py b/waflib/extras/msvs.py
deleted file mode 100644
index 8aa2db0..0000000
--- a/waflib/extras/msvs.py
+++ /dev/null
@@ -1,1048 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Avalanche Studios 2009-2011
-# Thomas Nagy 2011
-
-"""
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
-1. Redistributions of source code must retain the above copyright
-   notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
-   notice, this list of conditions and the following disclaimer in the
-   documentation and/or other materials provided with the distribution.
-
-3. The name of the author may not be used to endorse or promote products
-   derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
-IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
-INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
-IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
-"""
-
-"""
-To add this tool to your project:
-def options(opt):
-	opt.load('msvs')
-
-It can be a good idea to add the sync_exec tool too.
-
-To generate solution files:
-$ waf configure msvs
-
-To customize the outputs, provide subclasses in your wscript files::
-
-	from waflib.extras import msvs
-	class vsnode_target(msvs.vsnode_target):
-		def get_build_command(self, props):
-			# likely to be required
-			return "waf.bat build"
-		def collect_source(self):
-			# likely to be required
-			...
-	class msvs_bar(msvs.msvs_generator):
-		def init(self):
-			msvs.msvs_generator.init(self)
-			self.vsnode_target = vsnode_target
-
-The msvs class re-uses the same build() function for reading the targets (task generators),
-so you may specify msvs settings on the context object::
-
-	def build(bld):
-		bld.solution_name = 'foo.sln'
-		bld.waf_command = 'waf.bat'
-		bld.projects_dir = bld.srcnode.make_node('.depproj')
-		bld.projects_dir.mkdir()
-
-For visual studio 2008, the command is called 'msvs2008', and the classes
-such as vsnode_target are wrapped by a decorator class 'wrap_2008' to
-provide special functionality.
-
-To customize platform toolsets, pass additional parameters, for example::
-
-	class msvs_2013(msvs.msvs_generator):
-		cmd = 'msvs2013'
-		numver = '13.00'
-		vsver = '2013'
-		platform_toolset_ver = 'v120'
-
-ASSUMPTIONS:
-* a project can be either a directory or a target; vcxproj files are written only for targets that have source files
-* each project is a vcxproj file, so the project uuid only needs to be a hash of the absolute path
-"""
-
-import os, re, sys
-import uuid # requires python 2.5
-from waflib.Build import BuildContext
-from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
-
-HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
-
-PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
-<Project DefaultTargets="Build" ToolsVersion="4.0"
-	xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-
-	<ItemGroup Label="ProjectConfigurations">
-		${for b in project.build_properties}
-		<ProjectConfiguration Include="${b.configuration}|${b.platform}">
-			<Configuration>${b.configuration}</Configuration>
-			<Platform>${b.platform}</Platform>
-		</ProjectConfiguration>
-		${endfor}
-	</ItemGroup>
-
-	<PropertyGroup Label="Globals">
-		<ProjectGuid>{${project.uuid}}</ProjectGuid>
-		<Keyword>MakeFileProj</Keyword>
-		<ProjectName>${project.name}</ProjectName>
-	</PropertyGroup>
-	<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
-
-	${for b in project.build_properties}
-	<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'" Label="Configuration">
-		<ConfigurationType>Makefile</ConfigurationType>
-		<OutDir>${b.outdir}</OutDir>
-		<PlatformToolset>${project.platform_toolset_ver}</PlatformToolset>
-	</PropertyGroup>
-	${endfor}
-
-	<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
-	<ImportGroup Label="ExtensionSettings">
-	</ImportGroup>
-
-	${for b in project.build_properties}
-	<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
-		<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
-	</ImportGroup>
-	${endfor}
-
-	${for b in project.build_properties}
-	<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
-		<NMakeBuildCommandLine>${xml:project.get_build_command(b)}</NMakeBuildCommandLine>
-		<NMakeReBuildCommandLine>${xml:project.get_rebuild_command(b)}</NMakeReBuildCommandLine>
-		<NMakeCleanCommandLine>${xml:project.get_clean_command(b)}</NMakeCleanCommandLine>
-		<NMakeIncludeSearchPath>${xml:b.includes_search_path}</NMakeIncludeSearchPath>
-		<NMakePreprocessorDefinitions>${xml:b.preprocessor_definitions};$(NMakePreprocessorDefinitions)</NMakePreprocessorDefinitions>
-		<IncludePath>${xml:b.includes_search_path}</IncludePath>
-		<ExecutablePath>$(ExecutablePath)</ExecutablePath>
-
-		${if getattr(b, 'output_file', None)}
-		<NMakeOutput>${xml:b.output_file}</NMakeOutput>
-		${endif}
-		${if getattr(b, 'deploy_dir', None)}
-		<RemoteRoot>${xml:b.deploy_dir}</RemoteRoot>
-		${endif}
-	</PropertyGroup>
-	${endfor}
-
-	${for b in project.build_properties}
-		${if getattr(b, 'deploy_dir', None)}
-	<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
-		<Deploy>
-			<DeploymentType>CopyToHardDrive</DeploymentType>
-		</Deploy>
-	</ItemDefinitionGroup>
-		${endif}
-	${endfor}
-
-	<ItemGroup>
-		${for x in project.source}
-		<${project.get_key(x)} Include='${x.win32path()}' />
-		${endfor}
-	</ItemGroup>
-	<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
-	<ImportGroup Label="ExtensionTargets">
-	</ImportGroup>
-</Project>
-'''
-
-FILTER_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
-<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-	<ItemGroup>
-		${for x in project.source}
-			<${project.get_key(x)} Include="${x.win32path()}">
-				<Filter>${project.get_filter_name(x.parent)}</Filter>
-			</${project.get_key(x)}>
-		${endfor}
-	</ItemGroup>
-	<ItemGroup>
-		${for x in project.dirs()}
-			<Filter Include="${project.get_filter_name(x)}">
-				<UniqueIdentifier>{${project.make_uuid(x.win32path())}}</UniqueIdentifier>
-			</Filter>
-		${endfor}
-	</ItemGroup>
-</Project>
-'''
-
-PROJECT_2008_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
-<VisualStudioProject ProjectType="Visual C++" Version="9,00"
-	Name="${xml: project.name}" ProjectGUID="{${project.uuid}}"
-	Keyword="MakeFileProj"
-	TargetFrameworkVersion="196613">
-	<Platforms>
-		${if project.build_properties}
-		${for b in project.build_properties}
-		   <Platform Name="${xml: b.platform}" />
-		${endfor}
-		${else}
-		   <Platform Name="Win32" />
-		${endif}
-	</Platforms>
-	<ToolFiles>
-	</ToolFiles>
-	<Configurations>
-		${if project.build_properties}
-		${for b in project.build_properties}
-		<Configuration
-			Name="${xml: b.configuration}|${xml: b.platform}"
-			IntermediateDirectory="$ConfigurationName"
-			OutputDirectory="${xml: b.outdir}"
-			ConfigurationType="0">
-			<Tool
-				Name="VCNMakeTool"
-				BuildCommandLine="${xml: project.get_build_command(b)}"
-				ReBuildCommandLine="${xml: project.get_rebuild_command(b)}"
-				CleanCommandLine="${xml: project.get_clean_command(b)}"
-				${if getattr(b, 'output_file', None)}
-				Output="${xml: b.output_file}"
-				${endif}
-				PreprocessorDefinitions="${xml: b.preprocessor_definitions}"
-				IncludeSearchPath="${xml: b.includes_search_path}"
-				ForcedIncludes=""
-				ForcedUsingAssemblies=""
-				AssemblySearchPath=""
-				CompileAsManaged=""
-			/>
-		</Configuration>
-		${endfor}
-		${else}
-			<Configuration Name="Release|Win32" >
-		</Configuration>
-		${endif}
-	</Configurations>
-	<References>
-	</References>
-	<Files>
-${project.display_filter()}
-	</Files>
-</VisualStudioProject>
-'''
-
-SOLUTION_TEMPLATE = '''Microsoft Visual Studio Solution File, Format Version ${project.numver}
-# Visual Studio ${project.vsver}
-${for p in project.all_projects}
-Project("{${p.ptype()}}") = "${p.name}", "${p.title}", "{${p.uuid}}"
-EndProject${endfor}
-Global
-	GlobalSection(SolutionConfigurationPlatforms) = preSolution
-		${if project.all_projects}
-		${for (configuration, platform) in project.all_projects[0].ctx.project_configurations()}
-		${configuration}|${platform} = ${configuration}|${platform}
-		${endfor}
-		${endif}
-	EndGlobalSection
-	GlobalSection(ProjectConfigurationPlatforms) = postSolution
-		${for p in project.all_projects}
-			${if hasattr(p, 'source')}
-			${for b in p.build_properties}
-		{${p.uuid}}.${b.configuration}|${b.platform}.ActiveCfg = ${b.configuration}|${b.platform}
-			${if getattr(p, 'is_active', None)}
-		{${p.uuid}}.${b.configuration}|${b.platform}.Build.0 = ${b.configuration}|${b.platform}
-			${endif}
-			${if getattr(p, 'is_deploy', None)}
-		{${p.uuid}}.${b.configuration}|${b.platform}.Deploy.0 = ${b.configuration}|${b.platform}
-			${endif}
-			${endfor}
-			${endif}
-		${endfor}
-	EndGlobalSection
-	GlobalSection(SolutionProperties) = preSolution
-		HideSolutionNode = FALSE
-	EndGlobalSection
-	GlobalSection(NestedProjects) = preSolution
-	${for p in project.all_projects}
-		${if p.parent}
-		{${p.uuid}} = {${p.parent.uuid}}
-		${endif}
-	${endfor}
-	EndGlobalSection
-EndGlobal
-'''
-
-COMPILE_TEMPLATE = '''def f(project):
-	lst = []
-	def xml_escape(value):
-		return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
-	%s
-
-	#f = open('cmd.txt', 'w')
-	#f.write(str(lst))
-	#f.close()
-	return ''.join(lst)
-'''
-reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
-def compile_template(line):
-	"""
-	Compile a template expression into a python function (like jsps, but way shorter)
-	"""
-	extr = []
-	def repl(match):
-		g = match.group
-		if g('dollar'):
-			return "$"
-		elif g('backslash'):
-			return "\\"
-		elif g('subst'):
-			extr.append(g('code'))
-			return "<<|@|>>"
-		return None
-
-	line2 = reg_act.sub(repl, line)
-	params = line2.split('<<|@|>>')
-	assert(extr)
-
-
-	indent = 0
-	buf = []
-
-	def app(txt):
-		buf.append(indent * '\t' + txt)
-
-	for x in range(len(extr)):
-		if params[x]:
-			app("lst.append(%r)" % params[x])
-
-		f = extr[x]
-		if f.startswith(('if', 'for')):
-			app(f + ':')
-			indent += 1
-		elif f.startswith('py:'):
-			app(f[3:])
-		elif f.startswith(('endif', 'endfor')):
-			indent -= 1
-		elif f.startswith(('else', 'elif')):
-			indent -= 1
-			app(f + ':')
-			indent += 1
-		elif f.startswith('xml:'):
-			app('lst.append(xml_escape(%s))' % f[4:])
-		else:
-			#app('lst.append((%s) or "cannot find %s")' % (f, f))
-			app('lst.append(%s)' % f)
-
-	if extr:
-		if params[-1]:
-			app("lst.append(%r)" % params[-1])
-
-	fun = COMPILE_TEMPLATE % "\n\t".join(buf)
-	#print(fun)
-	return Task.funex(fun)
-
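A usage sketch of the template mini-language, assuming this module is importable (so that compile_template and Task.funex are available) and using types.SimpleNamespace (Python 3) as a stand-in for the real project object::

	from types import SimpleNamespace

	tmpl = compile_template('${for x in project.items}<i>${xml: x}</i>${endfor}')
	print(tmpl(SimpleNamespace(items=['a<b', 'c'])))  # <i>a&lt;b</i><i>c</i>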
-
-re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
-def rm_blank_lines(txt):
-	txt = re_blank.sub('\r\n', txt)
-	return txt
-
-BOM = '\xef\xbb\xbf'
-try:
-	BOM = bytes(BOM, 'latin-1') # python 3
-except TypeError:
-	pass
-
-def stealth_write(self, data, flags='wb'):
-	try:
-		unicode
-	except NameError:
-		data = data.encode('utf-8') # python 3
-	else:
-		data = data.decode(sys.getfilesystemencoding(), 'replace')
-		data = data.encode('utf-8')
-
-	if self.name.endswith(('.vcproj', '.vcxproj')):
-		data = BOM + data
-
-	try:
-		txt = self.read(flags='rb')
-		if txt != data:
-			raise ValueError('must write')
-	except (IOError, ValueError):
-		self.write(data, flags=flags)
-	else:
-		Logs.debug('msvs: skipping %s', self.win32path())
-Node.Node.stealth_write = stealth_write
-
-re_win32 = re.compile(r'^([/\\]cygdrive)?[/\\]([a-z])([^a-z0-9_-].*)', re.I)
-def win32path(self):
-	p = self.abspath()
-	m = re_win32.match(p)
-	if m:
-		return "%s:%s" % (m.group(2).upper(), m.group(3))
-	return p
-Node.Node.win32path = win32path
-
-re_quote = re.compile("[^a-zA-Z0-9-]")
-def quote(s):
-	return re_quote.sub("_", s)
-
-def xml_escape(value):
-	return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
-def make_uuid(v, prefix = None):
-	"""
-	simple utility function
-	"""
-	if isinstance(v, dict):
-		keys = list(v.keys())
-		keys.sort()
-		tmp = str([(k, v[k]) for k in keys])
-	else:
-		tmp = str(v)
-	d = Utils.md5(tmp.encode()).hexdigest().upper()
-	if prefix:
-		d = '%s%s' % (prefix, d[8:])
-	gid = uuid.UUID(d, version = 4)
-	return str(gid).upper()
-
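The point of hashing instead of calling uuid.uuid4() is determinism: the same input always yields the same GUID, so regenerating the solution does not make Visual Studio treat every project as new. A quick sketch (the path is a placeholder)::

	print(make_uuid('C:\\projects\\foo.vcxproj'))
	print(make_uuid('C:\\projects\\foo.vcxproj'))  # same GUID both times
	print(make_uuid({'b': 2, 'a': 1}) == make_uuid({'a': 1, 'b': 2}))  # True, keys are sorted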
-def diff(node, fromnode):
-	# difference between two nodes, but with "(..)" instead of ".."
-	c1 = node
-	c2 = fromnode
-
-	c1h = c1.height()
-	c2h = c2.height()
-
-	lst = []
-	up = 0
-
-	while c1h > c2h:
-		lst.append(c1.name)
-		c1 = c1.parent
-		c1h -= 1
-
-	while c2h > c1h:
-		up += 1
-		c2 = c2.parent
-		c2h -= 1
-
-	while id(c1) != id(c2):
-		lst.append(c1.name)
-		up += 1
-
-		c1 = c1.parent
-		c2 = c2.parent
-
-	for i in range(up):
-		lst.append('(..)')
-	lst.reverse()
-	return tuple(lst)
-
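A sketch of what diff() computes, using a minimal stand-in for waf Node objects (only name, parent and height() are needed here)::

	class N(object):
		def __init__(self, name, parent=None):
			self.name, self.parent = name, parent
		def height(self):
			return self.parent.height() + 1 if self.parent else 0

	root = N('proj')
	src = N('src', root)
	include = N('include', root)
	print(diff(N('foo.h', include), src))  # ('(..)', 'include', 'foo.h')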
-class build_property(object):
-	pass
-
-class vsnode(object):
-	"""
-	Abstract class representing visual studio elements
-	We assume that all visual studio nodes have a uuid and a parent
-	"""
-	def __init__(self, ctx):
-		self.ctx = ctx # msvs context
-		self.name = '' # string, mandatory
-		self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
-		self.uuid = '' # string, mandatory
-		self.parent = None # parent node for visual studio nesting
-
-	def get_waf(self):
-		"""
-		Override in subclasses...
-		"""
-		return 'cd /d "%s" & %s' % (self.ctx.srcnode.win32path(), getattr(self.ctx, 'waf_command', 'waf.bat'))
-
-	def ptype(self):
-		"""
-		Return a special uuid for projects written in the solution file
-		"""
-		pass
-
-	def write(self):
-		"""
-		Write the project file, by default, do nothing
-		"""
-		pass
-
-	def make_uuid(self, val):
-		"""
-		Alias for creating uuid values easily (the templates cannot access global variables)
-		"""
-		return make_uuid(val)
-
-class vsnode_vsdir(vsnode):
-	"""
-	Nodes representing visual studio folders (which do not match the filesystem tree!)
-	"""
-	VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
-	def __init__(self, ctx, uuid, name, vspath=''):
-		vsnode.__init__(self, ctx)
-		self.title = self.name = name
-		self.uuid = uuid
-		self.vspath = vspath or name
-
-	def ptype(self):
-		return self.VS_GUID_SOLUTIONFOLDER
-
-class vsnode_project(vsnode):
-	"""
-	Abstract class representing visual studio project elements
-	A project is assumed to be writable, and has a node representing the file to write to
-	"""
-	VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
-	def ptype(self):
-		return self.VS_GUID_VCPROJ
-
-	def __init__(self, ctx, node):
-		vsnode.__init__(self, ctx)
-		self.path = node
-		self.uuid = make_uuid(node.win32path())
-		self.name = node.name
-		self.platform_toolset_ver = getattr(ctx, 'platform_toolset_ver', None)
-		self.title = self.path.win32path()
-		self.source = [] # list of node objects
-		self.build_properties = [] # list of properties (nmake commands, output dir, etc)
-
-	def dirs(self):
-		"""
-		Get the list of parent folders of the source files (header files included)
-		for writing the filters
-		"""
-		lst = []
-		def add(x):
-			if x.height() > self.tg.path.height() and x not in lst:
-				lst.append(x)
-				add(x.parent)
-		for x in self.source:
-			add(x.parent)
-		return lst
-
-	def write(self):
-		Logs.debug('msvs: creating %r', self.path)
-
-		# first write the project file
-		template1 = compile_template(PROJECT_TEMPLATE)
-		proj_str = template1(self)
-		proj_str = rm_blank_lines(proj_str)
-		self.path.stealth_write(proj_str)
-
-		# then write the filter
-		template2 = compile_template(FILTER_TEMPLATE)
-		filter_str = template2(self)
-		filter_str = rm_blank_lines(filter_str)
-		tmp = self.path.parent.make_node(self.path.name + '.filters')
-		tmp.stealth_write(filter_str)
-
-	def get_key(self, node):
-		"""
-		required for writing the source files
-		"""
-		name = node.name
-		if name.endswith(('.cpp', '.c')):
-			return 'ClCompile'
-		return 'ClInclude'
-
-	def collect_properties(self):
-		"""
-		Returns a list of triplet (configuration, platform, output_directory)
-		"""
-		ret = []
-		for c in self.ctx.configurations:
-			for p in self.ctx.platforms:
-				x = build_property()
-				x.outdir = ''
-
-				x.configuration = c
-				x.platform = p
-
-				x.preprocessor_definitions = ''
-				x.includes_search_path = ''
-
-				# can specify "deploy_dir" too
-				ret.append(x)
-		self.build_properties = ret
-
-	def get_build_params(self, props):
-		opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path()
-		return (self.get_waf(), opt)
-
-	def get_build_command(self, props):
-		return "%s build %s" % self.get_build_params(props)
-
-	def get_clean_command(self, props):
-		return "%s clean %s" % self.get_build_params(props)
-
-	def get_rebuild_command(self, props):
-		return "%s clean build %s" % self.get_build_params(props)
-
-	def get_filter_name(self, node):
-		lst = diff(node, self.tg.path)
-		return '\\'.join(lst) or '.'
-
-class vsnode_alias(vsnode_project):
-	def __init__(self, ctx, node, name):
-		vsnode_project.__init__(self, ctx, node)
-		self.name = name
-		self.output_file = ''
-
-class vsnode_build_all(vsnode_alias):
-	"""
-	Fake target used to emulate the behaviour of "make all" (starting one process per target is slow)
-	This is the only alias enabled by default
-	"""
-	def __init__(self, ctx, node, name='build_all_projects'):
-		vsnode_alias.__init__(self, ctx, node, name)
-		self.is_active = True
-
-class vsnode_install_all(vsnode_alias):
-	"""
-	Fake target used to emulate the behaviour of "make install"
-	"""
-	def __init__(self, ctx, node, name='install_all_projects'):
-		vsnode_alias.__init__(self, ctx, node, name)
-
-	def get_build_command(self, props):
-		return "%s build install %s" % self.get_build_params(props)
-
-	def get_clean_command(self, props):
-		return "%s clean %s" % self.get_build_params(props)
-
-	def get_rebuild_command(self, props):
-		return "%s clean build install %s" % self.get_build_params(props)
-
-class vsnode_project_view(vsnode_alias):
-	"""
-	Fake target used to emulate a file system view
-	"""
-	def __init__(self, ctx, node, name='project_view'):
-		vsnode_alias.__init__(self, ctx, node, name)
-		self.tg = self.ctx() # fake one, cannot remove
-		self.exclude_files = Node.exclude_regs + '''
-waf-2*
-waf3-2*/**
-.waf-2*
-.waf3-2*/**
-**/*.sdf
-**/*.suo
-**/*.ncb
-**/%s
-		''' % Options.lockfile
-
-	def collect_source(self):
-		# this is likely to be slow
-		self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
-
-	def get_build_command(self, props):
-		params = self.get_build_params(props) + (self.ctx.cmd,)
-		return "%s %s %s" % params
-
-	def get_clean_command(self, props):
-		return ""
-
-	def get_rebuild_command(self, props):
-		return self.get_build_command(props)
-
-class vsnode_target(vsnode_project):
-	"""
-	Visual studio project representing a target (program, library, etc.) and bound
-	to a task generator
-	"""
-	def __init__(self, ctx, tg):
-		"""
-		A project is more or less equivalent to a file/folder
-		"""
-		base = getattr(ctx, 'projects_dir', None) or tg.path
-		node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
-		vsnode_project.__init__(self, ctx, node)
-		self.name = quote(tg.name)
-		self.tg     = tg  # task generator
-
-	def get_build_params(self, props):
-		"""
-		Override the default to add the target name
-		"""
-		opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path()
-		if getattr(self, 'tg', None):
-			opt += " --targets=%s" % self.tg.name
-		return (self.get_waf(), opt)
-
-	def collect_source(self):
-		tg = self.tg
-		source_files = tg.to_nodes(getattr(tg, 'source', []))
-		include_dirs = Utils.to_list(getattr(tg, 'msvs_includes', []))
-		include_files = []
-		for x in include_dirs:
-			if isinstance(x, str):
-				x = tg.path.find_node(x)
-			if x:
-				lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
-				include_files.extend(lst)
-
-		# remove duplicates
-		self.source.extend(list(set(source_files + include_files)))
-		self.source.sort(key=lambda x: x.win32path())
-
-	def collect_properties(self):
-		"""
-		Visual studio projects are associated with platforms and configurations (for building especially)
-		"""
-		super(vsnode_target, self).collect_properties()
-		for x in self.build_properties:
-			x.outdir = self.path.parent.win32path()
-			x.preprocessor_definitions = ''
-			x.includes_search_path = ''
-
-			try:
-				tsk = self.tg.link_task
-			except AttributeError:
-				pass
-			else:
-				x.output_file = tsk.outputs[0].win32path()
-				x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
-				x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
-
-class msvs_generator(BuildContext):
-	'''generates a visual studio 2010 solution'''
-	cmd = 'msvs'
-	fun = 'build'
-	numver = '11.00' # Visual Studio Version Number
-	vsver  = '2010'  # Visual Studio Version Year
-	platform_toolset_ver = 'v110' # Platform Toolset Version Number
-
-	def init(self):
-		"""
-		Some data that needs to be present
-		"""
-		if not getattr(self, 'configurations', None):
-			self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
-		if not getattr(self, 'platforms', None):
-			self.platforms = ['Win32']
-		if not getattr(self, 'all_projects', None):
-			self.all_projects = []
-		if not getattr(self, 'project_extension', None):
-			self.project_extension = '.vcxproj'
-		if not getattr(self, 'projects_dir', None):
-			self.projects_dir = self.srcnode.make_node('.depproj')
-			self.projects_dir.mkdir()
-
-		# bind the classes to the object, so that subclass can provide custom generators
-		if not getattr(self, 'vsnode_vsdir', None):
-			self.vsnode_vsdir = vsnode_vsdir
-		if not getattr(self, 'vsnode_target', None):
-			self.vsnode_target = vsnode_target
-		if not getattr(self, 'vsnode_build_all', None):
-			self.vsnode_build_all = vsnode_build_all
-		if not getattr(self, 'vsnode_install_all', None):
-			self.vsnode_install_all = vsnode_install_all
-		if not getattr(self, 'vsnode_project_view', None):
-			self.vsnode_project_view = vsnode_project_view
-
-		self.numver = self.__class__.numver
-		self.vsver  = self.__class__.vsver
-		self.platform_toolset_ver = self.__class__.platform_toolset_ver
-
-	def execute(self):
-		"""
-		Entry point
-		"""
-		self.restore()
-		if not self.all_envs:
-			self.load_envs()
-		self.recurse([self.run_dir])
-
-		# user initialization
-		self.init()
-
-		# two phases for creating the solution
-		self.collect_projects() # add project objects into "self.all_projects"
-		self.write_files() # write the corresponding project and solution files
-
-	def collect_projects(self):
-		"""
-		Fill the list self.all_projects with project objects
-		Fill the list of build targets
-		"""
-		self.collect_targets()
-		self.add_aliases()
-		self.collect_dirs()
-		default_project = getattr(self, 'default_project', None)
-		def sortfun(x):
-			if x.name == default_project:
-				return ''
-			return getattr(x, 'path', None) and x.path.win32path() or x.name
-		self.all_projects.sort(key=sortfun)
-
-	def write_files(self):
-		"""
-		Write the project and solution files from the data collected
-		so far. It is unlikely that you will want to change this
-		"""
-		for p in self.all_projects:
-			p.write()
-
-		# and finally write the solution file
-		node = self.get_solution_node()
-		node.parent.mkdir()
-		Logs.warn('Creating %r', node)
-		template1 = compile_template(SOLUTION_TEMPLATE)
-		sln_str = template1(self)
-		sln_str = rm_blank_lines(sln_str)
-		node.stealth_write(sln_str)
-
-	def get_solution_node(self):
-		"""
-		The solution filename is required when writing the .vcproj files.
-		Return self.solution_node, creating it if it does not exist.
-		"""
-		try:
-			return self.solution_node
-		except AttributeError:
-			pass
-
-		solution_name = getattr(self, 'solution_name', None)
-		if not solution_name:
-			solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.sln'
-		if os.path.isabs(solution_name):
-			self.solution_node = self.root.make_node(solution_name)
-		else:
-			self.solution_node = self.srcnode.make_node(solution_name)
-		return self.solution_node
-
-	def project_configurations(self):
-		"""
-		Helper that returns all the pairs (config,platform)
-		"""
-		ret = []
-		for c in self.configurations:
-			for p in self.platforms:
-				ret.append((c, p))
-		return ret
-
-	def collect_targets(self):
-		"""
-		Process the list of task generators
-		"""
-		for g in self.groups:
-			for tg in g:
-				if not isinstance(tg, TaskGen.task_gen):
-					continue
-
-				if not hasattr(tg, 'msvs_includes'):
-					tg.msvs_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
-				tg.post()
-				if not getattr(tg, 'link_task', None):
-					continue
-
-				p = self.vsnode_target(self, tg)
-				p.collect_source() # delegate this processing
-				p.collect_properties()
-				self.all_projects.append(p)
-
-	def add_aliases(self):
-		"""
-		Add a specific target that emulates the "make all" Visual Studio expects when pressing F7
-		We also add an alias for "make install" (disabled by default)
-		"""
-		base = getattr(self, 'projects_dir', None) or self.tg.path
-
-		node_project = base.make_node('build_all_projects' + self.project_extension) # Node
-		p_build = self.vsnode_build_all(self, node_project)
-		p_build.collect_properties()
-		self.all_projects.append(p_build)
-
-		node_project = base.make_node('install_all_projects' + self.project_extension) # Node
-		p_install = self.vsnode_install_all(self, node_project)
-		p_install.collect_properties()
-		self.all_projects.append(p_install)
-
-		node_project = base.make_node('project_view' + self.project_extension) # Node
-		p_view = self.vsnode_project_view(self, node_project)
-		p_view.collect_source()
-		p_view.collect_properties()
-		self.all_projects.append(p_view)
-
-		n = self.vsnode_vsdir(self, make_uuid(self.srcnode.win32path() + 'build_aliases'), "build_aliases")
-		p_build.parent = p_install.parent = p_view.parent = n
-		self.all_projects.append(n)
-
-	def collect_dirs(self):
-		"""
-		Create the folder structure in the Visual studio project view
-		"""
-		seen = {}
-		def make_parents(proj):
-			# look at a project, try to make a parent
-			if getattr(proj, 'parent', None):
-				# aliases already have parents
-				return
-			x = proj.iter_path
-			if x in seen:
-				proj.parent = seen[x]
-				return
-
-			# There is no vsnode_vsdir for x yet,
-			# so create a project representing the folder "x"
-			n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.win32path()), x.name)
-			n.iter_path = x.parent
-			self.all_projects.append(n)
-
-			# recurse up to the project directory
-			if x.height() > self.srcnode.height() + 1:
-				make_parents(n)
-
-		for p in self.all_projects[:]: # iterate over a copy of all projects
-			if not getattr(p, 'tg', None):
-				# but only projects that have a task generator
-				continue
-
-			# make a folder for each task generator
-			p.iter_path = p.tg.path
-			make_parents(p)
-
-def wrap_2008(cls):
-	class dec(cls):
-		def __init__(self, *k, **kw):
-			cls.__init__(self, *k, **kw)
-			self.project_template = PROJECT_2008_TEMPLATE
-
-		def display_filter(self):
-
-			root = build_property()
-			root.subfilters = []
-			root.sourcefiles = []
-			root.source = []
-			root.name = ''
-
-			@Utils.run_once
-			def add_path(lst):
-				if not lst:
-					return root
-				child = build_property()
-				child.subfilters = []
-				child.sourcefiles = []
-				child.source = []
-				child.name = lst[-1]
-
-				par = add_path(lst[:-1])
-				par.subfilters.append(child)
-				return child
-
-			for x in self.source:
-				# this indirection allows subclasses to override get_filter_name
-				tmp = self.get_filter_name(x.parent)
-				tmp = tmp != '.' and tuple(tmp.split('\\')) or ()
-				par = add_path(tmp)
-				par.source.append(x)
-
-			def display(n):
-				buf = []
-				for x in n.source:
-					buf.append('<File RelativePath="%s" FileType="%s"/>\n' % (xml_escape(x.win32path()), self.get_key(x)))
-				for x in n.subfilters:
-					buf.append('<Filter Name="%s">' % xml_escape(x.name))
-					buf.append(display(x))
-					buf.append('</Filter>')
-				return '\n'.join(buf)
-
-			return display(root)
-
-		def get_key(self, node):
-			"""
-			If you do not want to let visual studio use the default file extensions,
-			override this method to return a value:
-				0: C/C++ Code, 1: C++ Class, 2: C++ Header File, 3: C++ Form,
-				4: C++ Control, 5: Text File, 6: DEF File, 7: IDL File,
-				8: Makefile, 9: RGS File, 10: RC File, 11: RES File, 12: XSD File,
-				13: XML File, 14: HTML File, 15: CSS File, 16: Bitmap, 17: Icon,
-				18: Resx File, 19: BSC File, 20: XSX File, 21: C++ Web Service,
-				22: ASAX File, 23: Asp Page, 24: Document, 25: Discovery File,
-				26: C# File, 27: eFileTypeClassDiagram, 28: MHTML Document,
-				29: Property Sheet, 30: Cursor, 31: Manifest, 32: eFileTypeRDLC
-			"""
-			return ''
-
-		def write(self):
-			Logs.debug('msvs: creating %r', self.path)
-			template1 = compile_template(self.project_template)
-			proj_str = template1(self)
-			proj_str = rm_blank_lines(proj_str)
-			self.path.stealth_write(proj_str)
-
-	return dec
-
-class msvs_2008_generator(msvs_generator):
-	'''generates a visual studio 2008 solution'''
-	cmd = 'msvs2008'
-	fun = msvs_generator.fun
-	numver = '10.00'
-	vsver = '2008'
-
-	def init(self):
-		if not getattr(self, 'project_extension', None):
-			self.project_extension = '_2008.vcproj'
-		if not getattr(self, 'solution_name', None):
-			self.solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '_2008.sln'
-
-		if not getattr(self, 'vsnode_target', None):
-			self.vsnode_target = wrap_2008(vsnode_target)
-		if not getattr(self, 'vsnode_build_all', None):
-			self.vsnode_build_all = wrap_2008(vsnode_build_all)
-		if not getattr(self, 'vsnode_install_all', None):
-			self.vsnode_install_all = wrap_2008(vsnode_install_all)
-		if not getattr(self, 'vsnode_project_view', None):
-			self.vsnode_project_view = wrap_2008(vsnode_project_view)
-
-		msvs_generator.init(self)
-
-def options(ctx):
-	"""
-	If the msvs option is used, try to detect if the build is made from visual studio
-	"""
-	ctx.add_option('--execsolution', action='store', help='when building with visual studio, use a build state file')
-
-	old = BuildContext.execute
-	def override_build_state(ctx):
-		def lock(rm, add):
-			uns = ctx.options.execsolution.replace('.sln', rm)
-			uns = ctx.root.make_node(uns)
-			try:
-				uns.delete()
-			except OSError:
-				pass
-
-			uns = ctx.options.execsolution.replace('.sln', add)
-			uns = ctx.root.make_node(uns)
-			try:
-				uns.write('')
-			except EnvironmentError:
-				pass
-
-		if ctx.options.execsolution:
-			ctx.launch_dir = Context.top_dir # force a build for the whole project (invalid cwd when called by visual studio)
-			lock('.lastbuildstate', '.unsuccessfulbuild')
-			old(ctx)
-			lock('.unsuccessfulbuild', '.lastbuildstate')
-		else:
-			old(ctx)
-	BuildContext.execute = override_build_state
-
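The lock() helper above derives two marker files from the solution path, so Visual Studio can tell whether the last build succeeded. A sketch of the name derivation (the path is a placeholder)::

	sln = 'C:/proj/foo.sln'
	print(sln.replace('.sln', '.lastbuildstate'))     # C:/proj/foo.lastbuildstate
	print(sln.replace('.sln', '.unsuccessfulbuild'))  # C:/proj/foo.unsuccessfulbuild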
diff --git a/waflib/extras/netcache_client.py b/waflib/extras/netcache_client.py
deleted file mode 100644
index dc49048..0000000
--- a/waflib/extras/netcache_client.py
+++ /dev/null
@@ -1,390 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011-2015 (ita)
-
-"""
-A client for the network cache (playground/netcache/). Launch the server with:
-./netcache_server, then use it for the builds by adding the following:
-
-	def build(bld):
-		bld.load('netcache_client')
-
-The parameters should be present in the environment in the form:
-	NETCACHE=host:port waf configure build
-
-Or in a more detailed way:
-	NETCACHE_PUSH=host:port NETCACHE_PULL=host:port waf configure build
-
-where:
-	host: host where the server resides, by default localhost
-	port: by default push on 11001 and pull on 12001
-
-Use the server provided in playground/netcache/Netcache.java
-"""
-
-import os, socket, time, atexit, sys
-from waflib import Task, Logs, Utils, Build, Runner
-from waflib.Configure import conf
-
-BUF = 8192 * 16
-HEADER_SIZE = 128
-MODES = ['PUSH', 'PULL', 'PUSH_PULL']
-STALE_TIME = 30 # seconds
-
-GET = 'GET'
-PUT = 'PUT'
-LST = 'LST'
-BYE = 'BYE'
-
-all_sigs_in_cache = (0.0, [])
-
-def put_data(conn, data):
-	if sys.hexversion > 0x3000000:
-		data = data.encode('latin-1')
-	cnt = 0
-	while cnt < len(data):
-		sent = conn.send(data[cnt:])
-		if sent == 0:
-			raise RuntimeError('connection ended')
-		cnt += sent
-
-push_connections = Runner.Queue(0)
-pull_connections = Runner.Queue(0)
-def get_connection(push=False):
-	# return a new connection... do not forget to release it!
-	try:
-		if push:
-			ret = push_connections.get(block=False)
-		else:
-			ret = pull_connections.get(block=False)
-	except Exception:
-		ret = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-		if push:
-			ret.connect(Task.push_addr)
-		else:
-			ret.connect(Task.pull_addr)
-	return ret
-
-def release_connection(conn, msg='', push=False):
-	if conn:
-		if push:
-			push_connections.put(conn)
-		else:
-			pull_connections.put(conn)
-
-def close_connection(conn, msg=''):
-	if conn:
-		data = '%s,%s' % (BYE, msg)
-		try:
-			put_data(conn, data.ljust(HEADER_SIZE))
-		except:
-			pass
-		try:
-			conn.close()
-		except:
-			pass
-
-def close_all():
-	for q in (push_connections, pull_connections):
-		while q.qsize():
-			conn = q.get()
-			try:
-				close_connection(conn)
-			except:
-				# ignore errors when cleaning up
-				pass
-atexit.register(close_all)
-
-def read_header(conn):
-	cnt = 0
-	buf = []
-	while cnt < HEADER_SIZE:
-		data = conn.recv(HEADER_SIZE - cnt)
-		if not data:
-			#import traceback
-			#traceback.print_stack()
-			raise ValueError('connection ended when reading a header %r' % buf)
-		buf.append(data)
-		cnt += len(data)
-	if sys.hexversion > 0x3000000:
-		ret = ''.encode('latin-1').join(buf)
-		ret = ret.decode('latin-1')
-	else:
-		ret = ''.join(buf)
-	return ret
-
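Every message starts with a fixed-size comma-separated header padded to HEADER_SIZE bytes. A sketch of the framing using a local socket pair (assumes a platform where socket.socketpair() is available)::

	a, b = socket.socketpair()
	put_data(a, ','.join((GET, 'deadbeef', '0')).ljust(HEADER_SIZE))
	fields = read_header(b).split(',')
	print(fields[0], fields[1], fields[2].strip())  # GET deadbeef 0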
-def check_cache(conn, ssig):
-	"""
-	List the files on the server; this is an optimization that assumes
-	concurrent builds are rare
-	"""
-	global all_sigs_in_cache
-	if not STALE_TIME:
-		return
-	if time.time() - all_sigs_in_cache[0] > STALE_TIME:
-
-		params = (LST,'')
-		put_data(conn, ','.join(params).ljust(HEADER_SIZE))
-
-		# read what is coming back
-		ret = read_header(conn)
-		size = int(ret.split(',')[0])
-
-		buf = []
-		cnt = 0
-		while cnt < size:
-			data = conn.recv(min(BUF, size-cnt))
-			if not data:
-				raise ValueError('connection ended %r %r' % (cnt, size))
-			buf.append(data)
-			cnt += len(data)
-
-		if sys.hexversion > 0x3000000:
-			ret = ''.encode('latin-1').join(buf)
-			ret = ret.decode('latin-1')
-		else:
-			ret = ''.join(buf)
-
-		all_sigs_in_cache = (time.time(), ret.splitlines())
-		Logs.debug('netcache: server cache has %r entries', len(all_sigs_in_cache[1]))
-
-	if not ssig in all_sigs_in_cache[1]:
-		raise ValueError('no file %s in cache' % ssig)
-
-class MissingFile(Exception):
-	pass
-
-def recv_file(conn, ssig, count, p):
-	check_cache(conn, ssig)
-
-	params = (GET, ssig, str(count))
-	put_data(conn, ','.join(params).ljust(HEADER_SIZE))
-	data = read_header(conn)
-
-	size = int(data.split(',')[0])
-
-	if size == -1:
-		raise MissingFile('no file %s - %s in cache' % (ssig, count))
-
-	# get the file, writing immediately
-	# TODO a tmp file would be better
-	f = open(p, 'wb')
-	cnt = 0
-	while cnt < size:
-		data = conn.recv(min(BUF, size-cnt))
-		if not data:
-			raise ValueError('connection ended %r %r' % (cnt, size))
-		f.write(data)
-		cnt += len(data)
-	f.close()
-
-def sock_send(conn, ssig, cnt, p):
-	#print "pushing %r %r %r" % (ssig, cnt, p)
-	size = os.stat(p).st_size
-	params = (PUT, ssig, str(cnt), str(size))
-	put_data(conn, ','.join(params).ljust(HEADER_SIZE))
-	f = open(p, 'rb')
-	cnt = 0
-	while cnt < size:
-		r = f.read(min(BUF, size-cnt))
-		while r:
-			k = conn.send(r)
-			if not k:
-				raise ValueError('connection ended')
-			cnt += k
-			r = r[k:]
-
-def can_retrieve_cache(self):
-	if not Task.pull_addr:
-		return False
-	if not self.outputs:
-		return False
-	self.cached = False
-
-	cnt = 0
-	sig = self.signature()
-	ssig = Utils.to_hex(self.uid() + sig)
-
-	conn = None
-	err = False
-	try:
-		try:
-			conn = get_connection()
-			for node in self.outputs:
-				p = node.abspath()
-				recv_file(conn, ssig, cnt, p)
-				cnt += 1
-		except MissingFile as e:
-			Logs.debug('netcache: file is not in the cache %r', e)
-			err = True
-		except Exception as e:
-			Logs.debug('netcache: could not get the files %r', self.outputs)
-			if Logs.verbose > 1:
-				Logs.debug('netcache: exception %r', e)
-			err = True
-
-			# broken connection? remove this one
-			close_connection(conn)
-			conn = None
-		else:
-			Logs.debug('netcache: obtained %r from cache', self.outputs)
-
-	finally:
-		release_connection(conn)
-	if err:
-		return False
-
-	self.cached = True
-	return True
-
-@Utils.run_once
-def put_files_cache(self):
-	if not Task.push_addr:
-		return
-	if not self.outputs:
-		return
-	if getattr(self, 'cached', None):
-		return
-
-	#print "called put_files_cache", id(self)
-	bld = self.generator.bld
-	sig = self.signature()
-	ssig = Utils.to_hex(self.uid() + sig)
-
-	conn = None
-	cnt = 0
-	try:
-		for node in self.outputs:
-			# We could re-create the task signature from the signatures of the outputs;
-			# in practice, this would mean hashing the output files, which is unnecessary
-			try:
-				if not conn:
-					conn = get_connection(push=True)
-				sock_send(conn, ssig, cnt, node.abspath())
-				Logs.debug('netcache: sent %r', node)
-			except Exception as e:
-				Logs.debug('netcache: could not push the files %r', e)
-
-				# broken connection? remove this one
-				close_connection(conn)
-				conn = None
-			cnt += 1
-	finally:
-		release_connection(conn, push=True)
-
-	bld.task_sigs[self.uid()] = self.cache_sig
-
-def hash_env_vars(self, env, vars_lst):
-	# reimplement so that the resulting hash does not depend on local paths
-	if not env.table:
-		env = env.parent
-		if not env:
-			return Utils.SIG_NIL
-
-	idx = str(id(env)) + str(vars_lst)
-	try:
-		cache = self.cache_env
-	except AttributeError:
-		cache = self.cache_env = {}
-	else:
-		try:
-			return self.cache_env[idx]
-		except KeyError:
-			pass
-
-	v = str([env[a] for a in vars_lst])
-	v = v.replace(self.srcnode.abspath().__repr__()[:-1], '')
-	m = Utils.md5()
-	m.update(v.encode())
-	ret = m.digest()
-
-	Logs.debug('envhash: %r %r', ret, v)
-
-	cache[idx] = ret
-
-	return ret
-
-def uid(self):
-	# reimplement so that the signature does not depend on local paths
-	try:
-		return self.uid_
-	except AttributeError:
-		m = Utils.md5()
-		src = self.generator.bld.srcnode
-		up = m.update
-		up(self.__class__.__name__.encode())
-		for x in self.inputs + self.outputs:
-			up(x.path_from(src).encode())
-		self.uid_ = m.digest()
-		return self.uid_
-
-
-def make_cached(cls):
-	if getattr(cls, 'nocache', None):
-		return
-
-	m1 = cls.run
-	def run(self):
-		if getattr(self, 'nocache', False):
-			return m1(self)
-		if self.can_retrieve_cache():
-			return 0
-		return m1(self)
-	cls.run = run
-
-	m2 = cls.post_run
-	def post_run(self):
-		if getattr(self, 'nocache', False):
-			return m2(self)
-		bld = self.generator.bld
-		ret = m2(self)
-		if bld.cache_global:
-			self.put_files_cache()
-		if hasattr(self, 'chmod'):
-			for node in self.outputs:
-				os.chmod(node.abspath(), self.chmod)
-		return ret
-	cls.post_run = post_run
-
-@conf
-def setup_netcache(ctx, push_addr, pull_addr):
-	Task.Task.can_retrieve_cache = can_retrieve_cache
-	Task.Task.put_files_cache = put_files_cache
-	Task.Task.uid = uid
-	Task.push_addr = push_addr
-	Task.pull_addr = pull_addr
-	Build.BuildContext.hash_env_vars = hash_env_vars
-	ctx.cache_global = True
-
-	for x in Task.classes.values():
-		make_cached(x)
-
-def build(bld):
-	if not 'NETCACHE' in os.environ and not 'NETCACHE_PULL' in os.environ and not 'NETCACHE_PUSH' in os.environ:
-		Logs.warn('Setting NETCACHE_PULL=127.0.0.1:12001 and NETCACHE_PUSH=127.0.0.1:11001')
-		os.environ['NETCACHE_PULL'] = '127.0.0.1:12001'
-		os.environ['NETCACHE_PUSH'] = '127.0.0.1:11001'
-
-	if 'NETCACHE' in os.environ:
-		if not 'NETCACHE_PUSH' in os.environ:
-			os.environ['NETCACHE_PUSH'] = os.environ['NETCACHE']
-		if not 'NETCACHE_PULL' in os.environ:
-			os.environ['NETCACHE_PULL'] = os.environ['NETCACHE']
-
-	v = os.environ['NETCACHE_PULL']
-	if v:
-		h, p = v.split(':')
-		pull_addr = (h, int(p))
-	else:
-		pull_addr = None
-
-	v = os.environ['NETCACHE_PUSH']
-	if v:
-		h, p = v.split(':')
-		push_addr = (h, int(p))
-	else:
-		push_addr = None
-
-	setup_netcache(bld, push_addr, pull_addr)
-
diff --git a/waflib/extras/objcopy.py b/waflib/extras/objcopy.py
deleted file mode 100644
index 82d8359..0000000
--- a/waflib/extras/objcopy.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/python
-# Grygoriy Fuchedzhy 2010
-
-"""
-Support for converting linked targets to ihex, srec or binary files using
-objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
-feature. The 'objcopy' feature uses the following attributes:
-
-objcopy_bfdname        Target object format name (e.g. ihex, srec, binary).
-                       Defaults to ihex.
-objcopy_target         File name used for objcopy output. This defaults to the
-                       target name with objcopy_bfdname as extension.
-objcopy_install_path   Install path for the objcopy_target file. Defaults to
-                       ${PREFIX}/firmware.
-objcopy_flags          Additional flags passed to objcopy.
-"""
-
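A minimal wscript sketch using these attributes (file names and flags are placeholders)::

	def configure(conf):
		conf.load('compiler_c')
		conf.load('objcopy')

	def build(bld):
		bld(features='c cprogram objcopy',
			source='main.c',
			target='firmware.elf',
			objcopy_bfdname='binary',
			objcopy_flags='--strip-all')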
-from waflib.Utils import def_attrs
-from waflib import Task
-from waflib.TaskGen import feature, after_method
-
-class objcopy(Task.Task):
-	run_str = '${OBJCOPY} -O ${TARGET_BFDNAME} ${OBJCOPYFLAGS} ${SRC} ${TGT}'
-	color   = 'CYAN'
-
-@feature('objcopy')
-@after_method('apply_link')
-def map_objcopy(self):
-	def_attrs(self,
-	   objcopy_bfdname = 'ihex',
-	   objcopy_target = None,
-	   objcopy_install_path = "${PREFIX}/firmware",
-	   objcopy_flags = '')
-
-	link_output = self.link_task.outputs[0]
-	if not self.objcopy_target:
-		self.objcopy_target = link_output.change_ext('.' + self.objcopy_bfdname).name
-	task = self.create_task('objcopy', src=link_output, tgt=self.path.find_or_declare(self.objcopy_target))
-
-	task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname)
-	try:
-		task.env.append_unique('OBJCOPYFLAGS', getattr(self, 'objcopy_flags'))
-	except AttributeError:
-		pass
-
-	if self.objcopy_install_path:
-		self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0])
-
-def configure(ctx):
-	ctx.find_program('objcopy', var='OBJCOPY', mandatory=True)
-
diff --git a/waflib/extras/ocaml.py b/waflib/extras/ocaml.py
deleted file mode 100644
index 7d785c6..0000000
--- a/waflib/extras/ocaml.py
+++ /dev/null
@@ -1,348 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"ocaml support"
-
-import os, re
-from waflib import Utils, Task
-from waflib.Logs import error
-from waflib.TaskGen import feature, before_method, after_method, extension
-
-EXT_MLL = ['.mll']
-EXT_MLY = ['.mly']
-EXT_MLI = ['.mli']
-EXT_MLC = ['.c']
-EXT_ML  = ['.ml']
-
-open_re = re.compile(r'^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
-foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
-def filter_comments(txt):
-	meh = [0]
-	def repl(m):
-		if m.group(1):
-			meh[0] += 1
-		elif m.group(2):
-			meh[0] -= 1
-		elif not meh[0]:
-			return m.group()
-		return ''
-	return foo.sub(repl, txt)
-
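filter_comments() strips (possibly nested) OCaml comments while leaving string literals untouched, for example::

	src = 'let x = 1 (* a (* nested *) comment *) let s = "(* kept *)"'
	print(filter_comments(src))  # let x = 1  let s = "(* kept *)"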
-def scan(self):
-	node = self.inputs[0]
-	code = filter_comments(node.read())
-
-	global open_re
-	names = []
-	import_iterator = open_re.finditer(code)
-	if import_iterator:
-		for import_match in import_iterator:
-			names.append(import_match.group(1))
-	found_lst = []
-	raw_lst = []
-	for name in names:
-		nd = None
-		for x in self.incpaths:
-			nd = x.find_resource(name.lower()+'.ml')
-			if not nd:
-				nd = x.find_resource(name+'.ml')
-			if nd:
-				found_lst.append(nd)
-				break
-		else:
-			raw_lst.append(name)
-
-	return (found_lst, raw_lst)
-
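The scanner only recognizes plain ``open Module`` statements; a sketch of what open_re extracts::

	code = 'open List;;\nopen Printf\nlet x = 1'
	print([m.group(1) for m in open_re.finditer(code)])  # ['List', 'Printf']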
-native_lst=['native', 'all', 'c_object']
-bytecode_lst=['bytecode', 'all']
-
-@feature('ocaml')
-def init_ml(self):
-	Utils.def_attrs(self,
-		type = 'all',
-		incpaths_lst = [],
-		bld_incpaths_lst = [],
-		mlltasks = [],
-		mlytasks = [],
-		mlitasks = [],
-		native_tasks = [],
-		bytecode_tasks = [],
-		linktasks = [],
-		bytecode_env = None,
-		native_env = None,
-		compiled_tasks = [],
-		includes = '',
-		uselib = '',
-		are_deps_set = 0)
-
-@feature('ocaml')
-@after_method('init_ml')
-def init_envs_ml(self):
-
-	self.islibrary = getattr(self, 'islibrary', False)
-
-	global native_lst, bytecode_lst
-	self.native_env = None
-	if self.type in native_lst:
-		self.native_env = self.env.derive()
-		if self.islibrary:
-			self.native_env['OCALINKFLAGS']   = '-a'
-
-	self.bytecode_env = None
-	if self.type in bytecode_lst:
-		self.bytecode_env = self.env.derive()
-		if self.islibrary:
-			self.bytecode_env['OCALINKFLAGS'] = '-a'
-
-	if self.type == 'c_object':
-		self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
-
-@feature('ocaml')
-@before_method('apply_vars_ml')
-@after_method('init_envs_ml')
-def apply_incpaths_ml(self):
-	inc_lst = self.includes.split()
-	lst = self.incpaths_lst
-	for dir in inc_lst:
-		node = self.path.find_dir(dir)
-		if not node:
-			error("node not found: " + str(dir))
-			continue
-		if not node in lst:
-			lst.append(node)
-		self.bld_incpaths_lst.append(node)
-	# now the nodes are added to self.incpaths_lst
-
-@feature('ocaml')
-@before_method('process_source')
-def apply_vars_ml(self):
-	for i in self.incpaths_lst:
-		if self.bytecode_env:
-			app = self.bytecode_env.append_value
-			app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
-
-		if self.native_env:
-			app = self.native_env.append_value
-			app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
-
-	varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
-	for name in self.uselib.split():
-		for vname in varnames:
-			cnt = self.env[vname+'_'+name]
-			if cnt:
-				if self.bytecode_env:
-					self.bytecode_env.append_value(vname, cnt)
-				if self.native_env:
-					self.native_env.append_value(vname, cnt)
-
-@feature('ocaml')
-@after_method('process_source')
-def apply_link_ml(self):
-
-	if self.bytecode_env:
-		ext = self.islibrary and '.cma' or '.run'
-
-		linktask = self.create_task('ocalink')
-		linktask.bytecode = 1
-		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
-		linktask.env = self.bytecode_env
-		self.linktasks.append(linktask)
-
-	if self.native_env:
-		if self.type == 'c_object':
-			ext = '.o'
-		elif self.islibrary:
-			ext = '.cmxa'
-		else:
-			ext = ''
-
-		linktask = self.create_task('ocalinkx')
-		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
-		linktask.env = self.native_env
-		self.linktasks.append(linktask)
-
-		# we produce a .o file to be used by gcc
-		self.compiled_tasks.append(linktask)
-
-@extension(*EXT_MLL)
-def mll_hook(self, node):
-	mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
-	mll_task.env = self.native_env.derive()
-	self.mlltasks.append(mll_task)
-
-	self.source.append(mll_task.outputs[0])
-
-@extension(*EXT_MLY)
-def mly_hook(self, node):
-	mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
-	mly_task.env = self.native_env.derive()
-	self.mlytasks.append(mly_task)
-	self.source.append(mly_task.outputs[0])
-
-	task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
-	task.env = self.native_env.derive()
-
-@extension(*EXT_MLI)
-def mli_hook(self, node):
-	task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
-	task.env = self.native_env.derive()
-	self.mlitasks.append(task)
-
-@extension(*EXT_MLC)
-def mlc_hook(self, node):
-	task = self.create_task('ocamlcc', node, node.change_ext('.o'))
-	task.env = self.native_env.derive()
-	self.compiled_tasks.append(task)
-
-@extension(*EXT_ML)
-def ml_hook(self, node):
-	if self.native_env:
-		task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
-		task.env = self.native_env.derive()
-		task.incpaths = self.bld_incpaths_lst
-		self.native_tasks.append(task)
-
-	if self.bytecode_env:
-		task = self.create_task('ocaml', node, node.change_ext('.cmo'))
-		task.env = self.bytecode_env.derive()
-		task.bytecode = 1
-		task.incpaths = self.bld_incpaths_lst
-		self.bytecode_tasks.append(task)
-
-def compile_may_start(self):
-
-	if not getattr(self, 'flag_deps', ''):
-		self.flag_deps = 1
-
-		# the evil part is that we can only compute the dependencies after the
-		# source files can be read (this means actually producing the source files)
-		if getattr(self, 'bytecode', ''):
-			alltasks = self.generator.bytecode_tasks
-		else:
-			alltasks = self.generator.native_tasks
-
-		self.signature() # ensure that files are scanned - unfortunately
-		tree = self.generator.bld
-		for node in self.inputs:
-			lst = tree.node_deps[self.uid()]
-			for depnode in lst:
-				for t in alltasks:
-					if t == self:
-						continue
-					if depnode in t.inputs:
-						self.set_run_after(t)
-
-		# TODO necessary to get the signature right - for now
-		delattr(self, 'cache_sig')
-		self.signature()
-
-	return Task.Task.runnable_status(self)
-
-class ocamlx(Task.Task):
-	"""native caml compilation"""
-	color   = 'GREEN'
-	run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
-	scan    = scan
-	runnable_status = compile_may_start
-
-class ocaml(Task.Task):
-	"""bytecode caml compilation"""
-	color   = 'GREEN'
-	run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
-	scan    = scan
-	runnable_status = compile_may_start
-
-class ocamlcmi(Task.Task):
-	"""interface generator (the .i files?)"""
-	color   = 'BLUE'
-	run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
-	before  = ['ocamlcc', 'ocaml']
-
-class ocamlcc(Task.Task):
-	"""ocaml to c interfaces"""
-	color   = 'GREEN'
-	run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'
-
-class ocamllex(Task.Task):
-	"""lexical generator"""
-	color   = 'BLUE'
-	run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
-	before  = ['ocamlcmi', 'ocaml', 'ocamlcc']
-
-class ocamlyacc(Task.Task):
-	"""parser generator"""
-	color   = 'BLUE'
-	run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}'
-	before  = ['ocamlcmi', 'ocaml', 'ocamlcc']
-
-	def base(self):
-		node = self.outputs[0]
-		s = os.path.splitext(node.name)[0]
-		return node.bld_dir() + os.sep + s
-
-def link_may_start(self):
-
-	if getattr(self, 'bytecode', 0):
-		alltasks = self.generator.bytecode_tasks
-	else:
-		alltasks = self.generator.native_tasks
-
-	for x in alltasks:
-		if not x.hasrun:
-			return Task.ASK_LATER
-
-	if not getattr(self, 'order', ''):
-
-		# now reorder the inputs given the task dependencies
-		# this part is difficult; we do not have a total order on the tasks
-		# if the dependencies are wrong, this may not terminate
-		seen = []
-		pendant = []+alltasks
-		while pendant:
-			task = pendant.pop(0)
-			if task in seen:
-				continue
-			for x in task.run_after:
-				if not x in seen:
-					pendant.append(task)
-					break
-			else:
-				seen.append(task)
-		self.inputs = [x.outputs[0] for x in seen]
-		self.order = 1
-	return Task.Task.runnable_status(self)
-
-class ocalink(Task.Task):
-	"""bytecode caml link"""
-	color   = 'YELLOW'
-	run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
-	runnable_status = link_may_start
-	after = ['ocaml', 'ocamlcc']
-
-class ocalinkx(Task.Task):
-	"""native caml link"""
-	color   = 'YELLOW'
-	run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
-	runnable_status = link_may_start
-	after = ['ocamlx', 'ocamlcc']
-
-def configure(conf):
-	opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
-	occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
-	if (not opt) or (not occ):
-		conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
-
-	v = conf.env
-	v['OCAMLC']       = occ
-	v['OCAMLOPT']     = opt
-	v['OCAMLLEX']     = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
-	v['OCAMLYACC']    = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
-	v['OCAMLFLAGS']   = ''
-	where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep
-	v['OCAMLLIB']     = where
-	v['LIBPATH_OCAML'] = where
-	v['INCLUDES_OCAML'] = where
-	v['LIB_OCAML'] = 'camlrun'
-
diff --git a/waflib/extras/package.py b/waflib/extras/package.py
deleted file mode 100644
index c06498e..0000000
--- a/waflib/extras/package.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011
-
-"""
-Obtain packages, unpack them in a location, and add associated uselib variables
-(CFLAGS_pkgname, LIBPATH_pkgname, etc).
-
-The default is to use a Dependencies.txt file in the source directory.
-
-This is a work in progress.
-
-Usage:
-
-def options(opt):
-	opt.load('package')
-
-def configure(conf):
-	conf.load_packages()
-"""
-
-from waflib import Logs
-from waflib.Configure import conf
-
-try:
-	from urllib import request
-except ImportError:
-	from urllib import urlopen
-else:
-	urlopen = request.urlopen
-
-
-CACHEVAR = 'WAFCACHE_PACKAGE'
-
-@conf
-def get_package_cache_dir(self):
-	cache = None
-	if CACHEVAR in self.environ:
-		cache = self.environ[CACHEVAR]
-		cache = self.root.make_node(cache)
-	elif self.env[CACHEVAR]:
-		cache = self.env[CACHEVAR]
-		cache = self.root.make_node(cache)
-	else:
-		cache = self.srcnode.make_node('.wafcache_package')
-	cache.mkdir()
-	return cache
-
-@conf
-def download_archive(self, src, dst):
-	for x in self.env.PACKAGE_REPO:
-		url = '/'.join((x, src))
-		try:
-			web = urlopen(url)
-			try:
-				if web.getcode() != 200:
-					continue
-			except AttributeError:
-				pass
-		except Exception:
-			# on python 3, urlopen raises an exception on failure
-			# python 2.3 does not have getcode and also raises an exception
-			continue
-		else:
-			tmp = self.root.make_node(dst)
-			tmp.write(web.read())
-			Logs.warn('Downloaded %s from %s', tmp.abspath(), url)
-			break
-	else:
-		self.fatal('Could not get the package %s' % src)
-
-@conf
-def load_packages(self):
-	self.get_package_cache_dir()
-	# read the dependencies, get the archives, ..
-
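Since the tool is a work in progress, here is only a sketch of the intended configuration flow (repository URL and archive name are placeholders)::

	def options(opt):
		opt.load('package')

	def configure(conf):
		conf.env.PACKAGE_REPO = ['https://example.org/packages']
		conf.download_archive('foo-1.0.tar.gz',
			conf.get_package_cache_dir().abspath() + '/foo-1.0.tar.gz')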
diff --git a/waflib/extras/parallel_debug.py b/waflib/extras/parallel_debug.py
deleted file mode 100644
index 4ffec5e..0000000
--- a/waflib/extras/parallel_debug.py
+++ /dev/null
@@ -1,462 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2007-2010 (ita)
-
-"""
-Debugging helper for parallel compilation.
-
-Copy it to your project and load it with::
-
-	def options(opt):
-		opt.load('parallel_debug', tooldir='.')
-	def build(bld):
-		...
-
-The build will then output a file named pdebug.svg in the source directory.
-"""
-
-import re, sys, threading, time, traceback
-try:
-	from Queue import Queue
-except:
-	from queue import Queue
-from waflib import Runner, Options, Task, Logs, Errors
-
-SVG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0"
-   x="${project.x}" y="${project.y}" width="${project.width}" height="${project.height}" id="svg602" xml:space="preserve">
-
-<style type='text/css' media='screen'>
-	g.over rect { stroke:#FF0000; fill-opacity:0.4 }
-</style>
-
-<script type='text/javascript'><![CDATA[
-var svg  = document.getElementsByTagName('svg')[0];
-
-svg.addEventListener('mouseover', function(e) {
-	var g = e.target.parentNode;
-	var x = document.getElementById('r_' + g.id);
-	if (x) {
-		g.setAttribute('class', g.getAttribute('class') + ' over');
-		x.setAttribute('class', x.getAttribute('class') + ' over');
-		showInfo(e, g.id, e.target.attributes.tooltip.value);
-	}
-}, false);
-
-svg.addEventListener('mouseout', function(e) {
-		var g = e.target.parentNode;
-		var x = document.getElementById('r_' + g.id);
-		if (x) {
-			g.setAttribute('class', g.getAttribute('class').replace(' over', ''));
-			x.setAttribute('class', x.getAttribute('class').replace(' over', ''));
-			hideInfo(e);
-		}
-}, false);
-
-function showInfo(evt, txt, details) {
-${if project.tooltip}
-	tooltip = document.getElementById('tooltip');
-
-	var t = document.getElementById('tooltiptext');
-	t.firstChild.data = txt + " " + details;
-
-	var x = evt.clientX + 9;
-	if (x > 250) { x -= t.getComputedTextLength() + 16; }
-	var y = evt.clientY + 20;
-	tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
-	tooltip.setAttributeNS(null, "visibility", "visible");
-
-	var r = document.getElementById('tooltiprect');
-	r.setAttribute('width', t.getComputedTextLength() + 6);
-${endif}
-}
-
-function hideInfo(evt) {
-	var tooltip = document.getElementById('tooltip');
-	tooltip.setAttributeNS(null,"visibility","hidden");
-}
-]]></script>
-
-<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
-<rect
-   x='${project.x}' y='${project.y}' width='${project.width}' height='${project.height}'
-   style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"></rect>
-
-${if project.title}
-  <text x="${project.title_x}" y="${project.title_y}"
-    style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">${project.title}</text>
-${endif}
-
-
-${for cls in project.groups}
-  <g id='${cls.classname}'>
-    ${for rect in cls.rects}
-    <rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' tooltip='${rect.name}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
-    ${endfor}
-  </g>
-${endfor}
-
-${for info in project.infos}
-  <g id='r_${info.classname}'>
-   <rect x='${info.x}' y='${info.y}' width='${info.width}' height='${info.height}' style="font-size:10;fill:${info.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
-   <text x="${info.text_x}" y="${info.text_y}"
-       style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
-   >${info.text}</text>
-  </g>
-${endfor}
-
-${if project.tooltip}
-  <g transform="translate(0,0)" visibility="hidden" id="tooltip">
-       <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
-       <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
-  </g>
-${endif}
-
-</svg>
-"""
-
-COMPILE_TEMPLATE = '''def f(project):
-	lst = []
-	def xml_escape(value):
-		return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
-	%s
-	return ''.join(lst)
-'''
-reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
-def compile_template(line):
-
-	extr = []
-	def repl(match):
-		g = match.group
-		if g('dollar'):
-			return "$"
-		elif g('backslash'):
-			return "\\"
-		elif g('subst'):
-			extr.append(g('code'))
-			return "<<|@|>>"
-		return None
-
-	line2 = reg_act.sub(repl, line)
-	params = line2.split('<<|@|>>')
-	assert(extr)
-
-	indent = 0
-	buf = []
-
-	def app(txt):
-		buf.append(indent * '\t' + txt)
-
-	for x in range(len(extr)):
-		if params[x]:
-			app("lst.append(%r)" % params[x])
-
-		f = extr[x]
-		if f.startswith(('if', 'for')):
-			app(f + ':')
-			indent += 1
-		elif f.startswith('py:'):
-			app(f[3:])
-		elif f.startswith(('endif', 'endfor')):
-			indent -= 1
-		elif f.startswith(('else', 'elif')):
-			indent -= 1
-			app(f + ':')
-			indent += 1
-		elif f.startswith('xml:'):
-			app('lst.append(xml_escape(%s))' % f[4:])
-		else:
-			#app('lst.append((%s) or "cannot find %s")' % (f, f))
-			app('lst.append(str(%s))' % f)
-
-	if extr:
-		if params[-1]:
-			app("lst.append(%r)" % params[-1])
-
-	fun = COMPILE_TEMPLATE % "\n\t".join(buf)
-	# uncomment the following to debug the template
-	#for i, x in enumerate(fun.splitlines()):
-	#	print i, x
-	return Task.funex(fun)
-
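-# Illustrative sketch (not part of the original tool): a template fragment
-# such as
-#	${for x in project.items}<rect name='${x}'/>${endfor}
-# is compiled by the function above into a body roughly equivalent to:
-#	for x in project.items:
-#		lst.append("<rect name='")
-#		lst.append(str(x))
-#		lst.append("'/>")
-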
-# red   #ff4d4d
-# green #4da74d
-# lila  #a751ff
-
-color2code = {
-	'GREEN'  : '#4da74d',
-	'YELLOW' : '#fefe44',
-	'PINK'   : '#a751ff',
-	'RED'    : '#cc1d1d',
-	'BLUE'   : '#6687bb',
-	'CYAN'   : '#34e2e2',
-}
-
-mp = {}
-info = [] # list of (text,color)
-
-def map_to_color(name):
-	if name in mp:
-		return mp[name]
-	try:
-		cls = Task.classes[name]
-	except KeyError:
-		return color2code['RED']
-	if cls.color in mp:
-		return mp[cls.color]
-	if cls.color in color2code:
-		return color2code[cls.color]
-	return color2code['RED']
-
-def process(self):
-	m = self.generator.bld.producer
-	try:
-		# TODO another place for this?
-		del self.generator.bld.task_sigs[self.uid()]
-	except KeyError:
-		pass
-
-	self.generator.bld.producer.set_running(1, self)
-
-	try:
-		ret = self.run()
-	except Exception:
-		self.err_msg = traceback.format_exc()
-		self.hasrun = Task.EXCEPTION
-
-		# TODO cleanup
-		m.error_handler(self)
-		return
-
-	if ret:
-		self.err_code = ret
-		self.hasrun = Task.CRASHED
-	else:
-		try:
-			self.post_run()
-		except Errors.WafError:
-			pass
-		except Exception:
-			self.err_msg = traceback.format_exc()
-			self.hasrun = Task.EXCEPTION
-		else:
-			self.hasrun = Task.SUCCESS
-	if self.hasrun != Task.SUCCESS:
-		m.error_handler(self)
-
-	self.generator.bld.producer.set_running(-1, self)
-
-Task.Task.process_back = Task.Task.process
-Task.Task.process = process
-
-old_start = Runner.Parallel.start
-def do_start(self):
-	try:
-		Options.options.dband
-	except AttributeError:
-		self.bld.fatal('use def options(opt): opt.load("parallel_debug")!')
-
-	self.taskinfo = Queue()
-	old_start(self)
-	if self.dirty:
-		make_picture(self)
-Runner.Parallel.start = do_start
-
-lock_running = threading.Lock()
-def set_running(self, by, tsk):
-	with lock_running:
-		try:
-			cache = self.lock_cache
-		except AttributeError:
-			cache = self.lock_cache = {}
-
-		i = 0
-		if by > 0:
-			vals = cache.values()
-			for i in range(self.numjobs):
-				if i not in vals:
-					cache[tsk] = i
-					break
-		else:
-			i = cache[tsk]
-			del cache[tsk]
-
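-		# each queued record is: (thread slot, id(task), timestamp, task class
-		# name, processed count, total count, +1/-1 start/stop marker, outputs)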
-		self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by, ",".join(map(str, tsk.outputs)))  )
-Runner.Parallel.set_running = set_running
-
-def name2class(name):
-	return name.replace(' ', '_').replace('.', '_')
-
-def make_picture(producer):
-	# first, cast the parameters
-	if not hasattr(producer.bld, 'path'):
-		return
-
-	tmp = []
-	try:
-		while True:
-			tup = producer.taskinfo.get(False)
-			tmp.append(list(tup))
-	except Exception:
-		pass
-
-	try:
-		ini = float(tmp[0][2])
-	except IndexError:
-		return
-
-	if not info:
-		seen = []
-		for x in tmp:
-			name = x[3]
-			if name in seen:
-				continue
-			seen.append(name)
-			info.append((name, map_to_color(name)))
-		info.sort(key=lambda x: x[0])
-
-	thread_count = 0
-	acc = []
-	for x in tmp:
-		thread_count += x[6]
-		acc.append("%d %d %f %r %d %d %d %s" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count, x[7]))
-
-	data_node = producer.bld.path.make_node('pdebug.dat')
-	data_node.write('\n'.join(acc))
-
-	tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
-
-	st = {}
-	for l in tmp:
-		if l[0] not in st:
-			st[l[0]] = len(st)
-	tmp = [[st[lst[0]]] + lst[1:] for lst in tmp]
-	THREAD_AMOUNT = len(st)
-
-	st = {}
-	for l in tmp:
-		if l[1] not in st:
-			st[l[1]] = len(st)
-	tmp = [[lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp]
-
-
-	BAND = Options.options.dband
-
-	seen = {}
-	acc = []
-	for x in range(len(tmp)):
-		line = tmp[x]
-		tid = line[1]
-
-		if tid in seen:
-			continue
-		seen[tid] = True
-
-		begin = line[2]
-		thread_id = line[0]
-		for y in range(x + 1, len(tmp)):
-			line = tmp[y]
-			if line[1] == tid:
-				end = line[2]
-				#print tid, thread_id, begin, end
-				#acc.append(  ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
-				acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3], line[7]) )
-				break
-
-	if Options.options.dmaxtime < 0.1:
-		gwidth = 1
-		for x in tmp:
-			m = BAND * x[2]
-			if m > gwidth:
-				gwidth = m
-	else:
-		gwidth = BAND * Options.options.dmaxtime
-
-	ratio = float(Options.options.dwidth) / gwidth
-	gwidth = Options.options.dwidth
-	gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
-
-
-	# simple data model for our template
-	class tobject(object):
-		pass
-
-	model = tobject()
-	model.x = 0
-	model.y = 0
-	model.width = gwidth + 4
-	model.height = gheight + 4
-
-	model.tooltip = not Options.options.dnotooltip
-
-	model.title = Options.options.dtitle
-	model.title_x = gwidth / 2
-	model.title_y = gheight - 5
-
-	groups = {}
-	for (x, y, w, h, clsname, name) in acc:
-		try:
-			groups[clsname].append((x, y, w, h, name))
-		except KeyError:
-			groups[clsname] = [(x, y, w, h, name)]
-
-	# groups of rectangles (else js highlighting is slow)
-	model.groups = []
-	for cls in groups:
-		g = tobject()
-		model.groups.append(g)
-		g.classname = name2class(cls)
-		g.rects = []
-		for (x, y, w, h, name) in groups[cls]:
-			r = tobject()
-			g.rects.append(r)
-			r.x = 2 + x * ratio
-			r.y = 2 + y
-			r.width = w * ratio
-			r.height = h
-			r.name = name
-			r.color = map_to_color(cls)
-
-	cnt = THREAD_AMOUNT
-
-	# caption
-	model.infos = []
-	for (text, color) in info:
-		inf = tobject()
-		model.infos.append(inf)
-		inf.classname = name2class(text)
-		inf.x = 2 + BAND
-		inf.y = 5 + (cnt + 0.5) * BAND
-		inf.width = BAND/2
-		inf.height = BAND/2
-		inf.color = color
-
-		inf.text = text
-		inf.text_x = 2 + 2 * BAND
-		inf.text_y = 5 + (cnt + 0.5) * BAND + 10
-
-		cnt += 1
-
-	# write the file...
-	template1 = compile_template(SVG_TEMPLATE)
-	txt = template1(model)
-
-	node = producer.bld.path.make_node('pdebug.svg')
-	node.write(txt)
-	Logs.warn('Created the diagram %r', node)
-
-def options(opt):
-	opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
-		help='title for the svg diagram', dest='dtitle')
-	opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth')
-	opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
-	opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
-	opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
-	opt.add_option('--dnotooltip', action='store_true', help='disable tooltips', default=False, dest='dnotooltip')
-
diff --git a/waflib/extras/pch.py b/waflib/extras/pch.py
deleted file mode 100644
index 103e752..0000000
--- a/waflib/extras/pch.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Alexander Afanasyev (UCLA), 2014
-
-"""
-Enable precompiled C++ header support (currently only clang++ and g++ are supported)
-
-To use this tool, wscript should look like:
-
-	def options(opt):
-		opt.load('pch')
-		# This will add the `--without-pch` configure option.
-		# Precompiled header support is enabled unless --without-pch is given at the configure stage
-
-	def configure(conf):
-		conf.load('pch')
-		# this will set conf.env.WITH_PCH if a supported compiler is used and --without-pch was not given
-		# Unless conf.env.WITH_PCH is set, the precompiled header support is disabled
-
-	def build(bld):
-		bld(features='cxx pch',
-			target='precompiled-headers',
-			name='precompiled-headers',
-			headers='a.h b.h c.h', # headers to pre-compile into `precompiled-headers`
-
-			# Other parameters to compile precompiled headers
-			# includes=...,
-			# export_includes=...,
-			# use=...,
-			# ...
-
-			# Exported parameters will be propagated even if precompiled headers are disabled
-		)
-
-		bld(
-			target='test',
-			features='cxx cxxprogram',
-			source='a.cpp b.cpp d.cpp main.cpp',
-			use='precompiled-headers',
-		)
-
-		# or
-
-		bld(
-			target='test',
-			features='pch cxx cxxprogram',
-			source='a.cpp b.cpp d.cpp main.cpp',
-			headers='a.h b.h c.h',
-		)
-
-Note that the precompiled header must have include guards (protection against multiple inclusion). If the guards are missing, any benefit of the precompiled header is voided and compilation may fail in some cases.
-"""
-
-import os
-from waflib import Task, TaskGen, Utils
-from waflib.Tools import c_preproc, cxx
-
-
-PCH_COMPILER_OPTIONS = {
-	'clang++': [['-include'], '.pch', ['-x', 'c++-header']],
-	'g++':     [['-include'], '.gch', ['-x', 'c++-header']],
-}
-
-
-def options(opt):
-	opt.add_option('--without-pch', action='store_false', default=True, dest='with_pch', help='Do not use precompiled headers to speed up compilation (only g++ and clang++ are supported)')
-
-def configure(conf):
-	if conf.options.with_pch and conf.env['COMPILER_CXX'] in PCH_COMPILER_OPTIONS:
-		conf.env.WITH_PCH = True
-		flags = PCH_COMPILER_OPTIONS[conf.env['COMPILER_CXX']]
-		conf.env.CXXPCH_F = flags[0]
-		conf.env.CXXPCH_EXT = flags[1]
-		conf.env.CXXPCH_FLAGS = flags[2]
-
-
-@TaskGen.feature('pch')
-@TaskGen.before('process_source')
-def apply_pch(self):
-	if not self.env.WITH_PCH:
-		return
-
-	if getattr(self.bld, 'pch_tasks', None) is None:
-		self.bld.pch_tasks = {}
-
-	if getattr(self, 'headers', None) is None:
-		return
-
-	self.headers = self.to_nodes(self.headers)
-
-	if getattr(self, 'name', None):
-		key = "%s.%s" % (self.name, self.idx)
-		if key in self.bld.pch_tasks:
-			self.bld.fatal("Duplicated 'pch' task with name %r" % key)
-
-	out = '%s.%d%s' % (self.target, self.idx, self.env['CXXPCH_EXT'])
-	out = self.path.find_or_declare(out)
-	task = self.create_task('gchx', self.headers, out)
-
-	# the target is the absolute path of `out`, but without the precompiled header extension
-	task.target = out.abspath()[:-len(out.suffix())]
-
-	self.pch_task = task
-	if getattr(self, 'name', None):
-		self.bld.pch_tasks["%s.%s" % (self.name, self.idx)] = task
-
-@TaskGen.feature('cxx')
-@TaskGen.after_method('process_source', 'propagate_uselib_vars')
-def add_pch(self):
-	if not (self.env['WITH_PCH'] and getattr(self, 'use', None) and getattr(self, 'compiled_tasks', None) and getattr(self.bld, 'pch_tasks', None)):
-		return
-
-	pch = None
-	# find pch task, if any
-
-	if getattr(self, 'pch_task', None):
-		pch = self.pch_task
-	else:
-		for use in Utils.to_list(self.use):
-			try:
-				pch = self.bld.pch_tasks[use]
-			except KeyError:
-				pass
-
-	if pch:
-		for x in self.compiled_tasks:
-			x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])
-
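-# For example (illustrative paths), with g++ and a pch task generator named
-# 'precompiled-headers', every consuming compile task effectively gains
-#	CXXFLAGS += ['-include', '/path/to/build/precompiled-headers.0']
-# so the compiler picks up the neighbouring .gch file automatically.
-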
-class gchx(Task.Task):
-	run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
-	scan    = c_preproc.scan
-	color   = 'BLUE'
-	ext_out=['.h']
-
-	def runnable_status(self):
-		try:
-			node_deps = self.generator.bld.node_deps[self.uid()]
-		except KeyError:
-			node_deps = []
-		ret = Task.Task.runnable_status(self)
-		if ret == Task.SKIP_ME and self.env.CXX_NAME == 'clang':
-			t = os.stat(self.outputs[0].abspath()).st_mtime
-			for n in self.inputs + node_deps:
-				if os.stat(n.abspath()).st_mtime > t:
-					return Task.RUN_ME
-		return ret
diff --git a/waflib/extras/pep8.py b/waflib/extras/pep8.py
deleted file mode 100644
index 676beed..0000000
--- a/waflib/extras/pep8.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-#
-# written by Sylvain Rouquette, 2011
-
-'''
-Install pep8 module:
-$ easy_install pep8
-	or
-$ pip install pep8
-
-To add the pep8 tool to the waf file:
-$ ./waf-light --tools=compat15,pep8
-	or, if you have waf >= 1.6.2
-$ ./waf update --files=pep8
-
-
-Then add this to your wscript:
-
-[at]extension('.py', 'wscript')
-def run_pep8(self, node):
-	self.create_task('Pep8', node)
-
-'''
-
-import threading
-from waflib import Task, Options
-
-pep8 = __import__('pep8')
-
-
-class Pep8(Task.Task):
-	color = 'PINK'
-	lock = threading.Lock()
-
-	def check_options(self):
-		if pep8.options:
-			return
-		pep8.options = Options.options
-		pep8.options.prog = 'pep8'
-		excl = pep8.options.exclude.split(',')
-		pep8.options.exclude = [s.rstrip('/') for s in excl]
-		if pep8.options.filename:
-			pep8.options.filename = pep8.options.filename.split(',')
-		if pep8.options.select:
-			pep8.options.select = pep8.options.select.split(',')
-		else:
-			pep8.options.select = []
-		if pep8.options.ignore:
-			pep8.options.ignore = pep8.options.ignore.split(',')
-		elif pep8.options.select:
-			# Ignore all checks which are not explicitly selected
-			pep8.options.ignore = ['']
-		elif pep8.options.testsuite or pep8.options.doctest:
-			# For doctest and testsuite, all checks are required
-			pep8.options.ignore = []
-		else:
-			# The default choice: ignore controversial checks
-			pep8.options.ignore = pep8.DEFAULT_IGNORE.split(',')
-		pep8.options.physical_checks = pep8.find_checks('physical_line')
-		pep8.options.logical_checks = pep8.find_checks('logical_line')
-		pep8.options.counters = dict.fromkeys(pep8.BENCHMARK_KEYS, 0)
-		pep8.options.messages = {}
-
-	def run(self):
-		with Pep8.lock:
-			self.check_options()
-		pep8.input_file(self.inputs[0].abspath())
-		return 0 if not pep8.get_count() else -1
-
-
-def options(opt):
-	opt.add_option('-q', '--quiet', default=0, action='count',
-				   help="report only file names, or nothing with -qq")
-	opt.add_option('-r', '--repeat', action='store_true',
-				   help="show all occurrences of the same error")
-	opt.add_option('--exclude', metavar='patterns',
-				   default=pep8.DEFAULT_EXCLUDE,
-				   help="exclude files or directories which match these "
-				   "comma separated patterns (default: %s)" %
-				   pep8.DEFAULT_EXCLUDE,
-				   dest='exclude')
-	opt.add_option('--filename', metavar='patterns', default='*.py',
-				   help="when parsing directories, only check filenames "
-				   "matching these comma separated patterns (default: "
-				   "*.py)")
-	opt.add_option('--select', metavar='errors', default='',
-				   help="select errors and warnings (e.g. E,W6)")
-	opt.add_option('--ignore', metavar='errors', default='',
-				   help="skip errors and warnings (e.g. E4,W)")
-	opt.add_option('--show-source', action='store_true',
-				   help="show source code for each error")
-	opt.add_option('--show-pep8', action='store_true',
-				   help="show text of PEP 8 for each error")
-	opt.add_option('--statistics', action='store_true',
-				   help="count errors and warnings")
-	opt.add_option('--count', action='store_true',
-				   help="print total number of errors and warnings "
-				   "to standard error and set exit code to 1 if "
-				   "total is not null")
-	opt.add_option('--benchmark', action='store_true',
-				   help="measure processing speed")
-	opt.add_option('--testsuite', metavar='dir',
-				   help="run regression tests from dir")
-	opt.add_option('--doctest', action='store_true',
-				   help="run doctest on myself")
diff --git a/waflib/extras/pgicc.py b/waflib/extras/pgicc.py
deleted file mode 100644
index f8068d5..0000000
--- a/waflib/extras/pgicc.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Antoine Dechaume 2011
-
-"""
-Detect the PGI C compiler
-"""
-
-import sys, re
-from waflib import Errors
-from waflib.Configure import conf
-from waflib.Tools.compiler_c import c_compiler
-c_compiler['linux'].append('pgicc')
-
-@conf
-def find_pgi_compiler(conf, var, name):
-	"""
-	Find the compiler, and execute it to verify that it really is the PGI compiler.
-	"""
-	if sys.platform == 'cygwin':
-		conf.fatal('The PGI compiler does not work on Cygwin')
-
-	v = conf.env
-	cc = None
-	if v[var]:
-		cc = v[var]
-	elif var in conf.environ:
-		cc = conf.environ[var]
-	if not cc:
-		cc = conf.find_program(name, var=var)
-	if not cc:
-		conf.fatal('PGI Compiler (%s) was not found' % name)
-
-	v[var + '_VERSION'] = conf.get_pgi_version(cc)
-	v[var] = cc
-	v[var + '_NAME'] = 'pgi'
-
-@conf
-def get_pgi_version(conf, cc):
-	"""Find the version of a pgi compiler."""
-	version_re = re.compile(r"The Portland Group", re.I).search
-	cmd = cc + ['-V', '-E'] # Issue 1078, prevent wrappers from linking
-
-	try:
-		out, err = conf.cmd_and_log(cmd, output=0)
-	except Errors.WafError:
-		conf.fatal('Could not find pgi compiler %r' % cmd)
-
-	if out:
-		match = version_re(out)
-	else:
-		match = version_re(err)
-
-	if not match:
-		conf.fatal('Could not verify PGI signature')
-
-	cmd = cc + ['-help=variable']
-	try:
-		out, err = conf.cmd_and_log(cmd, output=0)
-	except Errors.WafError:
-		conf.fatal('Could not find pgi compiler %r' % cmd)
-
-	version = re.findall(r'^COMPVER\s*=(.*)', out, re.M)
-	if len(version) != 1:
-		conf.fatal('Could not determine the compiler version')
-	return version[0]
-
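-# Note (illustrative value): the '-help=variable' output is expected to
-# contain a line such as
-#	COMPVER=13.10-0
-# from which the version string is extracted by the regex above.
-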
-def configure(conf):
-	conf.find_pgi_compiler('CC', 'pgcc')
-	conf.find_ar()
-	conf.gcc_common_flags()
-	conf.cc_load_tools()
-	conf.cc_add_flags()
-	conf.link_add_flags()
-
diff --git a/waflib/extras/pgicxx.py b/waflib/extras/pgicxx.py
deleted file mode 100644
index eae121c..0000000
--- a/waflib/extras/pgicxx.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Antoine Dechaume 2011
-
-"""
-Detect the PGI C++ compiler
-"""
-
-from waflib.Tools.compiler_cxx import cxx_compiler
-cxx_compiler['linux'].append('pgicxx')
-
-from waflib.extras import pgicc
-
-def configure(conf):
-	conf.find_pgi_compiler('CXX', 'pgCC')
-	conf.find_ar()
-	conf.gxx_common_flags()
-	conf.cxx_load_tools()
-	conf.cxx_add_flags()
-	conf.link_add_flags()
diff --git a/waflib/extras/proc.py b/waflib/extras/proc.py
deleted file mode 100644
index 764abec..0000000
--- a/waflib/extras/proc.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#! /usr/bin/env python
-# per rosengren 2011
-
-from os import environ, path
-from waflib import TaskGen, Utils
-
-def options(opt):
-	grp = opt.add_option_group('Oracle ProC Options')
-	grp.add_option('--oracle_home', action='store', default=environ.get('PROC_ORACLE'), help='Path to Oracle installation home (has bin/lib)')
-	grp.add_option('--tns_admin', action='store', default=environ.get('TNS_ADMIN'), help='Directory containing the server list (tnsnames.ora)')
-	grp.add_option('--connection', action='store', default='dummy-user/dummy-password@dummy-server', help='Format: user/password@server')
-
-def configure(cnf):
-	env = cnf.env
-	if not env.PROC_ORACLE:
-		env.PROC_ORACLE = cnf.options.oracle_home
-	if not env.PROC_TNS_ADMIN:
-		env.PROC_TNS_ADMIN = cnf.options.tns_admin
-	if not env.PROC_CONNECTION:
-		env.PROC_CONNECTION = cnf.options.connection
-	cnf.find_program('proc', var='PROC', path_list=env.PROC_ORACLE + path.sep + 'bin')
-
-def proc(tsk):
-	env = tsk.env
-	gen = tsk.generator
-	inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])
-
-	cmd = (
-		[env.PROC] +
-		['SQLCHECK=SEMANTICS'] +
-		(['SYS_INCLUDE=(' + ','.join(env.PROC_INCLUDES) + ')']
-			if env.PROC_INCLUDES else []) +
-		['INCLUDE=(' + ','.join(
-			[i.bldpath() for i in inc_nodes]
-		) + ')'] +
-		['userid=' + env.PROC_CONNECTION] +
-		['INAME=' + tsk.inputs[0].bldpath()] +
-		['ONAME=' + tsk.outputs[0].bldpath()]
-	)
-	exec_env = {
-		'ORACLE_HOME': env.PROC_ORACLE,
-		'LD_LIBRARY_PATH': env.PROC_ORACLE + path.sep + 'lib',
-	}
-	if env.PROC_TNS_ADMIN:
-		exec_env['TNS_ADMIN'] = env.PROC_TNS_ADMIN
-	return tsk.exec_command(cmd, env=exec_env)
-
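-# The generated command looks roughly like this (illustrative values):
-#	proc SQLCHECK=SEMANTICS INCLUDE=(inc) userid=user/password@server
-#		INAME=src/query.pc ONAME=src/query.c
-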
-TaskGen.declare_chain(
-	name = 'proc',
-	rule = proc,
-	ext_in = '.pc',
-	ext_out = '.c',
-)
-
diff --git a/waflib/extras/protoc.py b/waflib/extras/protoc.py
deleted file mode 100644
index 839c510..0000000
--- a/waflib/extras/protoc.py
+++ /dev/null
@@ -1,223 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Philipp Bender, 2012
-# Matt Clarkson, 2012
-
-import re, os
-from waflib.Task import Task
-from waflib.TaskGen import extension
-from waflib import Errors, Context, Logs
-
-"""
-A simple tool to integrate protocol buffers into your build system.
-
-Example for C++:
-
-    def configure(conf):
-        conf.load('compiler_cxx cxx protoc')
-
-    def build(bld):
-        bld(
-                features = 'cxx cxxprogram',
-                source   = 'main.cpp file1.proto proto/file2.proto',
-                includes = '. proto',
-                target   = 'executable')
-
-Example for Python:
-
-    def configure(conf):
-        conf.load('python protoc')
-
-    def build(bld):
-        bld(
-                features = 'py',
-                source   = 'main.py file1.proto proto/file2.proto',
-                protoc_includes  = 'proto')
-
-Example for both Python and C++ at same time:
-
-    def configure(conf):
-        conf.load('cxx python protoc')
-
-    def build(bld):
-        bld(
-                features = 'cxx py',
-                source   = 'file1.proto proto/file2.proto',
-                protoc_includes  = 'proto')	# or includes
-
-
-Example for Java:
-
-    def options(opt):
-        opt.load('java')
-
-    def configure(conf):
-        conf.load('python java protoc')
-        # Here you have to point to your protobuf-java JAR and have it in classpath
-        conf.env.CLASSPATH_PROTOBUF = ['protobuf-java-2.5.0.jar']
-
-    def build(bld):
-        bld(
-                features = 'javac protoc',
-                name = 'pbjava',
-                srcdir = 'inc/ src',	# directories used by javac
-                source   = ['inc/message_inc.proto', 'inc/message.proto'],
-					# source is used by protoc for .proto files
-                use = 'PROTOBUF',
-                protoc_includes = ['inc']) # for protoc to search dependencies
-
-
-Protoc includes passed via protoc_includes are either relative to the taskgen
-or to the project and are searched in this order.
-
-Include directories external to the waf project can also be passed to the
-tool by using protoc_extincludes
-
-                protoc_extincludes = ['/usr/include/pblib']
-
-
-Notes when using this tool:
-
-- protoc command line parsing is tricky.
-
-  The generated files can be put in subfolders which depend on
-  the order of the include paths.
-
-  Try to be simple when creating task generators
-  containing protoc stuff.
-
-"""
-
-class protoc(Task):
-	run_str = '${PROTOC} ${PROTOC_FL:PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${PROTOC_ST:PROTOC_INCPATHS} ${PROTOC_ST:PROTOC_EXTINCPATHS} ${SRC[0].bldpath()}'
-	color   = 'BLUE'
-	ext_out = ['.h', 'pb.cc', '.py', '.java']
-	def scan(self):
-		"""
-		Scan .proto dependencies
-		"""
-		node = self.inputs[0]
-
-		nodes = []
-		names = []
-		seen = []
-		search_nodes = []
-
-		if not node:
-			return (nodes, names)
-
-		if 'cxx' in self.generator.features:
-			search_nodes = self.generator.includes_nodes
-
-		if 'py' in self.generator.features or 'javac' in self.generator.features:
-			for incpath in getattr(self.generator, 'protoc_includes', []):
-				incpath_node = self.generator.path.find_node(incpath)
-				if incpath_node:
-					search_nodes.append(incpath_node)
-				else:
-					# Check if relative to top-level for extra tg dependencies
-					incpath_node = self.generator.bld.path.find_node(incpath)
-					if incpath_node:
-						search_nodes.append(incpath_node)
-					else:
-						raise Errors.WafError('protoc: include path %r does not exist' % incpath)
-
-
-		def parse_node(node):
-			if node in seen:
-				return
-			seen.append(node)
-			code = node.read().splitlines()
-			for line in code:
-				m = re.search(r'^import\s+"(.*)";.*(//)?.*', line)
-				if m:
-					dep = m.groups()[0]
-					for incnode in search_nodes:
-						found = incnode.find_resource(dep)
-						if found:
-							nodes.append(found)
-							parse_node(found)
-						else:
-							names.append(dep)
-
-		parse_node(node)
-		# Add also dependencies path to INCPATHS so protoc will find the included file
-		for deppath in nodes:
-			self.env.append_unique('INCPATHS', deppath.parent.bldpath())
-		return (nodes, names)
-
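-# For instance, a statement such as the following in a .proto source:
-#	import "proto/common.proto";
-# is matched by the scanner above; the imported file is resolved against the
-# search nodes and recorded as a dependency (or kept as a bare name if not found).
-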
-@extension('.proto')
-def process_protoc(self, node):
-	incdirs = []
-	out_nodes = []
-	protoc_flags = []
-
-	# ensure PROTOC_FLAGS is a list; a copy is used below anyway
-	self.env.PROTOC_FLAGS = self.to_list(self.env.PROTOC_FLAGS)
-
-	if 'cxx' in self.features:
-		cpp_node = node.change_ext('.pb.cc')
-		hpp_node = node.change_ext('.pb.h')
-		self.source.append(cpp_node)
-		out_nodes.append(cpp_node)
-		out_nodes.append(hpp_node)
-		protoc_flags.append('--cpp_out=%s' % node.parent.get_bld().bldpath())
-
-	if 'py' in self.features:
-		py_node = node.change_ext('_pb2.py')
-		self.source.append(py_node)
-		out_nodes.append(py_node)
-		protoc_flags.append('--python_out=%s' % node.parent.get_bld().bldpath())
-
-	if 'javac' in self.features:
-		# Make javac also pick up the java code generated in the build
-		if node.parent.get_bld() not in self.javac_task.srcdir:
-			self.javac_task.srcdir.append(node.parent.get_bld())
-
-		protoc_flags.append('--java_out=%s' % node.parent.get_bld().bldpath())
-
-	tsk = self.create_task('protoc', node, out_nodes)
-	tsk.env.append_value('PROTOC_FLAGS', protoc_flags)
-
-	if 'javac' in self.features:
-		self.javac_task.set_run_after(tsk)
-
-	# Instruct protoc where to search for included .proto files.
-	# For C++ the standard include dirs are used,
-	# but this does not apply to Python, for example
-	for incpath in getattr(self, 'protoc_includes', []):
-		incpath_node = self.path.find_node(incpath)
-		if incpath_node:
-			incdirs.append(incpath_node.bldpath())
-		else:
-			# Check if relative to top-level for extra tg dependencies
-			incpath_node = self.bld.path.find_node(incpath)
-			if incpath_node:
-				incdirs.append(incpath_node.bldpath())
-			else:
-				raise Errors.WafError('protoc: include path %r does not exist' % incpath)
-
-	tsk.env.PROTOC_INCPATHS = incdirs
-
-	# Include paths external to the waf project (ie. shared pb repositories)
-	tsk.env.PROTOC_EXTINCPATHS = getattr(self, 'protoc_extincludes', [])
-
-	# PR2115: protoc generates the output of .proto files in nested
-	# directories by canonicalizing paths. To avoid this we have to pass
-	# the directory of the .proto file as the first include path
-	tsk.env.prepend_value('INCPATHS', node.parent.bldpath())
-
-	use = getattr(self, 'use', '')
-	if not 'PROTOBUF' in use:
-		self.use = self.to_list(use) + ['PROTOBUF']
-
-def configure(conf):
-	conf.check_cfg(package='protobuf', uselib_store='PROTOBUF', args=['--cflags', '--libs'])
-	conf.find_program('protoc', var='PROTOC')
-	conf.start_msg('Checking for protoc version')
-	protocver = conf.cmd_and_log(conf.env.PROTOC + ['--version'], output=Context.BOTH)
-	protocver = ''.join(protocver).strip()
-	protocver = protocver[protocver.rfind(' ') + 1:]
-	conf.end_msg(protocver)
-	conf.env.PROTOC_MAJOR = protocver[:protocver.find('.')]
-	conf.env.PROTOC_ST = '-I%s'
-	conf.env.PROTOC_FL = '%s'
diff --git a/waflib/extras/pyqt5.py b/waflib/extras/pyqt5.py
deleted file mode 100644
index 80f43b8..0000000
--- a/waflib/extras/pyqt5.py
+++ /dev/null
@@ -1,241 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Federico Pellegrin, 2016-2018 (fedepell) adapted for Python
-
-"""
-This tool helps with finding Python Qt5 tools and libraries,
-and provides translation from QT5 files to Python code.
-
-The following snippet illustrates the tool usage::
-
-	def options(opt):
-		opt.load('py pyqt5')
-
-	def configure(conf):
-		conf.load('py pyqt5')
-
-	def build(bld):
-		bld(
-			features = 'py pyqt5',
-			source   = 'main.py textures.qrc aboutDialog.ui',
-		)
-
-Here, the UI description and resource files will be processed
-to generate code.
-
-Usage
-=====
-
-Load the "pyqt5" tool.
-
-Add the .qrc resource files or the .ui definition files to the sources
-list; they will be translated into Python code with the system tools
-(PyQt5, PySide2 and PyQt4 are searched in this order) and then compiled.
-"""
-
-try:
-	from xml.sax import make_parser
-	from xml.sax.handler import ContentHandler
-except ImportError:
-	has_xml = False
-	ContentHandler = object
-else:
-	has_xml = True
-
-import os
-from waflib.Tools import python
-from waflib import Task, Options
-from waflib.TaskGen import feature, extension
-from waflib.Configure import conf
-from waflib import Logs
-
-EXT_RCC = ['.qrc']
-"""
-File extension for the resource (.qrc) files
-"""
-
-EXT_UI  = ['.ui']
-"""
-File extension for the user interface (.ui) files
-"""
-
-
-class XMLHandler(ContentHandler):
-	"""
-	Parses ``.qrc`` files
-	"""
-	def __init__(self):
-		self.buf = []
-		self.files = []
-	def startElement(self, name, attrs):
-		if name == 'file':
-			self.buf = []
-	def endElement(self, name):
-		if name == 'file':
-			self.files.append(str(''.join(self.buf)))
-	def characters(self, cars):
-		self.buf.append(cars)
-
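-# A .qrc resource file is XML along these lines (illustrative content):
-#	<!DOCTYPE RCC><RCC version="1.0">
-#	<qresource>
-#		<file>textures/icon.png</file>
-#	</qresource>
-#	</RCC>
-# XMLHandler above collects the <file> entries into self.files.
-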
-@extension(*EXT_RCC)
-def create_pyrcc_task(self, node):
-	"Creates rcc and py task for ``.qrc`` files"
-	rcnode = node.change_ext('.py')
-	self.create_task('pyrcc', node, rcnode)
-	if getattr(self, 'install_from', None):
-		self.install_from = self.install_from.get_bld()
-	else:
-		self.install_from = self.path.get_bld()
-	self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
-	self.process_py(rcnode)
-
-@extension(*EXT_UI)
-def create_pyuic_task(self, node):
-	"Create uic tasks and py for user interface ``.ui`` definition files"
-	uinode = node.change_ext('.py')
-	self.create_task('ui5py', node, uinode)
-	if getattr(self, 'install_from', None):
-		self.install_from = self.install_from.get_bld()
-	else:
-		self.install_from = self.path.get_bld()
-	self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
-	self.process_py(uinode)
-
-@extension('.ts')
-def add_pylang(self, node):
-	"""Adds all the .ts file into ``self.lang``"""
-	self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
-
-@feature('pyqt5')
-def apply_pyqt5(self):
-	"""
-	The additional parameters are:
-
-	:param lang: list of translation files (\\*.ts) to process
-	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
-	:param langname: if given, transform the \\*.ts files into a .qrc files to include in the binary file
-	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
-	"""
-	if getattr(self, 'lang', None):
-		qmtasks = []
-		for x in self.to_list(self.lang):
-			if isinstance(x, str):
-				x = self.path.find_resource(x + '.ts')
-			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
-
-
-		if getattr(self, 'langname', None):
-			qmnodes = [k.outputs[0] for k in qmtasks]
-			rcnode = self.langname
-			if isinstance(rcnode, str):
-				rcnode = self.path.find_or_declare(rcnode + '.qrc')
-			t = self.create_task('qm2rcc', qmnodes, rcnode)
-			create_pyrcc_task(self, t.outputs[0])
-
-class pyrcc(Task.Task):
-	"""
-	Processes ``.qrc`` files
-	"""
-	color   = 'BLUE'
-	run_str = '${QT_PYRCC} ${SRC} -o ${TGT}'
-	ext_out = ['.py']
-
-	def rcname(self):
-		return os.path.splitext(self.inputs[0].name)[0]
-
-	def scan(self):
-		"""Parse the *.qrc* files"""
-		if not has_xml:
-			Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
-			return ([], [])
-
-		parser = make_parser()
-		curHandler = XMLHandler()
-		parser.setContentHandler(curHandler)
-		fi = open(self.inputs[0].abspath(), 'r')
-		try:
-			parser.parse(fi)
-		finally:
-			fi.close()
-
-		nodes = []
-		names = []
-		root = self.inputs[0].parent
-		for x in curHandler.files:
-			nd = root.find_resource(x)
-			if nd:
-				nodes.append(nd)
-			else:
-				names.append(x)
-		return (nodes, names)
-
-
-class ui5py(Task.Task):
-	"""
-	Processes ``.ui`` files for python
-	"""
-	color   = 'BLUE'
-	run_str = '${QT_PYUIC} ${SRC} -o ${TGT}'
-	ext_out = ['.py']
-
-class ts2qm(Task.Task):
-	"""
-	Generates ``.qm`` files from ``.ts`` files
-	"""
-	color   = 'BLUE'
-	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
-
-class qm2rcc(Task.Task):
-	"""
-	Generates ``.qrc`` files from ``.qm`` files
-	"""
-	color = 'BLUE'
-	after = 'ts2qm'
-	def run(self):
-		"""Create a qrc file including the inputs"""
-		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
-		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
-		self.outputs[0].write(code)
-
-def configure(self):
-	self.find_pyqt5_binaries()
-
-	# warn about this during the configuration too
-	if not has_xml:
-		Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
-
-@conf
-def find_pyqt5_binaries(self):
-	"""
-	Detects PyQt5 or pyside2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc
-	"""
-	env = self.env
-
-	if getattr(Options.options, 'want_pyside2', False):
-		self.find_program(['pyside2-uic'], var='QT_PYUIC')
-		self.find_program(['pyside2-rcc'], var='QT_PYRCC')
-		self.find_program(['pyside2-lupdate'], var='QT_PYLUPDATE')
-	elif getattr(Options.options, 'want_pyqt4', False):
-		self.find_program(['pyuic4'], var='QT_PYUIC')
-		self.find_program(['pyrcc4'], var='QT_PYRCC')
-		self.find_program(['pylupdate4'], var='QT_PYLUPDATE')
-	else:
-		self.find_program(['pyuic5','pyside2-uic','pyuic4'], var='QT_PYUIC')
-		self.find_program(['pyrcc5','pyside2-rcc','pyrcc4'], var='QT_PYRCC')
-		self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4'], var='QT_PYLUPDATE')
-
-	if not env.QT_PYUIC:
-		self.fatal('cannot find the uic compiler for python for qt5')
-
-	if not env.QT_PYRCC:
-		self.fatal('cannot find the rcc compiler for python for qt5')
-
-	self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
-
-def options(opt):
-	"""
-	Command-line options
-	"""
-	pyqt5opt=opt.add_option_group("Python QT5 Options")
-	pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use pyside2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after)')
-	pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
diff --git a/waflib/extras/pytest.py b/waflib/extras/pytest.py
deleted file mode 100644
index 7dd5a1a..0000000
--- a/waflib/extras/pytest.py
+++ /dev/null
@@ -1,225 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Calle Rosenquist, 2016-2018 (xbreak)
-
-"""
-Provides Python unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
-task via the **pytest** feature.
-
-To use pytest the following is needed:
-
-1. Load `pytest` and the dependency `waf_unit_test` tools.
-2. Create a task generator with feature `pytest` (not `test`) and customize behaviour with
-   the following attributes:
-
-   - `pytest_source`: Test input files.
-   - `ut_str`: Test runner command, e.g. ``${PYTHON} -B -m unittest discover`` or
-               if nose is used: ``${NOSETESTS} --no-byte-compile ${SRC}``.
-   - `ut_shell`: Determines if ``ut_str`` is executed in a shell. Default: False.
-   - `ut_cwd`: Working directory for test runner. Defaults to directory of
-               first ``pytest_source`` file.
-
-   Additionally the following `pytest` specific attributes are used in dependent taskgens:
-
-   - `pytest_path`: Node or string list of additional Python paths.
-   - `pytest_libpath`: Node or string list of additional library paths.
-
-The `use` dependencies are used for both update calculation and to populate
-the following environment variables for the `pytest` test runner:
-
-1. `PYTHONPATH` (`sys.path`) of any dependent taskgen that has the feature `py`:
-
-   - `install_from` attribute is used to determine where the root of the Python sources
-      are located. If `install_from` is not specified the default is to use the taskgen path
-      as the root.
-
-   - `pytest_path` attribute is used to manually specify additional Python paths.
-
-2. Dynamic linker search path variable (e.g. `LD_LIBRARY_PATH`) of any dependent taskgen with
-   non-static link_task.
-
-   - `pytest_libpath` attribute is used to manually specify additional linker paths.
-
-Note: `pytest` cannot automatically determine the correct `PYTHONPATH` for `pyext` taskgens
-      because the extension might be part of a Python package or used standalone:
-
-      - When used as part of another `py` package, the `PYTHONPATH` is provided by
-      that taskgen so no additional action is required.
-
-      - When used as a standalone module, the user needs to specify the `PYTHONPATH` explicitly
-      via the `pytest_path` attribute on the `pyext` taskgen.
-
-      For details c.f. the pytest playground examples.
-
-
-For example::
-
-    # A standalone Python C extension that demonstrates unit test environment population
-    # of PYTHONPATH and LD_LIBRARY_PATH/PATH/DYLD_LIBRARY_PATH.
-    #
-    # Note: `pytest_path` is provided here because pytest cannot automatically determine
-    # if the extension is part of another Python package or is used standalone.
-    bld(name         = 'foo_ext',
-        features     = 'c cshlib pyext',
-        source       = 'src/foo_ext.c',
-        target       = 'foo_ext',
-        pytest_path  = [ bld.path.get_bld() ])
-
-    # Python package under test that also depend on the Python module `foo_ext`
-    #
-    # Note: `install_from` is added automatically to `PYTHONPATH`.
-    bld(name         = 'foo',
-        features     = 'py',
-        use          = 'foo_ext',
-        source       = bld.path.ant_glob('src/foo/*.py'),
-        install_from = 'src')
-
-    # Unit test example using the built in module unittest and let that discover
-    # any test cases.
-    bld(name          = 'foo_test',
-        features      = 'pytest',
-        use           = 'foo',
-        pytest_source = bld.path.ant_glob('test/*.py'),
-        ut_str        = '${PYTHON} -B -m unittest discover')
-
-"""
-
-import os
-from waflib import Task, TaskGen, Errors, Utils, Logs
-from waflib.Tools import ccroot
-
-def _process_use_rec(self, name):
-	"""
-	Recursively process ``use`` for the task generator with name ``name``.
-	Used by pytest_process_use.
-	"""
-	if name in self.pytest_use_not or name in self.pytest_use_seen:
-		return
-	try:
-		tg = self.bld.get_tgen_by_name(name)
-	except Errors.WafError:
-		self.pytest_use_not.add(name)
-		return
-
-	self.pytest_use_seen.append(name)
-	tg.post()
-
-	for n in self.to_list(getattr(tg, 'use', [])):
-		_process_use_rec(self, n)
-
-
-@TaskGen.feature('pytest')
-@TaskGen.after_method('process_source', 'apply_link')
-def pytest_process_use(self):
-	"""
-	Process the ``use`` attribute, which contains a list of task generator names, and store
-	the paths that are later used to populate the unit test runtime environment.
-	"""
-	self.pytest_use_not = set()
-	self.pytest_use_seen = []
-	self.pytest_paths = [] # strings or Nodes
-	self.pytest_libpaths = [] # strings or Nodes
-	self.pytest_dep_nodes = []
-
-	names = self.to_list(getattr(self, 'use', []))
-	for name in names:
-		_process_use_rec(self, name)
-	
-	def extend_unique(lst, varlst):
-		ext = []
-		for x in varlst:
-			if x not in lst:
-				ext.append(x)
-		lst.extend(ext)
-
-	# Collect type specific info needed to construct a valid runtime environment
-	# for the test.
-	for name in self.pytest_use_seen:
-		tg = self.bld.get_tgen_by_name(name)
-
-		extend_unique(self.pytest_paths, Utils.to_list(getattr(tg, 'pytest_path', [])))
-		extend_unique(self.pytest_libpaths, Utils.to_list(getattr(tg, 'pytest_libpath', [])))
-
-		if 'py' in tg.features:
-			# Python dependencies are added to PYTHONPATH
-			pypath = getattr(tg, 'install_from', tg.path)
-
-			if 'buildcopy' in tg.features:
-				# Since buildcopy is used we assume that PYTHONPATH in build should be used,
-				# not source
-				extend_unique(self.pytest_paths, [pypath.get_bld().abspath()])
-
-				# Add buildcopy output nodes to dependencies
-				extend_unique(self.pytest_dep_nodes, [o for task in getattr(tg, 'tasks', []) \
-														for o in getattr(task, 'outputs', [])])
-			else:
-				# If buildcopy is not used, depend on sources instead
-				extend_unique(self.pytest_dep_nodes, tg.source)
-				extend_unique(self.pytest_paths, [pypath.abspath()])
-
-		if getattr(tg, 'link_task', None):
-			# For tasks with a link_task (C, C++, D, etc.) include their library paths:
-			if not isinstance(tg.link_task, ccroot.stlink_task):
-				extend_unique(self.pytest_dep_nodes, tg.link_task.outputs)
-				extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH)
-
-				if 'pyext' in tg.features:
-					# If the taskgen is extending Python we also want to add the interpreter libpath.
-					extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH_PYEXT)
-				else:
-					# Only add to libpath if the link task is not a Python extension
-					extend_unique(self.pytest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
-
-
-@TaskGen.feature('pytest')
-@TaskGen.after_method('pytest_process_use')
-def make_pytest(self):
-	"""
-	Creates a ``utest`` task with a populated environment for Python if not specified in ``ut_env``:
-
-	- Paths in `pytest_paths` attribute are used to populate PYTHONPATH
-	- Paths in `pytest_libpaths` attribute are used to populate the system library path (e.g. LD_LIBRARY_PATH)
-	"""
-	nodes = self.to_nodes(self.pytest_source)
-	tsk = self.create_task('utest', nodes)
-	
-	tsk.dep_nodes.extend(self.pytest_dep_nodes)
-	if getattr(self, 'ut_str', None):
-		self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
-		tsk.vars = lst + tsk.vars
-
-	if getattr(self, 'ut_cwd', None):
-		if isinstance(self.ut_cwd, str):
-			# we want a Node instance
-			if os.path.isabs(self.ut_cwd):
-				self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
-			else:
-				self.ut_cwd = self.path.make_node(self.ut_cwd)
-	else:
-		if tsk.inputs:
-			self.ut_cwd = tsk.inputs[0].parent
-		else:
-			raise Errors.WafError("no valid input files for pytest task, check pytest_source value")
-
-	if not self.ut_cwd.exists():
-		self.ut_cwd.mkdir()
-
-	if not hasattr(self, 'ut_env'):
-		self.ut_env = dict(os.environ)
-		def add_paths(var, lst):
-			# Add list of paths to a variable, lst can contain strings or nodes
-			lst = [ str(n) for n in lst ]
-			Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
-			self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
-
-		# Prepend dependency paths to PYTHONPATH and LD_LIBRARY_PATH
-		add_paths('PYTHONPATH', self.pytest_paths)
-
-		if Utils.is_win32:
-			add_paths('PATH', self.pytest_libpaths)
-		elif Utils.unversioned_sys_platform() == 'darwin':
-			add_paths('DYLD_LIBRARY_PATH', self.pytest_libpaths)
-			add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
-		else:
-			add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
-
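-# For instance (hypothetical paths): a dependency providing
-# pytest_path=['/bld/foo'] and a shared library in /bld/lib yields, on Linux,
-#	ut_env['PYTHONPATH']      = '/bld/foo' + os.pathsep + <inherited value>
-#	ut_env['LD_LIBRARY_PATH'] = '/bld/lib' + os.pathsep + <inherited value>
-# while Windows populates PATH and macOS additionally sets DYLD_LIBRARY_PATH.
-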
diff --git a/waflib/extras/qnxnto.py b/waflib/extras/qnxnto.py
deleted file mode 100644
index 1158124..0000000
--- a/waflib/extras/qnxnto.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Jérôme Carretero 2011 (zougloub)
-# QNX neutrino compatibility functions
-
-import sys, os
-from waflib import Utils
-
-class Popen(object):
-	"""
-	Popen cannot work on QNX from a threaded program:
-	forking in threads is not implemented in Neutrino.
-
-	Python's os.popen / spawn / fork do not work when running in threads (they do in the main program thread).
-
-	In waf, this happens mostly during the build, and those use cases
-	can be replaced by os.system() calls.
-	"""
-	__slots__ = ["prog", "kw", "popen", "verbose"]
-	verbose = 0
-	def __init__(self, prog, **kw):
-		try:
-			self.prog = prog
-			self.kw = kw
-			self.popen = None
-			if Popen.verbose:
-				sys.stdout.write("Popen created: %r, kw=%r..." % (prog, kw))
-
-			do_delegate = kw.get('stdout') == -1 and kw.get('stderr') == -1
-			if do_delegate:
-				if Popen.verbose:
-					print("Delegating to real Popen")
-				self.popen = self.real_Popen(prog, **kw)
-			else:
-				if Popen.verbose:
-					print("Emulating")
-		except Exception as e:
-			if Popen.verbose:
-				print("Exception: %s" % e)
-			raise
-
-	def __getattr__(self, name):
-		if Popen.verbose:
-			sys.stdout.write("Getattr: %s..." % name)
-		if name in Popen.__slots__:
-			return object.__getattribute__(self, name)
-		else:
-			if self.popen is not None:
-				if Popen.verbose:
-					print("from Popen")
-				return getattr(self.popen, name)
-			else:
-				if name == "wait":
-					return self.emu_wait
-				else:
-					raise Exception("subprocess emulation: not implemented: %s" % name)
-
-	def emu_wait(self):
-		if Popen.verbose:
-			print("emulated wait (%r kw=%r)" % (self.prog, self.kw))
-		if isinstance(self.prog, str):
-			cmd = self.prog
-		else:
-			cmd = " ".join(self.prog)
-		if 'cwd' in self.kw:
-			cmd = 'cd "%s" && %s' % (self.kw['cwd'], cmd)
-		return os.system(cmd)
-
-if sys.platform == "qnx6":
-	Popen.real_Popen = Utils.subprocess.Popen
-	Utils.subprocess.Popen = Popen
-
diff --git a/waflib/extras/qt4.py b/waflib/extras/qt4.py
deleted file mode 100644
index d19a4dd..0000000
--- a/waflib/extras/qt4.py
+++ /dev/null
@@ -1,695 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"""
-
-Tool Description
-================
-
-This tool helps with finding Qt4 tools and libraries,
-and also provides syntactic sugar for using Qt4 tools.
-
-The following snippet illustrates the tool usage::
-
-	def options(opt):
-		opt.load('compiler_cxx qt4')
-
-	def configure(conf):
-		conf.load('compiler_cxx qt4')
-
-	def build(bld):
-		bld(
-			features = 'qt4 cxx cxxprogram',
-			uselib   = 'QTCORE QTGUI QTOPENGL QTSVG',
-			source   = 'main.cpp textures.qrc aboutDialog.ui',
-			target   = 'window',
-		)
-
-Here, the UI description and resource files will be processed
-to generate code.
-
-Usage
-=====
-
-Load the "qt4" tool.
-
-You also need to edit your sources accordingly:
-
-- the normal way of doing things is to have your C++ files
-  include the .moc file.
-  This is regarded as the best practice (and provides much faster
-  compilations).
-  It also implies that the include paths have been set properly.
-
-- to have the include paths added automatically, use the following::
-
-     from waflib.TaskGen import feature, before_method, after_method
-     @feature('cxx')
-     @after_method('process_source')
-     @before_method('apply_incpaths')
-     def add_includes_paths(self):
-        incs = set(self.to_list(getattr(self, 'includes', '')))
-        for x in self.compiled_tasks:
-            incs.add(x.inputs[0].parent.path_from(self.path))
-        self.includes = sorted(incs)
-
-Note: another tool provides Qt processing that does not require
-.moc includes, see 'playground/slow_qt/'.
-
-A few options (--qt{dir,bin,...}) and environment variables
-(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
-tool path selection, etc; please read the source for more info.
-
-"""
-
-try:
-	from xml.sax import make_parser
-	from xml.sax.handler import ContentHandler
-except ImportError:
-	has_xml = False
-	ContentHandler = object
-else:
-	has_xml = True
-
-import os, sys
-from waflib.Tools import cxx
-from waflib import Task, Utils, Options, Errors, Context
-from waflib.TaskGen import feature, after_method, extension
-from waflib.Configure import conf
-from waflib import Logs
-
-MOC_H = ['.h', '.hpp', '.hxx', '.hh']
-"""
-File extensions associated to the .moc files
-"""
-
-EXT_RCC = ['.qrc']
-"""
-File extension for the resource (.qrc) files
-"""
-
-EXT_UI  = ['.ui']
-"""
-File extension for the user interface (.ui) files
-"""
-
-EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
-"""
-File extensions of C++ files that may require a .moc processing
-"""
-
-QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner"
-
-class qxx(Task.classes['cxx']):
-	"""
-	Each C++ file can have zero or several .moc files to create.
-	They are known only when the files are scanned (preprocessor).
-	To avoid scanning the C++ files each time (parsing C/C++), the results
-	are retrieved from the task cache (bld.node_deps/bld.raw_deps).
-	The moc tasks are also created *dynamically* during the build.
-	"""
-
-	def __init__(self, *k, **kw):
-		Task.Task.__init__(self, *k, **kw)
-		self.moc_done = 0
-
-	def runnable_status(self):
-		"""
-		Compute the task signature to make sure the scanner was executed. Create the
-		moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary),
-		then postpone the task execution (there is no need to recompute the task signature).
-		"""
-		if self.moc_done:
-			return Task.Task.runnable_status(self)
-		else:
-			for t in self.run_after:
-				if not t.hasrun:
-					return Task.ASK_LATER
-			self.add_moc_tasks()
-			return Task.Task.runnable_status(self)
-
-	def create_moc_task(self, h_node, m_node):
-		"""
-		If several libraries use the same classes, it is possible that moc will run several times (Issue 1318).
-		It is not possible to change the file names, but we can assume that the moc transformation will be identical,
-		and the moc tasks can be shared in a global cache.
-
-		The defines passed to moc will then depend on task generator order. If this is not acceptable, then
-		use the tool slow_qt4 instead (and enjoy the slow builds... :-( )
-		"""
-		try:
-			moc_cache = self.generator.bld.moc_cache
-		except AttributeError:
-			moc_cache = self.generator.bld.moc_cache = {}
-
-		try:
-			return moc_cache[h_node]
-		except KeyError:
-			tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
-			tsk.set_inputs(h_node)
-			tsk.set_outputs(m_node)
-
-			if self.generator:
-				self.generator.tasks.append(tsk)
-
-			# direct injection in the build phase (safe because called from the main thread)
-			gen = self.generator.bld.producer
-			gen.outstanding.append(tsk)
-			gen.total += 1
-
-			return tsk
-
-	def moc_h_ext(self):
-		ext = []
-		try:
-			ext = Options.options.qt_header_ext.split()
-		except AttributeError:
-			pass
-		if not ext:
-			ext = MOC_H
-		return ext
-
-	def add_moc_tasks(self):
-		"""
-		Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
-		"""
-		node = self.inputs[0]
-		bld = self.generator.bld
-
-		try:
-			# compute the signature once to know if there is a moc file to create
-			self.signature()
-		except KeyError:
-			# the moc file may be referenced somewhere else
-			pass
-		else:
-			# remove the signature, it must be recomputed with the moc task
-			delattr(self, 'cache_sig')
-
-		include_nodes = [node.parent] + self.generator.includes_nodes
-
-		moctasks = []
-		mocfiles = set()
-		for d in bld.raw_deps.get(self.uid(), []):
-			if not d.endswith('.moc'):
-				continue
-
-			# process that base.moc only once
-			if d in mocfiles:
-				continue
-			mocfiles.add(d)
-
-			# find the source associated with the moc file
-			h_node = None
-
-			base2 = d[:-4]
-			for x in include_nodes:
-				for e in self.moc_h_ext():
-					h_node = x.find_node(base2 + e)
-					if h_node:
-						break
-				if h_node:
-					m_node = h_node.change_ext('.moc')
-					break
-			else:
-				# foo.cpp -> foo.cpp.moc
-				for k in EXT_QT4:
-					if base2.endswith(k):
-						for x in include_nodes:
-							h_node = x.find_node(base2)
-							if h_node:
-								break
-						if h_node:
-							m_node = h_node.change_ext(k + '.moc')
-							break
-
-			if not h_node:
-				raise Errors.WafError('No source found for %r which is a moc file' % d)
-
-			# create the moc task
-			task = self.create_moc_task(h_node, m_node)
-			moctasks.append(task)
-
-		# simple scheduler dependency: run the moc task before others
-		self.run_after.update(set(moctasks))
-		self.moc_done = 1
-
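-# Example (illustrative): a raw dependency 'window.moc' is matched against
-# window.h/.hpp/... in the include nodes, while 'main.cpp.moc' maps back to
-# main.cpp itself; in both cases a shared moc task produces the .moc file
-# before this compile task runs.
-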
-class trans_update(Task.Task):
-	"""Update a .ts files from a list of C++ files"""
-	run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
-	color   = 'BLUE'
-
-class XMLHandler(ContentHandler):
-	"""
-	Parser for *.qrc* files
-	"""
-	def __init__(self):
-		self.buf = []
-		self.files = []
-	def startElement(self, name, attrs):
-		if name == 'file':
-			self.buf = []
-	def endElement(self, name):
-		if name == 'file':
-			self.files.append(str(''.join(self.buf)))
-	def characters(self, cars):
-		self.buf.append(cars)
-
-@extension(*EXT_RCC)
-def create_rcc_task(self, node):
-	"Create rcc and cxx tasks for *.qrc* files"
-	rcnode = node.change_ext('_rc.cpp')
-	self.create_task('rcc', node, rcnode)
-	cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
-	try:
-		self.compiled_tasks.append(cpptask)
-	except AttributeError:
-		self.compiled_tasks = [cpptask]
-	return cpptask
-
-@extension(*EXT_UI)
-def create_uic_task(self, node):
-	"hook for uic tasks"
-	uictask = self.create_task('ui4', node)
-	uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
-
-@extension('.ts')
-def add_lang(self, node):
-	"""add all the .ts file into self.lang"""
-	self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
-
-@feature('qt4')
-@after_method('apply_link')
-def apply_qt4(self):
-	"""
-	Add MOC_FLAGS which may be necessary for moc::
-
-		def build(bld):
-			bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE')
-
-	The additional parameters are:
-
-	:param lang: list of translation files (\\*.ts) to process
-	:type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
-	:param update: whether to process the C++ files to update the \\*.ts files (use **waf --translate**)
-	:type update: bool
-	:param langname: if given, transform the \\*.ts files into a .qrc file to include in the binary file
-	:type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
-	"""
-	if getattr(self, 'lang', None):
-		qmtasks = []
-		for x in self.to_list(self.lang):
-			if isinstance(x, str):
-				x = self.path.find_resource(x + '.ts')
-			qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
-
-		if getattr(self, 'update', None) and Options.options.trans_qt4:
-			cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
-				a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
-			for x in qmtasks:
-				self.create_task('trans_update', cxxnodes, x.inputs)
-
-		if getattr(self, 'langname', None):
-			qmnodes = [x.outputs[0] for x in qmtasks]
-			rcnode = self.langname
-			if isinstance(rcnode, str):
-				rcnode = self.path.find_or_declare(rcnode + '.qrc')
-			t = self.create_task('qm2rcc', qmnodes, rcnode)
-			k = create_rcc_task(self, t.outputs[0])
-			self.link_task.inputs.append(k.outputs[0])
-
-	lst = []
-	for flag in self.to_list(self.env['CXXFLAGS']):
-		if len(flag) < 2:
-			continue
-		f = flag[0:2]
-		if f in ('-D', '-I', '/D', '/I'):
-			if (f[0] == '/'):
-				lst.append('-' + flag[1:])
-			else:
-				lst.append(flag)
-	self.env.append_value('MOC_FLAGS', lst)
-
-@extension(*EXT_QT4)
-def cxx_hook(self, node):
-	"""
-	Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task.
-	"""
-	return self.create_compiled_task('qxx', node)
-
-class rcc(Task.Task):
-	"""
-	Process *.qrc* files
-	"""
-	color   = 'BLUE'
-	run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
-	ext_out = ['.h']
-
-	def rcname(self):
-		return os.path.splitext(self.inputs[0].name)[0]
-
-	def scan(self):
-		"""Parse the *.qrc* files"""
-		if not has_xml:
-			Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
-			return ([], [])
-
-		parser = make_parser()
-		curHandler = XMLHandler()
-		parser.setContentHandler(curHandler)
-		fi = open(self.inputs[0].abspath(), 'r')
-		try:
-			parser.parse(fi)
-		finally:
-			fi.close()
-
-		nodes = []
-		names = []
-		root = self.inputs[0].parent
-		for x in curHandler.files:
-			nd = root.find_resource(x)
-			if nd:
-				nodes.append(nd)
-			else:
-				names.append(x)
-		return (nodes, names)
-
-class moc(Task.Task):
-	"""
-	Create *.moc* files
-	"""
-	color   = 'BLUE'
-	run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
-	def keyword(self):
-		return "Creating"
-	def __str__(self):
-		return self.outputs[0].path_from(self.generator.bld.launch_node())
-
-class ui4(Task.Task):
-	"""
-	Process *.ui* files
-	"""
-	color   = 'BLUE'
-	run_str = '${QT_UIC} ${SRC} -o ${TGT}'
-	ext_out = ['.h']
-
-class ts2qm(Task.Task):
-	"""
-	Create *.qm* files from *.ts* files
-	"""
-	color   = 'BLUE'
-	run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
-
-class qm2rcc(Task.Task):
-	"""
-	Transform *.qm* files into *.rc* files
-	"""
-	color = 'BLUE'
-	after = 'ts2qm'
-
-	def run(self):
-		"""Create a qrc file including the inputs"""
-		txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
-		code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
-		self.outputs[0].write(code)
-
-def configure(self):
-	"""
-	Besides the configuration options, the environment variable QT4_ROOT may be used
-	to give the location of the qt4 libraries (absolute path).
-
-	The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
-	"""
-	self.find_qt4_binaries()
-	self.set_qt4_libs_to_check()
-	self.set_qt4_defines()
-	self.find_qt4_libraries()
-	self.add_qt4_rpath()
-	self.simplify_qt4_libs()
-
-@conf
-def find_qt4_binaries(self):
-	env = self.env
-	opt = Options.options
-
-	qtdir = getattr(opt, 'qtdir', '')
-	qtbin = getattr(opt, 'qtbin', '')
-
-	paths = []
-
-	if qtdir:
-		qtbin = os.path.join(qtdir, 'bin')
-
-	# the qt directory has been given from QT4_ROOT - deduce the qt binary path
-	if not qtdir:
-		qtdir = os.environ.get('QT4_ROOT', '')
-		qtbin = os.environ.get('QT4_BIN') or os.path.join(qtdir, 'bin')
-
-	if qtbin:
-		paths = [qtbin]
-
-	# no qtdir, look in the path and in /usr/local/Trolltech
-	if not qtdir:
-		paths = os.environ.get('PATH', '').split(os.pathsep)
-		paths.append('/usr/share/qt4/bin/')
-		try:
-			lst = Utils.listdir('/usr/local/Trolltech/')
-		except OSError:
-			pass
-		else:
-			if lst:
-				lst.sort()
-				lst.reverse()
-
-				# keep the highest version
-				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
-				qtbin = os.path.join(qtdir, 'bin')
-				paths.append(qtbin)
-
-	# at the end, try to find qmake in the paths given
-	# keep the one with the highest version
-	cand = None
-	prev_ver = [4, 0, 0]
-	for qmk in ('qmake-qt4', 'qmake4', 'qmake'):
-		try:
-			qmake = self.find_program(qmk, path_list=paths)
-		except self.errors.ConfigurationError:
-			pass
-		else:
-			try:
-				version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
-			except self.errors.WafError:
-				pass
-			else:
-				if version:
-					# compare numerically: as strings, ['4', '10'] would sort before ['4', '9']
-					new_ver = [int(x) for x in version.split('.')]
-					if new_ver > prev_ver:
-						cand = qmake
-						prev_ver = new_ver
-	if cand:
-		self.env.QMAKE = cand
-	else:
-		self.fatal('Could not find qmake for qt4')
-
-	qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep
-
-	def find_bin(lst, var):
-		if var in env:
-			return
-		for f in lst:
-			try:
-				ret = self.find_program(f, path_list=paths)
-			except self.errors.ConfigurationError:
-				pass
-			else:
-				env[var] = ret
-				break
-
-	find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
-	find_bin(['uic-qt4', 'uic'], 'QT_UIC')
-	if not env.QT_UIC:
-		self.fatal('cannot find the uic compiler for qt4')
-
-	self.start_msg('Checking for uic version')
-	uicver = self.cmd_and_log(env.QT_UIC + ["-version"], output=Context.BOTH)
-	uicver = ''.join(uicver).strip()
-	uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
-	self.end_msg(uicver)
-	if uicver.find(' 3.') != -1:
-		self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
-
-	find_bin(['moc-qt4', 'moc'], 'QT_MOC')
-	find_bin(['rcc-qt4', 'rcc'], 'QT_RCC')
-	find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
-	find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
-
-	env['UIC3_ST'] = '%s -o %s'
-	env['UIC_ST'] = '%s -o %s'
-	env['MOC_ST'] = '-o'
-	env['ui_PATTERN'] = 'ui_%s.h'
-	env['QT_LRELEASE_FLAGS'] = ['-silent']
-	env.MOCCPPPATH_ST = '-I%s'
-	env.MOCDEFINES_ST = '-D%s'
-
-@conf
-def find_qt4_libraries(self):
-	qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR")
-	if not qtlibs:
-		try:
-			qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
-		except Errors.WafError:
-			qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
-			qtlibs = os.path.join(qtdir, 'lib')
-	self.msg('Found the Qt4 libraries in', qtlibs)
-
-	qtincludes = os.environ.get("QT4_INCLUDES") or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
-	env = self.env
-	if 'PKG_CONFIG_PATH' not in os.environ:
-		os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)
-
-	try:
-		if os.environ.get("QT4_XCOMPILE"):
-			raise self.errors.ConfigurationError()
-		self.check_cfg(atleast_pkgconfig_version='0.1')
-	except self.errors.ConfigurationError:
-		for i in self.qt4_vars:
-			uselib = i.upper()
-			if Utils.unversioned_sys_platform() == "darwin":
-				# Since at least Qt 4.7.3, each library is located in a separate framework directory
-				frameworkName = i + ".framework"
-				qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
-				if os.path.exists(qtDynamicLib):
-					env.append_unique('FRAMEWORK_' + uselib, i)
-					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
-				else:
-					self.msg('Checking for %s' % i, False, 'YELLOW')
-				env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
-			elif env.DEST_OS != "win32":
-				qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
-				qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
-				if os.path.exists(qtDynamicLib):
-					env.append_unique('LIB_' + uselib, i)
-					self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
-				elif os.path.exists(qtStaticLib):
-					env.append_unique('LIB_' + uselib, i)
-					self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
-				else:
-					self.msg('Checking for %s' % i, False, 'YELLOW')
-
-				env.append_unique('LIBPATH_' + uselib, qtlibs)
-				env.append_unique('INCLUDES_' + uselib, qtincludes)
-				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
-			else:
-				# Release library names are like QtCore4
-				for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
-					lib = os.path.join(qtlibs, k % i)
-					if os.path.exists(lib):
-						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
-						self.msg('Checking for %s' % i, lib, 'GREEN')
-						break
-				else:
-					self.msg('Checking for %s' % i, False, 'YELLOW')
-
-				env.append_unique('LIBPATH_' + uselib, qtlibs)
-				env.append_unique('INCLUDES_' + uselib, qtincludes)
-				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
-
-				# Debug library names are like QtCore4d
-				uselib = i.upper() + "_debug"
-				for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
-					lib = os.path.join(qtlibs, k % i)
-					if os.path.exists(lib):
-						env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
-						self.msg('Checking for %s' % i, lib, 'GREEN')
-						break
-				else:
-					self.msg('Checking for %s' % i, False, 'YELLOW')
-
-				env.append_unique('LIBPATH_' + uselib, qtlibs)
-				env.append_unique('INCLUDES_' + uselib, qtincludes)
-				env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
-	else:
-		for i in self.qt4_vars_debug + self.qt4_vars:
-			self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
-
-@conf
-def simplify_qt4_libs(self):
-	# the libpaths make really long command-lines
-	# remove the qtcore ones from qtgui, etc
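-	# e.g. if LIBPATH_QTGUI and LIBPATH_QTCORE both contain '/opt/qt4/lib',
-	# the duplicate entry is dropped from LIBPATH_QTGUI (illustrative path)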
-	env = self.env
-	def process_lib(vars_, coreval):
-		for d in vars_:
-			var = d.upper()
-			if var == 'QTCORE':
-				continue
-
-			value = env['LIBPATH_'+var]
-			if value:
-				core = env[coreval]
-				accu = []
-				for lib in value:
-					if lib in core:
-						continue
-					accu.append(lib)
-				env['LIBPATH_'+var] = accu
-
-	process_lib(self.qt4_vars,       'LIBPATH_QTCORE')
-	process_lib(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
-
-@conf
-def add_qt4_rpath(self):
-	# rpath if wanted
-	env = self.env
-	if getattr(Options.options, 'want_rpath', False):
-		def process_rpath(vars_, coreval):
-			for d in vars_:
-				var = d.upper()
-				value = env['LIBPATH_'+var]
-				if value:
-					core = env[coreval]
-					accu = []
-					for lib in value:
-						if var != 'QTCORE':
-							if lib in core:
-								continue
-						accu.append('-Wl,--rpath='+lib)
-					env['RPATH_'+var] = accu
-		process_rpath(self.qt4_vars,       'LIBPATH_QTCORE')
-		process_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
-
-@conf
-def set_qt4_libs_to_check(self):
-	if not hasattr(self, 'qt4_vars'):
-		self.qt4_vars = QT4_LIBS
-	self.qt4_vars = Utils.to_list(self.qt4_vars)
-	if not hasattr(self, 'qt4_vars_debug'):
-		self.qt4_vars_debug = [a + '_debug' for a in self.qt4_vars]
-	self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)
-
-@conf
-def set_qt4_defines(self):
-	if sys.platform != 'win32':
-		return
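-	# e.g. for x == 'QtCore' the loop below appends 'QT_CORE_LIB' to both
-	# DEFINES_QTCORE and DEFINES_QTCORE_DEBUG (illustrative)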
-	for x in self.qt4_vars:
-		y = x[2:].upper()
-		self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
-		self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)
-
-def options(opt):
-	"""
-	Command-line options
-	"""
-	opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
-
-	opt.add_option('--header-ext',
-		type='string',
-		default='',
-		help='header extension for moc files',
-		dest='qt_header_ext')
-
-	for i in 'qtdir qtbin qtlibs'.split():
-		opt.add_option('--'+i, type='string', default='', dest=i)
-
-	opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
-
diff --git a/waflib/extras/relocation.py b/waflib/extras/relocation.py
deleted file mode 100644
index 7e821f4..0000000
--- a/waflib/extras/relocation.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Waf 1.6
-
-Try to detect if the project directory was relocated, and if it was,
-change the node representing the project directory. Just call:
-
- waf configure build
-
-Note that if the project directory name changes, the signatures for the tasks using
-files in that directory will change, forcing a partial rebuild.
-"""
-
-import os
-from waflib import Build, ConfigSet, Task, Utils, Errors
-from waflib.TaskGen import feature, after_method
-
-EXTRA_LOCK = '.old_srcdir'
-
-old1 = Build.BuildContext.store
-def store(self):
-	old1(self)
-	db = os.path.join(self.variant_dir, EXTRA_LOCK)
-	env = ConfigSet.ConfigSet()
-	env.SRCDIR = self.srcnode.abspath()
-	env.store(db)
-Build.BuildContext.store = store
-
-old2 = Build.BuildContext.init_dirs
-def init_dirs(self):
-
-	if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
-		raise Errors.WafError('The project was not configured: run "waf configure" first!')
-
-	srcdir = None
-	db = os.path.join(self.variant_dir, EXTRA_LOCK)
-	env = ConfigSet.ConfigSet()
-	try:
-		env.load(db)
-		srcdir = env.SRCDIR
-	except:
-		pass
-
-	if srcdir:
-		d = self.root.find_node(srcdir)
-		if d and srcdir != self.top_dir and getattr(d, 'children', ''):
-			srcnode = self.root.make_node(self.top_dir)
-			print("relocating the source directory %r -> %r" % (srcdir, self.top_dir))
-			srcnode.children = {}
-
-			for (k, v) in d.children.items():
-				srcnode.children[k] = v
-				v.parent = srcnode
-			d.children = {}
-
-	old2(self)
-
-Build.BuildContext.init_dirs = init_dirs
-
-
-def uid(self):
-	try:
-		return self.uid_
-	except AttributeError:
-		# this is not a real hot zone, but we want to avoid surprises here
-		m = Utils.md5()
-		up = m.update
-		up(self.__class__.__name__.encode())
-		for x in self.inputs + self.outputs:
-			up(x.path_from(x.ctx.srcnode).encode())
-		self.uid_ = m.digest()
-		return self.uid_
-Task.Task.uid = uid
-
-@feature('c', 'cxx', 'd', 'go', 'asm', 'fc', 'includes')
-@after_method('propagate_uselib_vars', 'process_source')
-def apply_incpaths(self):
-	lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
-	self.includes_nodes = lst
-	bld = self.bld
-	self.env['INCPATHS'] = [x.is_child_of(bld.srcnode) and x.path_from(bld.bldnode) or x.abspath() for x in lst]
-
-
diff --git a/waflib/extras/remote.py b/waflib/extras/remote.py
deleted file mode 100644
index f43b600..0000000
--- a/waflib/extras/remote.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Remote Builds tool using rsync+ssh
-
-__author__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2013"
-
-"""
-Simple Remote Builds
-********************
-
-This tool is an *experimental* tool (meaning, do not even try to pollute
-the waf bug tracker with bugs in here, contact me directly) providing simple
-remote builds.
-
-It uses rsync and ssh to perform the remote builds.
-It is intended for performing cross-compilation on platforms where
-a cross-compiler is unavailable (eg. MacOS, QNX), where a specific product
-does not exist (eg. Windows builds using Visual Studio), or where one is simply not installed.
-This tool sends the sources and the waf script to the remote host,
-and commands the usual waf execution.
-
-There are alternatives to using this tool, such as setting up shared folders,
-logging on to remote machines, and building on the shared folders.
-Choosing one method over the other depends on the size of the program.
-
-
-Usage
-=====
-
-1. Set your wscript file so it includes a list of variants,
-   e.g.::
-
-     from waflib import Utils
-     top = '.'
-     out = 'build'
-
-     variants = [
-      'linux_64_debug',
-      'linux_64_release',
-      'linux_32_debug',
-      'linux_32_release',
-      ]
-
-     from waflib.extras import remote
-
-     def options(opt):
-         # normal stuff from here on
-         opt.load('compiler_c')
-
-     def configure(conf):
-         if not conf.variant:
-             return
-         # normal stuff from here on
-         conf.load('compiler_c')
-
-     def build(bld):
-         if not bld.variant:
-             return
-         # normal stuff from here on
-         bld(features='c cprogram', target='app', source='main.c')
-
-
-2. Build the waf file, so it includes this tool, and put it in the current
-   directory
-
-   .. code:: bash
-
-      ./waf-light --tools=remote
-
-3. Set the host names to access the hosts:
-
-   .. code:: bash
-
-      export REMOTE_QNX=user@kiunix
-
-4. Setup the ssh server and ssh keys
-
-   The ssh key should not be protected by a password, or it will prompt for it every time.
-   Create the key on the client:
-
-   .. code:: bash
-
-      ssh-keygen -t rsa -f foo.rsa
-
-   Then copy foo.rsa.pub to the remote machine (user@kiunix:/home/user/.ssh/authorized_keys),
-   and make sure the permissions are correct (chmod go-w ~ ~/.ssh ~/.ssh/authorized_keys)
-
-   A separate key for the build processes can be set in the environment variable WAF_SSH_KEY.
-   The tool will then use 'ssh-keyscan' to avoid prompting for remote hosts, so
-   use this feature on internal networks only (it is vulnerable to MITM attacks).
-
-   .. code:: bash
-
-      export WAF_SSH_KEY=~/foo.rsa
-
-5. Perform the build:
-
-   .. code:: bash
-
-      waf configure_all build_all --remote
-
-"""
-
-
-import getpass, os, re, sys
-from collections import OrderedDict
-from waflib import Context, Options, Utils, ConfigSet
-
-from waflib.Build import BuildContext, CleanContext, InstallContext, UninstallContext
-from waflib.Configure import ConfigurationContext
-
-
-is_remote = False
-if '--remote' in sys.argv:
-	is_remote = True
-	sys.argv.remove('--remote')
-
-class init(Context.Context):
-	"""
-	Generates the *_all commands
-	"""
-	cmd = 'init'
-	fun = 'init'
-	def execute(self):
-		for x in list(Context.g_module.variants):
-			self.make_variant(x)
-		lst = ['remote']
-		for k in Options.commands:
-			if k.endswith('_all'):
-				name = k.replace('_all', '')
-				for x in Context.g_module.variants:
-					lst.append('%s_%s' % (name, x))
-			else:
-				lst.append(k)
-		del Options.commands[:]
-		Options.commands += lst
-
-	def make_variant(self, x):
-		for y in (BuildContext, CleanContext, InstallContext, UninstallContext):
-			name = y.__name__.replace('Context','').lower()
-			class tmp(y):
-				cmd = name + '_' + x
-				fun = 'build'
-				variant = x
-		class tmp(ConfigurationContext):
-			cmd = 'configure_' + x
-			fun = 'configure'
-			variant = x
-			def __init__(self, **kw):
-				ConfigurationContext.__init__(self, **kw)
-				self.setenv(x)
-
-class remote(BuildContext):
-	cmd = 'remote'
-	fun = 'build'
-
-	def get_ssh_hosts(self):
-		lst = []
-		for v in Context.g_module.variants:
-			self.env.HOST = self.login_to_host(self.variant_to_login(v))
-			cmd = Utils.subst_vars('${SSH_KEYSCAN} -t rsa,ecdsa ${HOST}', self.env)
-			out, err = self.cmd_and_log(cmd, output=Context.BOTH, quiet=Context.BOTH)
-			lst.append(out.strip())
-		return lst
-
-	def setup_private_ssh_key(self):
-		"""
-		When WAF_SSH_KEY points to a private key, a .ssh directory will be created in the build directory
-		Make sure that the ssh key does not prompt for a password
-		"""
-		key = os.environ.get('WAF_SSH_KEY', '')
-		if not key:
-			return
-		if not os.path.isfile(key):
-			self.fatal('Key in WAF_SSH_KEY must point to a valid file')
-		self.ssh_dir = os.path.join(self.path.abspath(), 'build', '.ssh')
-		self.ssh_hosts = os.path.join(self.ssh_dir, 'known_hosts')
-		self.ssh_key = os.path.join(self.ssh_dir, os.path.basename(key))
-		self.ssh_config = os.path.join(self.ssh_dir, 'config')
-		for x in self.ssh_hosts, self.ssh_key, self.ssh_config:
-			if not os.path.isfile(x):
-				if not os.path.isdir(self.ssh_dir):
-					os.makedirs(self.ssh_dir)
-				Utils.writef(self.ssh_key, Utils.readf(key), 'wb')
-				os.chmod(self.ssh_key, 0o700)
-
-				Utils.writef(self.ssh_hosts, '\n'.join(self.get_ssh_hosts()))
-				os.chmod(self.ssh_hosts, 0o700)
-
-				Utils.writef(self.ssh_config, 'UserKnownHostsFile %s' % self.ssh_hosts, 'wb')
-				os.chmod(self.ssh_config, 0o700)
-		self.env.SSH_OPTS = ['-F', self.ssh_config, '-i', self.ssh_key]
-		self.env.append_value('RSYNC_SEND_OPTS', '--exclude=build/.ssh')
-
-	def skip_unbuildable_variant(self):
-		# skip variants that cannot be built on this OS
-		for k in Options.commands:
-			a, _, b = k.partition('_')
-			if b in Context.g_module.variants:
-				c, _, _ = b.partition('_')
-				if c != Utils.unversioned_sys_platform():
-					Options.commands.remove(k)
-
-	def login_to_host(self, login):
-		return re.sub(r'(\w+@)', '', login)
-
-	def variant_to_login(self, variant):
-		"""linux_32_debug -> search env.LINUX_32 and then env.LINUX"""
-		x = variant[:variant.rfind('_')]
-		ret = os.environ.get('REMOTE_' + x.upper(), '')
-		if not ret:
-			x = x[:x.find('_')]
-			ret = os.environ.get('REMOTE_' + x.upper(), '')
-		if not ret:
-			ret = '%s@localhost' % getpass.getuser()
-		return ret
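-		# e.g. with REMOTE_LINUX_32 unset and REMOTE_LINUX=user@host in the
-		# environment, variant_to_login('linux_32_debug') returns 'user@host'
-		# (illustrative values)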
-
-	def execute(self):
-		global is_remote
-		if not is_remote:
-			self.skip_unbuildable_variant()
-		else:
-			BuildContext.execute(self)
-
-	def restore(self):
-		self.top_dir = os.path.abspath(Context.g_module.top)
-		self.srcnode = self.root.find_node(self.top_dir)
-		self.path = self.srcnode
-
-		self.out_dir = os.path.join(self.top_dir, Context.g_module.out)
-		self.bldnode = self.root.make_node(self.out_dir)
-		self.bldnode.mkdir()
-
-		self.env = ConfigSet.ConfigSet()
-
-	def extract_groups_of_builds(self):
-		"""Return a dict mapping each variants to the commands to build"""
-		self.vgroups = {}
-		for x in reversed(Options.commands):
-			_, _, variant = x.partition('_')
-			if variant in Context.g_module.variants:
-				try:
-					dct = self.vgroups[variant]
-				except KeyError:
-					dct = self.vgroups[variant] = OrderedDict()
-				try:
-					dct[variant].append(x)
-				except KeyError:
-					dct[variant] = [x]
-				Options.commands.remove(x)
-
-	def custom_options(self, login):
-		try:
-			return Context.g_module.host_options[login]
-		except (AttributeError, KeyError):
-			return {}
-
-	def recurse(self, *k, **kw):
-		self.env.RSYNC = getattr(Context.g_module, 'rsync', 'rsync -a --chmod=u+rwx')
-		self.env.SSH = getattr(Context.g_module, 'ssh', 'ssh')
-		self.env.SSH_KEYSCAN = getattr(Context.g_module, 'ssh_keyscan', 'ssh-keyscan')
-		try:
-			self.env.WAF = getattr(Context.g_module, 'waf')
-		except AttributeError:
-			try:
-				os.stat('waf')
-			except OSError:
-				self.fatal('Put a waf file in the directory (./waf-light --tools=remote)')
-			else:
-				self.env.WAF = './waf'
-
-		self.extract_groups_of_builds()
-		self.setup_private_ssh_key()
-		for k, v in self.vgroups.items():
-			task = self(rule=rsync_and_ssh, always=True)
-			task.env.login = self.variant_to_login(k)
-
-			task.env.commands = []
-			for opt, value in v.items():
-				task.env.commands += value
-			task.env.variant = task.env.commands[0].partition('_')[2]
-			for opt, value in self.custom_options(k).items():
-				task.env[opt] = value
-		self.jobs = len(self.vgroups)
-
-	def make_mkdir_command(self, task):
-		return Utils.subst_vars('${SSH} ${SSH_OPTS} ${login} "rm -fr ${remote_dir} && mkdir -p ${remote_dir}"', task.env)
-
-	def make_send_command(self, task):
-		return Utils.subst_vars('${RSYNC} ${RSYNC_SEND_OPTS} -e "${SSH} ${SSH_OPTS}" ${local_dir} ${login}:${remote_dir}', task.env)
-
-	def make_exec_command(self, task):
-		txt = '''${SSH} ${SSH_OPTS} ${login} "cd ${remote_dir} && ${WAF} ${commands}"'''
-		return Utils.subst_vars(txt, task.env)
-
-	def make_save_command(self, task):
-		return Utils.subst_vars('${RSYNC} ${RSYNC_SAVE_OPTS} -e "${SSH} ${SSH_OPTS}" ${login}:${remote_dir_variant} ${build_dir}', task.env)
-
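-# Roughly, each variant task runs the four commands built above in order,
-# e.g. (host, hash and variant are illustrative):
-#   ssh user@host "rm -fr ~user/wafremote/<hash> && mkdir -p ~user/wafremote/<hash>"
-#   rsync -a --chmod=u+rwx -e "ssh" ./ user@host:~user/wafremote/<hash>
-#   ssh user@host "cd ~user/wafremote/<hash> && ./waf configure_linux_64_debug build_linux_64_debug"
-#   rsync -a -e "ssh" user@host:~user/wafremote/<hash>/build/linux_64_debug build/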
-def rsync_and_ssh(task):
-
-	# remove a warning
-	task.uid_ = id(task)
-
-	bld = task.generator.bld
-
-	task.env.user, _, _ = task.env.login.partition('@')
-	task.env.hdir = Utils.to_hex(Utils.h_list((task.generator.path.abspath(), task.env.variant)))
-	task.env.remote_dir = '~%s/wafremote/%s' % (task.env.user, task.env.hdir)
-	task.env.local_dir = bld.srcnode.abspath() + '/'
-
-	task.env.remote_dir_variant = '%s/%s/%s' % (task.env.remote_dir, Context.g_module.out, task.env.variant)
-	task.env.build_dir = bld.bldnode.abspath()
-
-	ret = task.exec_command(bld.make_mkdir_command(task))
-	if ret:
-		return ret
-	ret = task.exec_command(bld.make_send_command(task))
-	if ret:
-		return ret
-	ret = task.exec_command(bld.make_exec_command(task))
-	if ret:
-		return ret
-	ret = task.exec_command(bld.make_save_command(task))
-	if ret:
-		return ret
-
diff --git a/waflib/extras/resx.py b/waflib/extras/resx.py
deleted file mode 100644
index caf4d31..0000000
--- a/waflib/extras/resx.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-import os
-from waflib import Task
-from waflib.TaskGen import extension
-
-def configure(conf):
-	conf.find_program(['resgen'], var='RESGEN')
-	conf.env.RESGENFLAGS = '/useSourcePath'
-
-@extension('.resx')
-def resx_file(self, node):
-	"""
-	Bind the .resx extension to a resgen task
-	"""
-	if not getattr(self, 'cs_task', None):
-		self.bld.fatal('resx_file has no link task for use %r' % self)
-
-	# Given assembly 'Foo' and file 'Sub/Dir/File.resx', create 'Foo.Sub.Dir.File.resources'
-	assembly = getattr(self, 'namespace', os.path.splitext(self.gen)[0])
-	res = os.path.splitext(node.path_from(self.path))[0].replace('/', '.').replace('\\', '.')
-	out = self.path.find_or_declare(assembly + '.' + res + '.resources')
-
-	tsk = self.create_task('resgen', node, out)
-
-	self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
-	self.env.append_value('RESOURCES', tsk.outputs[0].bldpath())
-
-class resgen(Task.Task):
-	"""
-	Compile C# resource files
-	"""
-	color   = 'YELLOW'
-	run_str = '${RESGEN} ${RESGENFLAGS} ${SRC} ${TGT}'
diff --git a/waflib/extras/review.py b/waflib/extras/review.py
deleted file mode 100644
index 561e062..0000000
--- a/waflib/extras/review.py
+++ /dev/null
@@ -1,325 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Laurent Birtz, 2011
-# moved the code into a separate tool (ita)
-
-"""
-There are several things here:
-- a different command-line option management making options persistent
-- the review command to display the options set
-
-Assumptions:
-- configuration options are not always added to the right group (and do not count on the users to do it...)
-- the options are persistent between the executions (waf options are NOT persistent by design), even for the configuration
-- when the options change, the build is invalidated (forcing a reconfiguration)
-"""
-
-import os, textwrap, shutil
-from waflib import Logs, Context, ConfigSet, Options, Build, Configure
-
-class Odict(dict):
-	"""Ordered dictionary"""
-	def __init__(self, data=None):
-		self._keys = []
-		dict.__init__(self)
-		if data:
-			# we were provided a regular dict
-			if isinstance(data, dict):
-				self.append_from_dict(data)
-
-			# we were provided a tuple list
-			elif type(data) == list:
-				self.append_from_plist(data)
-
-			# we were provided invalid input
-			else:
-				raise Exception("expected a dict or a tuple list")
-
-	def append_from_dict(self, data):
-		# explicit loop: under Python 3, a bare map() is lazy and would never run
-		for k, v in data.items():
-			self.__setitem__(k, v)
-
-	def append_from_plist(self, plist):
-		for pair in plist:
-			if len(pair) != 2:
-				raise Exception("invalid pairs list")
-		for (k, v) in plist:
-			self.__setitem__(k, v)
-
-	def __delitem__(self, key):
-		if not key in self._keys:
-			raise KeyError(key)
-		dict.__delitem__(self, key)
-		self._keys.remove(key)
-
-	def __setitem__(self, key, item):
-		dict.__setitem__(self, key, item)
-		if key not in self._keys:
-			self._keys.append(key)
-
-	def clear(self):
-		dict.clear(self)
-		self._keys = []
-
-	def copy(self):
-		return Odict(self.plist())
-
-	def items(self):
-		return zip(self._keys, self.values())
-
-	def keys(self):
-		return list(self._keys) # return a copy of the list
-
-	def values(self):
-		return map(self.get, self._keys)
-
-	def plist(self):
-		p = []
-		for k, v in self.items():
-			p.append( (k, v) )
-		return p
-
-	def __str__(self):
-		buf = []
-		buf.append("{ ")
-		for k, v in self.items():
-			buf.append('%r : %r, ' % (k, v))
-		buf.append("}")
-		return ''.join(buf)
-
-review_options = Odict()
-"""
-Ordered dictionary mapping configuration option names to their optparse option.
-"""
-
-review_defaults = {}
-"""
-Dictionary mapping configuration option names to their default value.
-"""
-
-old_review_set = None
-"""
-Review set containing the configuration values before parsing the command line.
-"""
-
-new_review_set = None
-"""
-Review set containing the configuration values after parsing the command line.
-"""
-
-class OptionsReview(Options.OptionsContext):
-	def __init__(self, **kw):
-		super(self.__class__, self).__init__(**kw)
-
-	def prepare_config_review(self):
-		"""
-		Find the configuration options that are reviewable, detach
-		their default value from their optparse object and store them
-		into the review dictionaries.
-		"""
-		gr = self.get_option_group('configure options')
-		for opt in gr.option_list:
-			if opt.action != 'store' or opt.dest in ("out", "top"):
-				continue
-			review_options[opt.dest] = opt
-			review_defaults[opt.dest] = opt.default
-			if opt.dest in gr.defaults:
-				del gr.defaults[opt.dest]
-			opt.default = None
-
-	def parse_args(self):
-		self.prepare_config_review()
-		self.parser.get_option('--prefix').help = 'installation prefix'
-		super(OptionsReview, self).parse_args()
-		Context.create_context('review').refresh_review_set()
-
-class ReviewContext(Context.Context):
-	'''reviews the configuration values'''
-
-	cmd = 'review'
-
-	def __init__(self, **kw):
-		super(self.__class__, self).__init__(**kw)
-
-		out = Options.options.out
-		if not out:
-			out = getattr(Context.g_module, Context.OUT, None)
-		if not out:
-			out = Options.lockfile.replace('.lock-waf', '')
-		self.build_path = (os.path.isabs(out) and self.root or self.path).make_node(out).abspath()
-		"""Path to the build directory"""
-
-		self.cache_path = os.path.join(self.build_path, Build.CACHE_DIR)
-		"""Path to the cache directory"""
-
-		self.review_path = os.path.join(self.cache_path, 'review.cache')
-		"""Path to the review cache file"""
-
-	def execute(self):
-		"""
-		Display and store the review set. Invalidate the cache as required.
-		"""
-		if not self.compare_review_set(old_review_set, new_review_set):
-			self.invalidate_cache()
-		self.store_review_set(new_review_set)
-		print(self.display_review_set(new_review_set))
-
-	def invalidate_cache(self):
-		"""Invalidate the cache to prevent bad builds."""
-		try:
-			Logs.warn("Removing the cached configuration since the options have changed")
-			shutil.rmtree(self.cache_path)
-		except OSError:
-			pass
-
-	def refresh_review_set(self):
-		"""
-		Obtain the old review set and the new review set, and import the new set.
-		"""
-		global old_review_set, new_review_set
-		old_review_set = self.load_review_set()
-		new_review_set = self.update_review_set(old_review_set)
-		self.import_review_set(new_review_set)
-
-	def load_review_set(self):
-		"""
-		Load and return the review set from the cache if it exists.
-		Otherwise, return an empty set.
-		"""
-		if os.path.isfile(self.review_path):
-			return ConfigSet.ConfigSet(self.review_path)
-		return ConfigSet.ConfigSet()
-
-	def store_review_set(self, review_set):
-		"""
-		Store the review set specified in the cache.
-		"""
-		if not os.path.isdir(self.cache_path):
-			os.makedirs(self.cache_path)
-		review_set.store(self.review_path)
-
-	def update_review_set(self, old_set):
-		"""
-		Merge the options passed on the command line with those imported
-		from the previous review set, and return the resulting
-		review set.
-		"""
-
-		# Convert value to string. It's important that 'None' maps to
-		# the empty string.
-		def val_to_str(val):
-			if val is None or val == '':
-				return ''
-			return str(val)
-
-		new_set = ConfigSet.ConfigSet()
-		opt_dict = Options.options.__dict__
-
-		for name in review_options.keys():
-			# the option is specified explicitly on the command line
-			if name in opt_dict:
-				# if the option is the default, pretend it was never specified
-				if val_to_str(opt_dict[name]) != val_to_str(review_defaults[name]):
-					new_set[name] = opt_dict[name]
-			# the option was explicitly specified in a previous command
-			elif name in old_set:
-				new_set[name] = old_set[name]
-
-		return new_set
-
-	def import_review_set(self, review_set):
-		"""
-		Import the actual value of the reviewable options in the option
-		dictionary, given the current review set.
-		"""
-		for name in review_options.keys():
-			if name in review_set:
-				value = review_set[name]
-			else:
-				value = review_defaults[name]
-			setattr(Options.options, name, value)
-
-	def compare_review_set(self, set1, set2):
-		"""
-		Return true if the review sets specified are equal.
-		"""
-		if len(set1.keys()) != len(set2.keys()):
-			return False
-		for key in set1.keys():
-			if not key in set2 or set1[key] != set2[key]:
-				return False
-		return True
-
-	def display_review_set(self, review_set):
-		"""
-		Return the string representing the review set specified.
-		"""
-		term_width = Logs.get_term_cols()
-		lines = []
-		for dest in review_options.keys():
-			opt = review_options[dest]
-			name = ", ".join(opt._short_opts + opt._long_opts)
-			help = opt.help
-			actual = None
-			if dest in review_set:
-				actual = review_set[dest]
-			default = review_defaults[dest]
-			lines.append(self.format_option(name, help, actual, default, term_width))
-		return "Configuration:\n\n" + "\n\n".join(lines) + "\n"
-
-	def format_option(self, name, help, actual, default, term_width):
-		"""
-		Return the string representing the option specified.
-		"""
-		def val_to_str(val):
-			if val is None or val == '':
-				return "(void)"
-			return str(val)
-
-		max_name_len = 20
-		sep_len = 2
-
-		w = textwrap.TextWrapper()
-		w.width = term_width - 1
-		if w.width < 60:
-			w.width = 60
-
-		out = ""
-
-		# format the help
-		out += w.fill(help) + "\n"
-
-		# format the name
-		name_len = len(name)
-		out += Logs.colors.CYAN + name + Logs.colors.NORMAL
-
-		# set the indentation used when the value wraps to the next line
-		w.subsequent_indent = " ".rjust(max_name_len + sep_len)
-		w.width -= (max_name_len + sep_len)
-
-		# the name string is too long, switch to the next line
-		if name_len > max_name_len:
-			out += "\n" + w.subsequent_indent
-
-		# fill the remaining of the line with spaces
-		else:
-			out += " ".rjust(max_name_len + sep_len - name_len)
-
-		# format the actual value, if there is one
-		if actual is not None:
-			out += Logs.colors.BOLD + w.fill(val_to_str(actual)) + Logs.colors.NORMAL + "\n" + w.subsequent_indent
-
-		# format the default value
-		default_fmt = val_to_str(default)
-		if actual is not None:
-			default_fmt = "default: " + default_fmt
-		out += Logs.colors.NORMAL + w.fill(default_fmt) + Logs.colors.NORMAL
-
-		return out
-
-# Monkey-patch ConfigurationContext.execute() to have it store the review set.
-old_configure_execute = Configure.ConfigurationContext.execute
-def new_configure_execute(self):
-	old_configure_execute(self)
-	Context.create_context('review').store_review_set(new_review_set)
-Configure.ConfigurationContext.execute = new_configure_execute
-
diff --git a/waflib/extras/rst.py b/waflib/extras/rst.py
deleted file mode 100644
index f3c3a5e..0000000
--- a/waflib/extras/rst.py
+++ /dev/null
@@ -1,260 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Jérôme Carretero, 2013 (zougloub)
-
-"""
-reStructuredText support (experimental)
-
-Example::
-
-	def configure(conf):
-		conf.load('rst')
-		if not conf.env.RST2HTML:
-			conf.fatal('The program rst2html is required')
-
-	def build(bld):
-		bld(
-		 features = 'rst',
-		 type     = 'rst2html', # rst2html, rst2pdf, ...
-		 source   = 'index.rst', # mandatory, the source
-		 deps     = 'image.png', # to give additional non-trivial dependencies
-		)
-
-By default the tool looks for a set of programs in PATH.
-The tools are defined in `rst_progs`.
-To configure with a special program use::
-
-	$ RST2HTML=/path/to/rst2html waf configure
-
-This tool is experimental; don't hesitate to contribute to it.
-
-"""
-
-import re
-from waflib import Node, Utils, Task, Errors, Logs
-from waflib.TaskGen import feature, before_method
-
-rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split()
-
-def parse_rst_node(task, node, nodes, names, seen, dirs=None):
-	# TODO add extensibility, to handle custom rst include tags...
-	if dirs is None:
-		dirs = (node.parent, node.get_bld().parent)
-
-	if node in seen:
-		return
-	seen.append(node)
-	code = node.read()
-	re_rst = re.compile(r'^\s*\.\. ((?P<subst>\|\S+\|) )?(?P<type>include|image|figure):: (?P<file>.*)$', re.M)
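-	# Lines matched by this pattern look like (illustrative):
-	#   .. include:: chapter1.rst
-	#   .. image:: diagram.png
-	#   .. |logo| image:: logo.png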
-	for match in re_rst.finditer(code):
-		ipath = match.group('file')
-		itype = match.group('type')
-		Logs.debug('rst: visiting %s: %s', itype, ipath)
-		found = False
-		for d in dirs:
-			Logs.debug('rst: looking for %s in %s', ipath, d.abspath())
-			found = d.find_node(ipath)
-			if found:
-				Logs.debug('rst: found %s as %s', ipath, found.abspath())
-				nodes.append((itype, found))
-				if itype == 'include':
-					parse_rst_node(task, found, nodes, names, seen)
-				break
-		if not found:
-			names.append((itype, ipath))
-
-class docutils(Task.Task):
-	"""
-	Compile a rst file.
-	"""
-
-	def scan(self):
-		"""
-		A recursive regex-based scanner that finds rst dependencies.
-		"""
-
-		nodes = []
-		names = []
-		seen = []
-
-		node = self.inputs[0]
-
-		if not node:
-			return (nodes, names)
-
-		parse_rst_node(self, node, nodes, names, seen)
-
-		Logs.debug('rst: %r: found the following file deps: %r', self, nodes)
-		if names:
-			Logs.warn('rst: %r: could not find the following file deps: %r', self, names)
-
-		return ([v for (t,v) in nodes], [v for (t,v) in names])
-
-	def check_status(self, msg, retcode):
-		"""
-		Check an exit status and raise an error with a particular message
-
-		:param msg: message to display if the code is non-zero
-		:type msg: string
-		:param retcode: condition
-		:type retcode: boolean
-		"""
-		if retcode != 0:
-			raise Errors.WafError('%r command exit status %r' % (msg, retcode))
-
-	def run(self):
-		"""
-		Runs the rst compilation using docutils
-		"""
-		raise NotImplementedError()
-
-class rst2html(docutils):
-	color = 'BLUE'
-
-	def __init__(self, *args, **kw):
-		docutils.__init__(self, *args, **kw)
-		self.command = self.generator.env.RST2HTML
-		self.attributes = ['stylesheet']
-
-	def scan(self):
-		nodes, names = docutils.scan(self)
-
-		for attribute in self.attributes:
-			stylesheet = getattr(self.generator, attribute, None)
-			if stylesheet is not None:
-				ssnode = self.generator.to_nodes(stylesheet)[0]
-				nodes.append(ssnode)
-				Logs.debug('rst: adding dep to %s %s', attribute, stylesheet)
-
-		return nodes, names
-
-	def run(self):
-		cwdn = self.outputs[0].parent
-		src = self.inputs[0].path_from(cwdn)
-		dst = self.outputs[0].path_from(cwdn)
-
-		cmd = self.command + [src, dst]
-		cmd += Utils.to_list(getattr(self.generator, 'options', []))
-		for attribute in self.attributes:
-			stylesheet = getattr(self.generator, attribute, None)
-			if stylesheet is not None:
-				stylesheet = self.generator.to_nodes(stylesheet)[0]
-				cmd += ['--%s' % attribute, stylesheet.path_from(cwdn)]
-
-		return self.exec_command(cmd, cwd=cwdn.abspath())
-
-class rst2s5(rst2html):
-	def __init__(self, *args, **kw):
-		rst2html.__init__(self, *args, **kw)
-		self.command = self.generator.env.RST2S5
-		self.attributes = ['stylesheet']
-
-class rst2latex(rst2html):
-	def __init__(self, *args, **kw):
-		rst2html.__init__(self, *args, **kw)
-		self.command = self.generator.env.RST2LATEX
-		self.attributes = ['stylesheet']
-
-class rst2xetex(rst2html):
-	def __init__(self, *args, **kw):
-		rst2html.__init__(self, *args, **kw)
-		self.command = self.generator.env.RST2XETEX
-		self.attributes = ['stylesheet']
-
-class rst2pdf(docutils):
-	color = 'BLUE'
-	def run(self):
-		cwdn = self.outputs[0].parent
-		src = self.inputs[0].path_from(cwdn)
-		dst = self.outputs[0].path_from(cwdn)
-
-		cmd = self.generator.env.RST2PDF + [src, '-o', dst]
-		cmd += Utils.to_list(getattr(self.generator, 'options', []))
-
-		return self.exec_command(cmd, cwd=cwdn.abspath())
-
-
-@feature('rst')
-@before_method('process_source')
-def apply_rst(self):
-	"""
-	Create :py:class:`rst` or other rst-related task objects
-	"""
-
-	if self.target:
-		if isinstance(self.target, Node.Node):
-			tgt = self.target
-		elif isinstance(self.target, str):
-			tgt = self.path.get_bld().make_node(self.target)
-		else:
-			self.bld.fatal("rst: Don't know how to build target name %s which is not a string or Node for %s" % (self.target, self))
-	else:
-		tgt = None
-
-	tsk_type = getattr(self, 'type', None)
-
-	src = self.to_nodes(self.source)
-	assert len(src) == 1
-	src = src[0]
-
-	if tsk_type is not None and tgt is None:
-		if tsk_type.startswith('rst2'):
-			ext = tsk_type[4:]
-		else:
-			self.bld.fatal("rst: Could not detect the output file extension for %s" % self)
-		tgt = src.change_ext('.%s' % ext)
-	elif tsk_type is None and tgt is not None:
-		out = tgt.name
-		ext = out[out.rfind('.')+1:]
-		self.type = 'rst2' + ext
-	elif tsk_type is not None and tgt is not None:
-		# the user knows what he wants
-		pass
-	else:
-		self.bld.fatal("rst: Need to indicate task type or target name for %s" % self)
-
-	deps_lst = []
-
-	if getattr(self, 'deps', None):
-		deps = self.to_list(self.deps)
-		for filename in deps:
-			n = self.path.find_resource(filename)
-			if not n:
-				self.bld.fatal('Could not find %r for %r' % (filename, self))
-			if not n in deps_lst:
-				deps_lst.append(n)
-
-	try:
-		task = self.create_task(self.type, src, tgt)
-	except KeyError:
-		self.bld.fatal("rst: Task of type %s not implemented (created by %s)" % (self.type, self))
-
-	task.env = self.env
-
-	# add the manual dependencies
-	if deps_lst:
-		try:
-			lst = self.bld.node_deps[task.uid()]
-			for n in deps_lst:
-				if not n in lst:
-					lst.append(n)
-		except KeyError:
-			self.bld.node_deps[task.uid()] = deps_lst
-
-	inst_to = getattr(self, 'install_path', None)
-	if inst_to:
-		self.install_task = self.add_install_files(install_to=inst_to, install_from=task.outputs[:])
-
-	self.source = []
-
-def configure(self):
-	"""
-	Try to find the rst programs.
-
-	Do not raise any error if they are not found.
-	You'll have to use additional code in configure() to die
-	if programs were not found.
-	"""
-	for p in rst_progs:
-		self.find_program(p, mandatory=False)
-
diff --git a/waflib/extras/run_do_script.py b/waflib/extras/run_do_script.py
deleted file mode 100644
index 07e3aa2..0000000
--- a/waflib/extras/run_do_script.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Hans-Martin von Gaudecker, 2012
-
-"""
-Run a Stata do-script in the directory specified by **ctx.bldnode**. The
-first and only argument will be the name of the do-script (no extension),
-which can be accessed inside the do-script by the local macro `1'. Useful
-for keeping a log file.
-
-The tool uses the log file that is automatically kept by Stata only
-for error-catching purposes; it is destroyed if the task finishes
-without error. In case of an error in **some_script.do**, you can inspect
-it as **some_script.log** in the **ctx.bldnode** directory.
-
-Note that Stata will not return an error code if it exits abnormally -- 
-catching errors relies on parsing the log file mentioned before. Should
-the parser behave incorrectly, please send an email to hmgaudecker [at] gmail.
-
-**WARNING**
-
-    The tool will not work if multiple do-scripts of the same name---but in
-    different directories---are run at the same time! Avoid this situation.
-
-Usage::
-
-    ctx(features='run_do_script', 
-        source='some_script.do',
-        target=['some_table.tex', 'some_figure.eps'],
-        deps='some_data.csv')
-"""
-
-
-import os, re, sys
-from waflib import Task, TaskGen, Logs
-
-if sys.platform == 'darwin':
-	STATA_COMMANDS = ['Stata64MP', 'StataMP',
-								'Stata64SE', 'StataSE', 
-								'Stata64', 'Stata']
-	STATAFLAGS = '-e -q do'
-	STATAENCODING = 'MacRoman'
-elif sys.platform.startswith('linux'):
-	STATA_COMMANDS = ['stata-mp', 'stata-se', 'stata']
-	STATAFLAGS = '-b -q do'
-	# Not sure whether this is correct...
-	STATAENCODING = 'Latin-1'
-elif sys.platform.lower().startswith('win'):
-	STATA_COMMANDS = ['StataMP-64', 'StataMP-ia',
-								'StataMP', 'StataSE-64',
-								'StataSE-ia', 'StataSE',
-								'Stata-64', 'Stata-ia',
-								'Stata.e', 'WMPSTATA',
-								'WSESTATA', 'WSTATA']
-	STATAFLAGS = '/e do'
-	STATAENCODING = 'Latin-1'
-else:
-	raise Exception("Unknown sys.platform: %s " % sys.platform)
-
-def configure(ctx):
-	ctx.find_program(STATA_COMMANDS, var='STATACMD', errmsg="""\n
-No Stata executable found!\n\n
-If Stata is needed:\n
-	1) Check the settings of your system path.
-	2) Note we are looking for Stata executables called: %s
-	   If yours has a different name, please report to hmgaudecker [at] gmail\n
-Else:\n
-	Do not load the 'run_do_script' tool in the main wscript.\n\n""" % STATA_COMMANDS)
-	ctx.env.STATAFLAGS = STATAFLAGS
-	ctx.env.STATAENCODING = STATAENCODING
-
-class run_do_script_base(Task.Task):
-	"""Run a Stata do-script from the bldnode directory."""
-	run_str = '"${STATACMD}" ${STATAFLAGS} "${SRC[0].abspath()}" "${DOFILETRUNK}"'
-	shell = True
-
-class run_do_script(run_do_script_base):
-	"""Use the log file automatically kept by Stata for error-catching.
-	Erase it if the task finishes without error. If not, it will show
-	up as do_script.log in the bldnode directory.
-	"""
-	def run(self):
-		run_do_script_base.run(self)
-		ret, log_tail = self.check_erase_log_file()
-		if ret:
-			Logs.error("""Running Stata on %r failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
-				self.inputs[0], ret, self.env.LOGFILEPATH, log_tail)
-		return ret
-
-	def check_erase_log_file(self):
-		"""Parse Stata's default log file and erase it if everything okay.
-
-		Parser is based on Brendan Halpin's shell script found here:
-			http://teaching.sociology.ul.ie/bhalpin/wordpress/?p=122
-		"""
-
-		if sys.version_info.major >= 3:
-			kwargs = {'file': self.env.LOGFILEPATH, 'mode': 'r', 'encoding': self.env.STATAENCODING}
-		else:
-			kwargs = {'name': self.env.LOGFILEPATH, 'mode': 'r'}
-		with open(**kwargs) as log:
-			log_tail = log.readlines()[-10:]
-			for line in log_tail:
-				error_found = re.match(r"r\(([0-9]+)\)", line)
-				if error_found:
-					return error_found.group(1), ''.join(log_tail)
-		# Only end up here if the parser did not identify an error.
-		os.remove(self.env.LOGFILEPATH)
-		return None, None
-
-
-@TaskGen.feature('run_do_script')
-@TaskGen.before_method('process_source')
-def apply_run_do_script(tg):
-	"""Task generator customising the options etc. to call Stata in batch
-	mode for running a do-script.
-	"""
-
-	# Convert sources and targets to nodes
-	src_node = tg.path.find_resource(tg.source)
-	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
-
-	tsk = tg.create_task('run_do_script', src=src_node, tgt=tgt_nodes)
-	tsk.env.DOFILETRUNK = os.path.splitext(src_node.name)[0]
-	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s.log' % (tsk.env.DOFILETRUNK))
-
-	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
-	for x in tg.to_list(getattr(tg, 'deps', [])):
-		node = tg.path.find_resource(x)
-		if not node:
-			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
-		tsk.dep_nodes.append(node)
-	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
-
-	# Bypass the execution of process_source by setting the source to an empty list
-	tg.source = []
-
diff --git a/waflib/extras/run_m_script.py b/waflib/extras/run_m_script.py
deleted file mode 100644
index b5f27eb..0000000
--- a/waflib/extras/run_m_script.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Hans-Martin von Gaudecker, 2012
-
-"""
-Run a Matlab script.
-
-Note that the script is run in the directory where it lives -- Matlab won't
-allow it any other way.
-
-For error-catching purposes, the tool keeps its own log file, which is
-destroyed if the task finishes without error. If not, it will show up as
-mscript_[index].log in the bldnode directory.
-
-Usage::
-
-    ctx(features='run_m_script', 
-        source='some_script.m',
-        target=['some_table.tex', 'some_figure.eps'],
-        deps='some_data.mat')
-"""
-
-import os, sys
-from waflib import Task, TaskGen, Logs
-
-MATLAB_COMMANDS = ['matlab']
-
-def configure(ctx):
-	ctx.find_program(MATLAB_COMMANDS, var='MATLABCMD', errmsg = """\n
-No Matlab executable found!\n\n
-If Matlab is needed:\n
-    1) Check the settings of your system path.
-    2) Note we are looking for Matlab executables called: %s
-       If yours has a different name, please report to hmgaudecker [at] gmail\n
-Else:\n
-    Do not load the 'run_m_script' tool in the main wscript.\n\n"""  % MATLAB_COMMANDS)
-	ctx.env.MATLABFLAGS = '-wait -nojvm -nosplash -minimize'
-
-class run_m_script_base(Task.Task):
-	"""Run a Matlab script."""
-	run_str = '"${MATLABCMD}" ${MATLABFLAGS} -logfile "${LOGFILEPATH}" -r "try, ${MSCRIPTTRUNK}, exit(0), catch err, disp(err.getReport()), exit(1), end"'
-	shell = True
-
-class run_m_script(run_m_script_base):
-	"""Erase the Matlab overall log file if everything went okay, else raise an
-	error and print its last 10 lines.
-	"""
-	def run(self):
-		ret = run_m_script_base.run(self)
-		logfile = self.env.LOGFILEPATH
-		if ret:
-			# read bytes and decode explicitly: joining '\n' (str) with the
-			# bytes lines read in 'rb' mode would raise TypeError on Python 3
-			with open(logfile, mode='rb') as f:
-				tail = b''.join(f.readlines()[-10:]).decode('utf-8', 'replace')
-			Logs.error("""Running Matlab on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
-				self.inputs[0], ret, logfile, tail)
-		else:
-			os.remove(logfile)
-		return ret
-
-@TaskGen.feature('run_m_script')
-@TaskGen.before_method('process_source')
-def apply_run_m_script(tg):
-	"""Task generator customising the options etc. to call Matlab in batch
-	mode for running a m-script.
-	"""
-
-	# Convert sources and targets to nodes 
-	src_node = tg.path.find_resource(tg.source)
-	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
-
-	tsk = tg.create_task('run_m_script', src=src_node, tgt=tgt_nodes)
-	tsk.cwd = src_node.parent.abspath()
-	tsk.env.MSCRIPTTRUNK = os.path.splitext(src_node.name)[0]
-	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (tsk.env.MSCRIPTTRUNK, tg.idx))
-
-	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
-	for x in tg.to_list(getattr(tg, 'deps', [])):
-		node = tg.path.find_resource(x)
-		if not node:
-			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
-		tsk.dep_nodes.append(node)
-	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
-
-	# Bypass the execution of process_source by setting the source to an empty list
-	tg.source = []
diff --git a/waflib/extras/run_py_script.py b/waflib/extras/run_py_script.py
deleted file mode 100644
index 3670381..0000000
--- a/waflib/extras/run_py_script.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Hans-Martin von Gaudecker, 2012
-
-"""
-Run a Python script in the directory specified by **ctx.bldnode**.
-
-Select a Python version by specifying the **version** keyword for
-the task generator instance as integer 2 or 3. Default is 3.
-
-If the build environment has an attribute "PROJECT_PATHS" with
-a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
-The same goes for a string passed to the optional **add_to_pythonpath**
-keyword (appended after the PROJECT_ROOT).
-
-Usage::
-
-    ctx(features='run_py_script', version=3,
-        source='some_script.py',
-        target=['some_table.tex', 'some_figure.eps'],
-        deps='some_data.csv',
-        add_to_pythonpath='src/some/library')
-"""
-
-import os, re
-from waflib import Task, TaskGen, Logs
-
-
-def configure(conf):
-	"""TODO: Might need to be updated for Windows once
-	"PEP 397":http://www.python.org/dev/peps/pep-0397/ is settled.
-	"""
-	conf.find_program('python', var='PY2CMD', mandatory=False)
-	conf.find_program('python3', var='PY3CMD', mandatory=False)
-	if not conf.env.PY2CMD and not conf.env.PY3CMD:
-		conf.fatal("No Python interpreter found!")
-
-class run_py_2_script(Task.Task):
-	"""Run a Python 2 script."""
-	run_str = '${PY2CMD} ${SRC[0].abspath()}'
-	shell=True
-
-class run_py_3_script(Task.Task):
-	"""Run a Python 3 script."""
-	run_str = '${PY3CMD} ${SRC[0].abspath()}'
-	shell=True
-
-@TaskGen.feature('run_py_script')
-@TaskGen.before_method('process_source')
-def apply_run_py_script(tg):
-	"""Task generator for running either Python 2 or Python 3 on a single
-	script.
-
-	Attributes:
-
-		* source -- A **single** source node or string. (required)
-		* target -- A single target or list of targets (nodes or strings)
-		* deps -- A single dependency or list of dependencies (nodes or strings)
-		* add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable
-
-	If the build environment has an attribute "PROJECT_PATHS" with
-	a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
-	"""
-
-	# Set the Python version to use, default to 3.
-	v = getattr(tg, 'version', 3)
-	if v not in (2, 3):
-		raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v)
-
-	# Convert sources and targets to nodes
-	src_node = tg.path.find_resource(tg.source)
-	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
-
-	# Create the task.
-	tsk = tg.create_task('run_py_%d_script' %v, src=src_node, tgt=tgt_nodes)
-
-	# custom execution environment
-	# TODO use a list and os.pathsep.join(lst) at the end instead of concatenating strings
-	tsk.env.env = dict(os.environ)
-	tsk.env.env['PYTHONPATH'] = tsk.env.env.get('PYTHONPATH', '')
-	project_paths = getattr(tsk.env, 'PROJECT_PATHS', None)
-	if project_paths and 'PROJECT_ROOT' in project_paths:
-		tsk.env.env['PYTHONPATH'] += os.pathsep + project_paths['PROJECT_ROOT'].abspath()
-	if getattr(tg, 'add_to_pythonpath', None):
-		tsk.env.env['PYTHONPATH'] += os.pathsep + tg.add_to_pythonpath
-
-	# Clean up the PYTHONPATH -- replace double occurrences of path separator
-	tsk.env.env['PYTHONPATH'] = re.sub(os.pathsep + '+', os.pathsep, tsk.env.env['PYTHONPATH'])
-
-	# Clean up the PYTHONPATH -- Python doesn't like it starting with the path separator
-	if tsk.env.env['PYTHONPATH'].startswith(os.pathsep):
-		tsk.env.env['PYTHONPATH'] = tsk.env.env['PYTHONPATH'][1:]
-
-	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
-	for x in tg.to_list(getattr(tg, 'deps', [])):
-		node = tg.path.find_resource(x)
-		if not node:
-			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
-		tsk.dep_nodes.append(node)
-	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
-
-	# Bypass the execution of process_source by setting the source to an empty list
-	tg.source = []
-
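
As an aside, the TODO in apply_run_py_script about string concatenation could be addressed by collecting the PYTHONPATH entries in a list and joining once; a minimal sketch under that assumption (the helper name is illustrative, not part of the tool)::

    import os

    def build_pythonpath(existing, project_root=None, extra=None):
        # collect the candidate entries, then join once with os.pathsep;
        # filtering out empty strings also avoids a leading separator
        parts = [p for p in existing.split(os.pathsep) if p]
        for entry in (project_root, extra):
            if entry:
                parts.append(entry)
        return os.pathsep.join(parts)

    # build_pythonpath('', '/home/me/project', 'src/some/library')
    # -> '/home/me/project:src/some/library' on POSIX
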
diff --git a/waflib/extras/run_r_script.py b/waflib/extras/run_r_script.py
deleted file mode 100644
index b0d8f2b..0000000
--- a/waflib/extras/run_r_script.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Hans-Martin von Gaudecker, 2012
-
-"""
-Run an R script in the directory specified by **ctx.bldnode**.
-
-For error-catching purposes, keep a separate log file that is deleted if the
-task finishes without error. Otherwise, it will show up as [script]_[index].log
-in the bldnode directory.
-
-Usage::
-
-    ctx(features='run_r_script',
-        source='some_script.r',
-        target=['some_table.tex', 'some_figure.eps'],
-        deps='some_data.csv')
-"""
-
-
-import os
-from waflib import Task, TaskGen, Logs
-
-R_COMMANDS = ['RTerm', 'R', 'r']
-
-def configure(ctx):
-	ctx.find_program(R_COMMANDS, var='RCMD', errmsg = """\n
-No R executable found!\n\n
-If R is needed:\n
-	1) Check the settings of your system path.
-	2) Note we are looking for R executables called: %s
-	   If yours has a different name, please report to hmgaudecker [at] gmail\n
-Else:\n
-	Do not load the 'run_r_script' tool in the main wscript.\n\n"""  % R_COMMANDS)
-	ctx.env.RFLAGS = 'CMD BATCH --slave'
-
-class run_r_script_base(Task.Task):
-	"""Run a R script."""
-	run_str = '"${RCMD}" ${RFLAGS} "${SRC[0].abspath()}" "${LOGFILEPATH}"'
-	shell = True
-
-class run_r_script(run_r_script_base):
-	"""Erase the R overall log file if everything went okay, else raise an
-	error and print its 10 last lines.
-	"""
-	def run(self):
-		ret = run_r_script_base.run(self)
-		logfile = self.env.LOGFILEPATH
-		if ret:
-			# read the log as raw bytes and decode afterwards, so that
-			# joining the lines behaves the same on Python 2 and 3
-			with open(logfile, mode='rb') as f:
-				tail = b''.join(f.readlines()[-10:]).decode('utf-8', 'replace')
-			Logs.error("""Running R on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
-				self.inputs[0], ret, logfile, tail)
-		else:
-			os.remove(logfile)
-		return ret
-
-
-@TaskGen.feature('run_r_script')
-@TaskGen.before_method('process_source')
-def apply_run_r_script(tg):
-	"""Task generator customising the options etc. to call R in batch
-	mode for running a R script.
-	"""
-
-	# Convert sources and targets to nodes
-	src_node = tg.path.find_resource(tg.source)
-	tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
-
-	tsk = tg.create_task('run_r_script', src=src_node, tgt=tgt_nodes)
-	tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (os.path.splitext(src_node.name)[0], tg.idx))
-
-	# dependencies (if the attribute 'deps' changes, trigger a recompilation)
-	for x in tg.to_list(getattr(tg, 'deps', [])):
-		node = tg.path.find_resource(x)
-		if not node:
-			tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
-		tsk.dep_nodes.append(node)
-	Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
-
-	# Bypass the execution of process_source by setting the source to an empty list
-	tg.source = []
-
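
The read-as-bytes-then-decode pattern used in run() above can be exercised on its own; a small standalone sketch (the file name is made up)::

    def last_lines(path, count=10):
        # read raw bytes and decode afterwards, so the behaviour is the
        # same on Python 2 and 3 regardless of the log file encoding
        with open(path, 'rb') as f:
            tail = f.readlines()[-count:]
        return b''.join(tail).decode('utf-8', 'replace')

    # print(last_lines('some_script_0.log'))
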
diff --git a/waflib/extras/sas.py b/waflib/extras/sas.py
deleted file mode 100644
index 754c614..0000000
--- a/waflib/extras/sas.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Mark Coggeshall, 2010
-
-"SAS support"
-
-import os
-from waflib import Task, Errors, Logs
-from waflib.TaskGen import feature, before_method
-
-sas_fun, _ = Task.compile_fun('sas -sysin ${SRCFILE} -log ${LOGFILE} -print ${LSTFILE}', shell=False)
-
-class sas(Task.Task):
-	vars = ['SAS', 'SASFLAGS']
-	def run(task):
-		command = 'SAS'
-		fun = sas_fun
-
-		node = task.inputs[0]
-		logfilenode = node.change_ext('.log')
-		lstfilenode = node.change_ext('.lst')
-
-		# set the cwd
-		task.cwd = task.inputs[0].parent.get_src().abspath()
-		Logs.debug('runner: %r on %r', command, node)
-
-		SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
-		task.env.env = {'SASINPUTS': SASINPUTS}
-
-		task.env.SRCFILE = node.abspath()
-		task.env.LOGFILE = logfilenode.abspath()
-		task.env.LSTFILE = lstfilenode.abspath()
-		ret = fun(task)
-		if ret:
-			Logs.error('Running %s on %r returned a non-zero exit', command, node)
-			Logs.error('SRCFILE = %r', node)
-			Logs.error('LOGFILE = %r', logfilenode)
-			Logs.error('LSTFILE = %r', lstfilenode)
-		return ret
-
-@feature('sas')
-@before_method('process_source')
-def apply_sas(self):
-	if getattr(self, 'type', None) not in ('sas',):
-		self.type = 'sas'
-
-	self.env['logdir'] = getattr(self, 'logdir', 'log')
-	self.env['lstdir'] = getattr(self, 'lstdir', 'lst')
-
-	deps_lst = []
-
-	if getattr(self, 'deps', None):
-		deps = self.to_list(self.deps)
-		for filename in deps:
-			n = self.path.find_resource(filename)
-			if not n:
-				n = self.bld.root.find_resource(filename)
-			if not n:
-				raise Errors.WafError('cannot find input file %s for processing' % filename)
-			if n not in deps_lst:
-				deps_lst.append(n)
-
-	for node in self.to_nodes(self.source):
-		if self.type == 'sas':
-			task = self.create_task('sas', src=node)
-		task.dep_nodes = deps_lst
-	self.source = []
-
-def configure(self):
-	self.find_program('sas', var='SAS', mandatory=False)
-
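
A hypothetical wscript using this tool could look as follows (the file names are invented; SAS must have been found at configure time)::

    def configure(conf):
        conf.load('sas')

    def build(bld):
        bld(features='sas', source='report.sas', deps='macros.sas')
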
diff --git a/waflib/extras/satellite_assembly.py b/waflib/extras/satellite_assembly.py
deleted file mode 100644
index 005eb07..0000000
--- a/waflib/extras/satellite_assembly.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/python
-# encoding: utf-8
-# vim: tabstop=4 noexpandtab
-
-"""
-Create a satellite assembly from "*.??.txt" files. ?? stands for a language code.
-
-The project's Resources subfolder contains resources.??.txt string files for several languages.
-The build folder will hold the satellite assemblies as ./??/ExeName.resources.dll
-
-# gen names the template assembly (it is called gen because the tool also uses resx.py).
-bld(source='Resources/resources.de.txt', gen='ExeName')
-"""
-
-import os, re
-from waflib import Task
-from waflib.TaskGen import feature,before_method
-
-class al(Task.Task):
-	run_str = '${AL} ${ALFLAGS}'
-
-@feature('satellite_assembly')
-@before_method('process_source')
-def satellite_assembly(self):
-	if not getattr(self, 'gen', None):
-		self.bld.fatal('satellite_assembly needs a template assembly provided with the "gen" parameter')
-	res_lang = re.compile(r'(.*)\.(\w\w)\.(?:resx|txt)',flags=re.I)
-
-	# self.source can contain node objects, so this will break in one way or another
-	self.source = self.to_list(self.source)
-	for i, x in enumerate(self.source):
-		#x = 'resources/resources.de.resx'
-		#x = 'resources/resources.de.txt'
-		mo = res_lang.match(x)
-		if mo:
-			template = os.path.splitext(self.gen)[0]
-			templatedir, templatename = os.path.split(template)
-			res = mo.group(1)
-			lang = mo.group(2)
-			#./Resources/resources.de.resources
-			resources = self.path.find_or_declare(res+ '.' + lang + '.resources')
-			self.create_task('resgen', self.to_nodes(x), [resources])
-			#./de/Exename.resources.dll
-			satellite = self.path.find_or_declare(os.path.join(templatedir,lang,templatename) + '.resources.dll')
-			tsk = self.create_task('al',[resources],[satellite])
-			tsk.env.append_value('ALFLAGS','/template:'+os.path.join(self.path.relpath(),self.gen))
-			tsk.env.append_value('ALFLAGS','/embed:'+resources.relpath())
-			tsk.env.append_value('ALFLAGS','/culture:'+lang)
-			tsk.env.append_value('ALFLAGS','/out:'+satellite.relpath())
-			self.source[i] = None
-	# remove the None elements that we just substituted
-	self.source = list(filter(lambda x:x, self.source))
-
-def configure(ctx):
-	ctx.find_program('al', var='AL', mandatory=True)
-	ctx.load('resx')
-
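
The language detection hinges entirely on the res_lang pattern; a standalone check of how it splits a source path (illustrative only)::

    import re

    res_lang = re.compile(r'(.*)\.(\w\w)\.(?:resx|txt)', flags=re.I)
    mo = res_lang.match('Resources/resources.de.txt')
    print(mo.group(1))  # 'Resources/resources'
    print(mo.group(2))  # 'de'
    # the tool then declares 'Resources/resources.de.resources' and,
    # for gen='ExeName', 'de/ExeName.resources.dll'
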
diff --git a/waflib/extras/scala.py b/waflib/extras/scala.py
deleted file mode 100644
index a9880f0..0000000
--- a/waflib/extras/scala.py
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010 (ita)
-
-"""
-Scala support
-
-scalac outputs its files more or less where it wants to
-"""
-
-import os
-from waflib import Task, Utils, Node
-from waflib.TaskGen import feature, before_method, after_method
-
-from waflib.Tools import ccroot
-ccroot.USELIB_VARS['scalac'] = set(['CLASSPATH', 'SCALACFLAGS'])
-
-from waflib.Tools import javaw
-
-@feature('scalac')
-@before_method('process_source')
-def apply_scalac(self):
-
-	Utils.def_attrs(self, jarname='', classpath='',
-		sourcepath='.', srcdir='.',
-		jar_mf_attributes={}, jar_mf_classpath=[])
-
-	outdir = getattr(self, 'outdir', None)
-	if outdir:
-		if not isinstance(outdir, Node.Node):
-			outdir = self.path.get_bld().make_node(self.outdir)
-	else:
-		outdir = self.path.get_bld()
-	outdir.mkdir()
-	self.env['OUTDIR'] = outdir.abspath()
-
-	self.scalac_task = tsk = self.create_task('scalac')
-	tmp = []
-
-	srcdir = getattr(self, 'srcdir', '')
-	if isinstance(srcdir, Node.Node):
-		srcdir = [srcdir]
-	for x in Utils.to_list(srcdir):
-		if isinstance(x, Node.Node):
-			y = x
-		else:
-			y = self.path.find_dir(x)
-			if not y:
-				self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
-		tmp.append(y)
-	tsk.srcdir = tmp
-
-# reuse some code
-feature('scalac')(javaw.use_javac_files)
-after_method('apply_scalac')(javaw.use_javac_files)
-
-feature('scalac')(javaw.set_classpath)
-after_method('apply_scalac', 'use_javac_files')(javaw.set_classpath)
-
-
-SOURCE_RE = '**/*.scala'
-class scalac(javaw.javac):
-	color = 'GREEN'
-	vars    = ['CLASSPATH', 'SCALACFLAGS', 'SCALAC', 'OUTDIR']
-
-	def runnable_status(self):
-		"""
-		Wait for dependent tasks to be complete, then read the file system to find the input nodes.
-		"""
-		for t in self.run_after:
-			if not t.hasrun:
-				return Task.ASK_LATER
-
-		if not self.inputs:
-			global SOURCE_RE
-			self.inputs  = []
-			for x in self.srcdir:
-				self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
-		return super(javaw.javac, self).runnable_status()
-
-	def run(self):
-		"""
-		Execute the scalac compiler
-		"""
-		env = self.env
-		gen = self.generator
-		bld = gen.bld
-		wd = bld.bldnode.abspath()
-		def to_list(xx):
-			if isinstance(xx, str):
-				return [xx]
-			return xx
-		self.last_cmd = lst = []
-		lst.extend(to_list(env['SCALAC']))
-		lst.extend(['-classpath'])
-		lst.extend(to_list(env['CLASSPATH']))
-		lst.extend(['-d'])
-		lst.extend(to_list(env['OUTDIR']))
-		lst.extend(to_list(env['SCALACFLAGS']))
-		lst.extend([a.abspath() for a in self.inputs])
-		lst = [x for x in lst if x]
-		try:
-			self.out = self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, output=0, quiet=0)[1]
-		except Exception:
-			# re-run without capturing the output so the error is displayed
-			self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None)
-
-def configure(self):
-	"""
-	Detect the scalac program
-	"""
-	# If SCALA_HOME is set, we prepend it to the path list
-	java_path = self.environ['PATH'].split(os.pathsep)
-	v = self.env
-
-	if 'SCALA_HOME' in self.environ:
-		java_path = [os.path.join(self.environ['SCALA_HOME'], 'bin')] + java_path
-		self.env['SCALA_HOME'] = [self.environ['SCALA_HOME']]
-
-	for x in 'scalac scala'.split():
-		self.find_program(x, var=x.upper(), path_list=java_path)
-
-	if 'CLASSPATH' in self.environ:
-		v['CLASSPATH'] = self.environ['CLASSPATH']
-
-	v.SCALACFLAGS = ['-verbose']
-	if not v['SCALAC']:
-		self.fatal('scalac is required for compiling scala classes')
-
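
A hypothetical task generator using this tool (the attribute names are the ones handled above; the paths are invented)::

    def configure(conf):
        conf.load('scala')

    def build(bld):
        bld(features='scalac', srcdir='src', outdir='classes',
            classpath=['.', 'lib/scala-library.jar'])
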
diff --git a/waflib/extras/slow_qt4.py b/waflib/extras/slow_qt4.py
deleted file mode 100644
index ec7880b..0000000
--- a/waflib/extras/slow_qt4.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#! /usr/bin/env python
-# Thomas Nagy, 2011 (ita)
-
-"""
-Create _moc.cpp files
-
-Builds are 30-40% faster when .moc files are included directly,
-so you should NOT use this tool. If you really,
-really want it:
-
-def configure(conf):
-	conf.load('compiler_cxx qt4')
-	conf.load('slow_qt4')
-
-See playground/slow_qt/wscript for a complete example.
-"""
-
-from waflib.TaskGen import extension
-from waflib import Task
-import waflib.Tools.qt4
-import waflib.Tools.cxx
-
-@extension(*waflib.Tools.qt4.EXT_QT4)
-def cxx_hook(self, node):
-	return self.create_compiled_task('cxx_qt', node)
-
-class cxx_qt(Task.classes['cxx']):
-	def runnable_status(self):
-		ret = Task.classes['cxx'].runnable_status(self)
-		if ret != Task.ASK_LATER and not getattr(self, 'moc_done', None):
-
-			try:
-				cache = self.generator.moc_cache
-			except AttributeError:
-				cache = self.generator.moc_cache = {}
-
-			deps = self.generator.bld.node_deps[self.uid()]
-			for x in [self.inputs[0]] + deps:
-				if x.read().find('Q_OBJECT') > -1:
-
-					# process "foo.h -> foo.moc" only if "foo.cpp" is in the sources for the current task generator
-					# this code will work because it is in the main thread (runnable_status)
-					if x.name.rfind('.') > -1: # a .h file...
-						name = x.name[:x.name.rfind('.')]
-						for tsk in self.generator.compiled_tasks:
-							if tsk.inputs and tsk.inputs[0].name.startswith(name):
-								break
-						else:
-							# no corresponding file, continue
-							continue
-
-					# the file foo.cpp could be compiled for a static and a shared library - hence the %number in the name
-					cxx_node = x.parent.get_bld().make_node(x.name.replace('.', '_') + '_%d_moc.cpp' % self.generator.idx)
-					if cxx_node in cache:
-						continue
-					cache[cxx_node] = self
-
-					tsk = Task.classes['moc'](env=self.env, generator=self.generator)
-					tsk.set_inputs(x)
-					tsk.set_outputs(cxx_node)
-
-					if x.name.endswith('.cpp'):
-						# moc is trying to be too smart but it is too dumb:
-						# why force the #include when Q_OBJECT is in the cpp file?
-						gen = self.generator.bld.producer
-						gen.outstanding.append(tsk)
-						gen.total += 1
-						self.set_run_after(tsk)
-					else:
-						cxxtsk = Task.classes['cxx'](env=self.env, generator=self.generator)
-						cxxtsk.set_inputs(tsk.outputs)
-						cxxtsk.set_outputs(cxx_node.change_ext('.o'))
-						cxxtsk.set_run_after(tsk)
-
-						try:
-							self.more_tasks.extend([tsk, cxxtsk])
-						except AttributeError:
-							self.more_tasks = [tsk, cxxtsk]
-
-						try:
-							link = self.generator.link_task
-						except AttributeError:
-							pass
-						else:
-							link.set_run_after(cxxtsk)
-							link.inputs.extend(cxxtsk.outputs)
-							link.inputs.sort(key=lambda x: x.abspath())
-
-			self.moc_done = True
-
-		for t in self.run_after:
-			if not t.hasrun:
-				return Task.ASK_LATER
-
-		return ret
-
diff --git a/waflib/extras/softlink_libs.py b/waflib/extras/softlink_libs.py
deleted file mode 100644
index 50c777f..0000000
--- a/waflib/extras/softlink_libs.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#! /usr/bin/env python
-# per rosengren 2011
-
-from waflib.TaskGen import feature, after_method
-from waflib.Task import Task, always_run
-from os.path import basename, isabs
-from os import linesep
-# os.tmpfile does not exist on Python 3; tempfile works on both versions
-from tempfile import TemporaryFile as tmpfile
-
-def options(opt):
-	grp = opt.add_option_group('Softlink Libraries Options')
-	grp.add_option('--exclude', default='/usr/lib,/lib', help='No symbolic links are created for libs within [%default]')
-
-def configure(cnf):
-	cnf.find_program('ldd')
-	if not cnf.env.SOFTLINK_EXCLUDE:
-		cnf.env.SOFTLINK_EXCLUDE = cnf.options.exclude.split(',')
-
-@feature('softlink_libs')
-@after_method('process_rule')
-def add_finder(self):
-	tgt = self.path.find_or_declare(self.target)
-	self.create_task('sll_finder', tgt=tgt)
-	self.create_task('sll_installer', tgt=tgt)
-	always_run(sll_installer)
-
-class sll_finder(Task):
-	ext_out = 'softlink_libs'
-	def run(self):
-		bld = self.generator.bld
-		linked=[]
-		target_paths = []
-		for g in bld.groups:
-			for tgen in g:
-				# FIXME it might be better to check if there is a link_task (getattr?)
-				target_paths += [tgen.path.get_bld().bldpath()]
-				linked += [t.outputs[0].bldpath()
-					for t in getattr(tgen, 'tasks', [])
-					if t.__class__.__name__ in
-					['cprogram', 'cshlib', 'cxxprogram', 'cxxshlib']]
-		lib_list = []
-		if len(linked):
-			cmd = [self.env.LDD] + linked
-			# FIXME add DYLD_LIBRARY_PATH+PATH for osx+win32
-			ldd_env = {'LD_LIBRARY_PATH': ':'.join(target_paths + self.env.LIBPATH)}
-			# TemporaryFile supports the with syntax on both Python 2 and 3
-			with tmpfile() as result:
-				self.exec_command(cmd, env=ldd_env, stdout=result)
-				result.seek(0)
-				for line in result.readlines():
-					words = line.split()
-					if len(words) < 3 or words[1] != '=>':
-						continue
-					lib = words[2]
-					if lib == 'not':
-						continue
-					if any([lib.startswith(p) for p in
-							[bld.bldnode.abspath(), '('] +
-							self.env.SOFTLINK_EXCLUDE]):
-						continue
-					if not isabs(lib):
-						continue
-					lib_list.append(lib)
-			lib_list = sorted(set(lib_list))
-		self.outputs[0].write(linesep.join(lib_list + self.env.DYNAMIC_LIBS))
-		return 0
-
-class sll_installer(Task):
-	ext_in = 'softlink_libs'
-	def run(self):
-		tgt = self.outputs[0]
-		self.generator.bld.install_files('${LIBDIR}', tgt, postpone=False)
-		lib_list=tgt.read().split()
-		for lib in lib_list:
-			self.generator.bld.symlink_as('${LIBDIR}/'+basename(lib), lib, postpone=False)
-		return 0
-
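
The filtering in sll_finder boils down to parsing 'name => path (addr)' lines; a standalone sketch with a fabricated ldd line::

    line = 'libfoo.so.1 => /opt/foo/lib/libfoo.so.1 (0x00007f1a2c000000)'
    words = line.split()
    # keep only resolved paths; 'not' covers the 'not found' entries
    if len(words) >= 3 and words[1] == '=>' and words[2] != 'not':
        print(words[2])  # '/opt/foo/lib/libfoo.so.1'
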
diff --git a/waflib/extras/stale.py b/waflib/extras/stale.py
deleted file mode 100644
index cac3f46..0000000
--- a/waflib/extras/stale.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Thomas Nagy, 2006-2015 (ita)
-
-"""
-Add a pre-build hook to remove build files (declared in the system)
-that do not have a corresponding target
-
-This can be used for example to remove the targets
-that have changed name without performing
-a full 'waf clean'
-
-Of course, it will only work if there are no dynamically generated
-nodes/tasks, in which case the method will have to be modified
-to exclude some folders for example.
-
-Make sure to set bld.post_mode = waflib.Build.POST_AT_ONCE
-"""
-
-from waflib import Logs, Build
-from waflib.Runner import Parallel
-
-DYNAMIC_EXT = [] # add your non-cleanable files/extensions here
-MOC_H_EXTS = '.cpp .cxx .hpp .hxx .h'.split()
-
-def can_delete(node):
-	"""Imperfect moc cleanup which does not look for a Q_OBJECT macro in the files"""
-	if not node.name.endswith('.moc'):
-		return True
-	base = node.name[:-4]
-	p1 = node.parent.get_src()
-	p2 = node.parent.get_bld()
-	for k in MOC_H_EXTS:
-		h_name = base + k
-		n = p1.search_node(h_name)
-		if n:
-			return False
-		n = p2.search_node(h_name)
-		if n:
-			return False
-
-		# foo.cpp.moc, foo.h.moc, etc.
-		if base.endswith(k):
-			return False
-
-	return True
-
-# recursion over the nodes to find the stale files
-def stale_rec(node, nodes):
-	if node.abspath() in node.ctx.env[Build.CFG_FILES]:
-		return
-
-	if getattr(node, 'children', []):
-		for x in node.children.values():
-			if x.name != "c4che":
-				stale_rec(x, nodes)
-	else:
-		for ext in DYNAMIC_EXT:
-			if node.name.endswith(ext):
-				break
-		else:
-			if node not in nodes:
-				if can_delete(node):
-					Logs.warn('Removing stale file -> %r', node)
-					node.delete()
-
-old = Parallel.refill_task_list
-def refill_task_list(self):
-	iit = old(self)
-	bld = self.bld
-
-	# execute this operation only once
-	if getattr(self, 'stale_done', False):
-		return iit
-	self.stale_done = True
-
-	# this does not work in partial builds
-	if bld.targets != '*':
-		return iit
-
-	# this does not work in dynamic builds
-	if getattr(bld, 'post_mode', None) == Build.POST_LAZY:
-		return iit
-
-	# obtain the nodes to use during the build
-	nodes = []
-	for tasks in bld.groups:
-		for x in tasks:
-			try:
-				nodes.extend(x.outputs)
-			except AttributeError:
-				pass
-
-	stale_rec(bld.bldnode, nodes)
-	return iit
-
-Parallel.refill_task_list = refill_task_list
-
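
A minimal sketch of enabling the hook from a wscript, following the POST_AT_ONCE advice in the docstring (the target names are invented)::

    from waflib import Build

    def build(bld):
        bld.post_mode = Build.POST_AT_ONCE
        bld.load('stale')
        bld(features='c cprogram', source='main.c', target='app')
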
diff --git a/waflib/extras/stracedeps.py b/waflib/extras/stracedeps.py
deleted file mode 100644
index 37d82cb..0000000
--- a/waflib/extras/stracedeps.py
+++ /dev/null
@@ -1,174 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-Execute tasks through strace to obtain dependencies after the process is run. This
-scheme is similar to that of the Fabricate script.
-
-To use::
-
-  def configure(conf):
-     conf.load('stracedeps')
-
-WARNING:
-* This will not work when advanced scanners are needed (qt4/qt5)
-* The overhead of running 'strace' is significant (56s -> 1m29s)
-* It will not work on Windows :-)
-"""
-
-import os, re, threading
-from waflib import Task, Logs, Utils
-
-#TRACECALLS = 'trace=access,chdir,clone,creat,execve,exit_group,fork,lstat,lstat64,mkdir,open,rename,stat,stat64,symlink,vfork'
-TRACECALLS = 'trace=process,file'
-
-BANNED = ('/tmp', '/proc', '/sys', '/dev')
-
-s_process = r'(?:clone|fork|vfork)\(.*?(?P<npid>\d+)'
-s_file = r'(?P<call>\w+)\("(?P<path>([^"\\]|\\.)*)"(.*)'
-re_lines = re.compile(r'^(?P<pid>\d+)\s+(?:(?:%s)|(?:%s))\r*$' % (s_file, s_process), re.IGNORECASE | re.MULTILINE)
-strace_lock = threading.Lock()
-
-def configure(conf):
-	conf.find_program('strace')
-
-def task_method(func):
-	# Decorator function to bind/replace methods on the base Task class
-	#
-	# The methods Task.exec_command and Task.sig_implicit_deps already exist and are rarely overridden
-	# we thus expect that we are the only ones doing this
-	try:
-		setattr(Task.Task, 'nostrace_%s' % func.__name__, getattr(Task.Task, func.__name__))
-	except AttributeError:
-		pass
-	setattr(Task.Task, func.__name__, func)
-	return func
-
-@task_method
-def get_strace_file(self):
-	try:
-		return self.strace_file
-	except AttributeError:
-		pass
-
-	if self.outputs:
-		ret = self.outputs[0].abspath() + '.strace'
-	else:
-		ret = '%s%s%d%s' % (self.generator.bld.bldnode.abspath(), os.sep, id(self), '.strace')
-	self.strace_file = ret
-	return ret
-
-@task_method
-def get_strace_args(self):
-	return (self.env.STRACE or ['strace']) + ['-e', TRACECALLS, '-f', '-o', self.get_strace_file()]
-
-@task_method
-def exec_command(self, cmd, **kw):
-	bld = self.generator.bld
-	if 'cwd' not in kw:
-		kw['cwd'] = self.get_cwd()
-
-	args = self.get_strace_args()
-	fname = self.get_strace_file()
-	if isinstance(cmd, list):
-		cmd = args + cmd
-	else:
-		cmd = '%s %s' % (' '.join(args), cmd)
-
-	ret = bld.exec_command(cmd, **kw)
-	if not ret:
-		# parse the trace only if the command succeeded
-		self.parse_strace_deps(fname, kw['cwd'])
-	return ret
-
-@task_method
-def sig_implicit_deps(self):
-	# bypass the scanner functions
-	return
-
-@task_method
-def parse_strace_deps(self, path, cwd):
-	# uncomment the following line to disable the dependencies and force a file scan
-	# return
-	try:
-		cnt = Utils.readf(path)
-	finally:
-		try:
-			os.remove(path)
-		except OSError:
-			pass
-
-	if not isinstance(cwd, str):
-		cwd = cwd.abspath()
-
-	nodes = []
-	bld = self.generator.bld
-	try:
-		cache = bld.strace_cache
-	except AttributeError:
-		cache = bld.strace_cache = {}
-
-	# chdir and relative paths
-	pid_to_cwd = {}
-
-	global BANNED
-	done = set()
-	for m in re.finditer(re_lines, cnt):
-		# scraping the output of strace
-		pid = m.group('pid')
-		if m.group('npid'):
-			npid = m.group('npid')
-			pid_to_cwd[npid] = pid_to_cwd.get(pid, cwd)
-			continue
-
-		p = m.group('path').replace('\\"', '"')
-
-		if p == '.' or m.group().find('= -1 ENOENT') > -1:
-			# just to speed it up a bit
-			continue
-
-		if not os.path.isabs(p):
-			p = os.path.join(pid_to_cwd.get(pid, cwd), p)
-
-		call = m.group('call')
-		if call == 'chdir':
-			pid_to_cwd[pid] = p
-			continue
-
-		if p in done:
-			continue
-		done.add(p)
-
-		for x in BANNED:
-			if p.startswith(x):
-				break
-		else:
-			if p.endswith('/') or os.path.isdir(p):
-				continue
-
-			try:
-				node = cache[p]
-			except KeyError:
-				strace_lock.acquire()
-				try:
-					cache[p] = node = bld.root.find_node(p)
-					if not node:
-						continue
-				finally:
-					strace_lock.release()
-			nodes.append(node)
-
-	# record the dependencies then force the task signature recalculation for next time
-	if Logs.verbose:
-		Logs.debug('deps: real scanner for %r returned %r', self, nodes)
-	bld = self.generator.bld
-	bld.node_deps[self.uid()] = nodes
-	bld.raw_deps[self.uid()] = []
-	try:
-		del self.cache_sig
-	except AttributeError:
-		pass
-	self.signature()
-
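
The scraping relies entirely on re_lines; a standalone check against a fabricated strace line::

    import re

    s_process = r'(?:clone|fork|vfork)\(.*?(?P<npid>\d+)'
    s_file = r'(?P<call>\w+)\("(?P<path>([^"\\]|\\.)*)"(.*)'
    re_lines = re.compile(r'^(?P<pid>\d+)\s+(?:(?:%s)|(?:%s))\r*$' % (s_file, s_process),
        re.IGNORECASE | re.MULTILINE)

    m = re_lines.search('1234  open("/usr/include/stdio.h", O_RDONLY) = 3')
    print(m.group('pid'), m.group('call'), m.group('path'))
    # -> 1234 open /usr/include/stdio.h
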
diff --git a/waflib/extras/swig.py b/waflib/extras/swig.py
deleted file mode 100644
index 740ab46..0000000
--- a/waflib/extras/swig.py
+++ /dev/null
@@ -1,237 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Petar Forai
-# Thomas Nagy 2008-2010 (ita)
-
-import re
-from waflib import Task, Logs
-from waflib.TaskGen import extension, feature, after_method
-from waflib.Configure import conf
-from waflib.Tools import c_preproc
-
-"""
-tasks have to be added dynamically:
-- swig interface files may be created at runtime
-- the module name may be unknown in advance
-"""
-
-SWIG_EXTS = ['.swig', '.i']
-
-re_module = re.compile(r'%module(?:\s*\(.*\))?\s+(.+)', re.M)
-
-re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
-re_2 = re.compile(r'[#%](?:include|import(?:\(module=".*"\))+|python(?:begin|code)) [<"](.*)[">]', re.M)
-
-class swig(Task.Task):
-	color   = 'BLUE'
-	run_str = '${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}'
-	ext_out = ['.h'] # might produce .h files although it is not mandatory
-	vars = ['SWIG_VERSION', 'SWIGDEPS']
-
-	def runnable_status(self):
-		for t in self.run_after:
-			if not t.hasrun:
-				return Task.ASK_LATER
-
-		if not getattr(self, 'init_outputs', None):
-			self.init_outputs = True
-			if not getattr(self, 'module', None):
-				# search the module name
-				txt = self.inputs[0].read()
-				m = re_module.search(txt)
-				if not m:
-					raise ValueError("could not find the swig module name")
-				self.module = m.group(1)
-
-			swig_c(self)
-
-			# add the language-specific output files as nodes
-			# call funs in the dict swig_langs
-			for x in self.env['SWIGFLAGS']:
-				# obtain the language
-				x = x[1:]
-				try:
-					fun = swig_langs[x]
-				except KeyError:
-					pass
-				else:
-					fun(self)
-
-		return super(swig, self).runnable_status()
-
-	def scan(self):
-		"scan for swig dependencies, climb the .i files"
-		lst_src = []
-
-		seen = []
-		missing = []
-		to_see = [self.inputs[0]]
-
-		while to_see:
-			node = to_see.pop(0)
-			if node in seen:
-				continue
-			seen.append(node)
-			lst_src.append(node)
-
-			# read the file
-			code = node.read()
-			code = c_preproc.re_nl.sub('', code)
-			code = c_preproc.re_cpp.sub(c_preproc.repl, code)
-
-			# find .i files and project headers
-			names = re_2.findall(code)
-			for n in names:
-				for d in self.generator.includes_nodes + [node.parent]:
-					u = d.find_resource(n)
-					if u:
-						to_see.append(u)
-						break
-				else:
-					missing.append(n)
-		return (lst_src, missing)
-
-# provide additional language processing
-swig_langs = {}
-def swigf(fun):
-	swig_langs[fun.__name__.replace('swig_', '')] = fun
-	return fun
-swig.swigf = swigf
-
-def swig_c(self):
-	ext = '.swigwrap_%d.c' % self.generator.idx
-	flags = self.env['SWIGFLAGS']
-	if '-c++' in flags:
-		ext += 'xx'
-	out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
-
-	if '-c++' in flags:
-		c_tsk = self.generator.cxx_hook(out_node)
-	else:
-		c_tsk = self.generator.c_hook(out_node)
-
-	c_tsk.set_run_after(self)
-
-	# transfer weights from swig task to c task
-	if getattr(self, 'weight', None):
-		c_tsk.weight = self.weight
-	if getattr(self, 'tree_weight', None):
-		c_tsk.tree_weight = self.tree_weight
-
-	try:
-		self.more_tasks.append(c_tsk)
-	except AttributeError:
-		self.more_tasks = [c_tsk]
-
-	try:
-		ltask = self.generator.link_task
-	except AttributeError:
-		pass
-	else:
-		ltask.set_run_after(c_tsk)
-		# setting input nodes does not declare the build order
-		# because the build already started, but it sets
-		# the dependency to enable rebuilds
-		ltask.inputs.append(c_tsk.outputs[0])
-
-	self.outputs.append(out_node)
-
-	if '-o' not in self.env['SWIGFLAGS']:
-		self.env.append_value('SWIGFLAGS', ['-o', self.outputs[0].abspath()])
-
-@swigf
-def swig_python(tsk):
-	node = tsk.inputs[0].parent
-	if tsk.outdir:
-		node = tsk.outdir
-	tsk.set_outputs(node.find_or_declare(tsk.module+'.py'))
-
-@swigf
-def swig_ocaml(tsk):
-	node = tsk.inputs[0].parent
-	if tsk.outdir:
-		node = tsk.outdir
-	tsk.set_outputs(node.find_or_declare(tsk.module+'.ml'))
-	tsk.set_outputs(node.find_or_declare(tsk.module+'.mli'))
-
-@extension(*SWIG_EXTS)
-def i_file(self, node):
-	# the task instance
-	tsk = self.create_task('swig')
-	tsk.set_inputs(node)
-	tsk.module = getattr(self, 'swig_module', None)
-
-	flags = self.to_list(getattr(self, 'swig_flags', []))
-	tsk.env.append_value('SWIGFLAGS', flags)
-
-	tsk.outdir = None
-	if '-outdir' in flags:
-		outdir = flags[flags.index('-outdir')+1]
-		outdir = tsk.generator.bld.bldnode.make_node(outdir)
-		outdir.mkdir()
-		tsk.outdir = outdir
-
-@feature('c', 'cxx', 'd', 'fc', 'asm')
-@after_method('apply_link', 'process_source')
-def enforce_swig_before_link(self):
-	try:
-		link_task = self.link_task
-	except AttributeError:
-		pass
-	else:
-		for x in self.tasks:
-			if x.__class__.__name__ == 'swig':
-				link_task.run_after.add(x)
-
-@conf
-def check_swig_version(conf, minver=None):
-	"""
-	Check if the swig tool is found matching a given minimum version.
-	minver should be a tuple, eg. to check for swig >= 1.3.28 pass (1,3,28) as minver.
-
-	If successful, SWIG_VERSION is defined as 'MAJOR.MINOR'
-	(eg. '1.3') of the actual swig version found.
-
-	:param minver: minimum version
-	:type minver: tuple of int
-	:return: swig version
-	:rtype: tuple of int
-	"""
-	assert minver is None or isinstance(minver, tuple)
-	swigbin = conf.env['SWIG']
-	if not swigbin:
-		conf.fatal('could not find the swig executable')
-
-	# Get swig version string
-	cmd = swigbin + ['-version']
-	Logs.debug('swig: Running swig command %r', cmd)
-	reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
-	swig_out = conf.cmd_and_log(cmd)
-	swigver_tuple = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
-
-	# Compare swig version with the minimum required
-	result = (minver is None) or (swigver_tuple >= minver)
-
-	if result:
-		# Define useful environment variables
-		swigver = '.'.join([str(x) for x in swigver_tuple[:2]])
-		conf.env['SWIG_VERSION'] = swigver
-
-	# Feedback
-	swigver_full = '.'.join(map(str, swigver_tuple[:3]))
-	if minver is None:
-		conf.msg('Checking for swig version', swigver_full)
-	else:
-		minver_str = '.'.join(map(str, minver))
-		conf.msg('Checking for swig version >= %s' % (minver_str,), swigver_full, color=result and 'GREEN' or 'YELLOW')
-
-	if not result:
-		conf.fatal('The swig version is too old, expecting %r' % (minver,))
-
-	return swigver_tuple
-
-def configure(conf):
-	conf.find_program('swig', var='SWIG')
-	conf.env.SWIGPATH_ST = '-I%s'
-	conf.env.SWIGDEF_ST = '-D%s'
-
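
A hypothetical wscript wrapping a C++ library for Python (the names are invented; the pyext feature comes from the standard python tool)::

    def configure(conf):
        conf.load('compiler_cxx python swig')
        conf.check_python_headers()
        conf.check_swig_version((1, 3, 28))

    def build(bld):
        bld(features='cxx cxxshlib pyext',
            source='mylib.i helpers.cpp',
            swig_flags='-c++ -python',
            target='_mylib')
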
diff --git a/waflib/extras/syms.py b/waflib/extras/syms.py
deleted file mode 100644
index dfa0059..0000000
--- a/waflib/extras/syms.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-This tool supports export_symbols_regex, to restrict the symbols exported by a shared library.
-By default, all symbols are exported by gcc, and none by msvc.
-To use the tool, do something like:
-
-def build(ctx):
-	ctx(features='c cshlib syms', source='a.c b.c', export_symbols_regex='mylib_.*', target='testlib')
-
-only the symbols starting with 'mylib_' will be exported.
-"""
-
-import re
-from waflib.Context import STDOUT
-from waflib.Task import Task
-from waflib.Errors import WafError
-from waflib.TaskGen import feature, after_method
-
-class gen_sym(Task):
-	def run(self):
-		obj = self.inputs[0]
-		kw = {}
-
-		reg = getattr(self.generator, 'export_symbols_regex', '.+?')
-		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
-			re_nm = re.compile(r'External\s+\|\s+_(?P<symbol>%s)\b' % reg)
-			cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()]
-		else:
-			if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows
-				re_nm = re.compile(r'(T|D)\s+_(?P<symbol>%s)\b' % reg)
-			elif self.env.DEST_BINFMT=='mac-o':
-				re_nm=re.compile(r'(T|D)\s+(?P<symbol>_?%s)\b' % reg)
-			else:
-				re_nm = re.compile(r'(T|D)\s+(?P<symbol>%s)\b' % reg)
-			cmd = (self.env.NM or ['nm']) + ['-g', obj.abspath()]
-		syms = [m.group('symbol') for m in re_nm.finditer(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))]
-		self.outputs[0].write('%r' % syms)
-
-class compile_sym(Task):
-	def run(self):
-		syms = {}
-		for x in self.inputs:
-			slist = eval(x.read())
-			for s in slist:
-				syms[s] = 1
-		lsyms = list(syms.keys())
-		lsyms.sort()
-		if self.env.DEST_BINFMT == 'pe':
-			self.outputs[0].write('EXPORTS\n' + '\n'.join(lsyms))
-		elif self.env.DEST_BINFMT == 'elf':
-			self.outputs[0].write('{ global:\n' + ';\n'.join(lsyms) + ";\nlocal: *; };\n")
-		elif self.env.DEST_BINFMT=='mac-o':
-			self.outputs[0].write('\n'.join(lsyms) + '\n')
-		else:
-			raise WafError('NotImplemented')
-
-@feature('syms')
-@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local', 'propagate_uselib_vars')
-def do_the_symbol_stuff(self):
-	def_node = self.path.find_or_declare(getattr(self, 'sym_file', self.target + '.def'))
-	compiled_tasks = getattr(self, 'compiled_tasks', None)
-	if compiled_tasks:
-		ins = [x.outputs[0] for x in compiled_tasks]
-		self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
-		self.create_task('compile_sym', [x.outputs[0] for x in self.gen_sym_tasks], def_node)
-
-	link_task = getattr(self, 'link_task', None)
-	if link_task:
-		self.link_task.dep_nodes.append(def_node)
-
-		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
-			self.link_task.env.append_value('LINKFLAGS', ['/def:' + def_node.bldpath()])
-		elif self.env.DEST_BINFMT == 'pe':
-			# gcc on windows takes *.def as an additional input
-			self.link_task.inputs.append(def_node)
-		elif self.env.DEST_BINFMT == 'elf':
-			self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + def_node.bldpath()])
-		elif self.env.DEST_BINFMT=='mac-o':
-			self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,' + def_node.bldpath()])
-		else:
-			raise WafError('NotImplemented')
-
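
For instance, with export_symbols_regex='mylib_.*' on an ELF platform, compile_sym writes a version script along these lines (the symbol names are invented)::

    { global:
    mylib_free;
    mylib_init;
    local: *; };
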
diff --git a/waflib/extras/ticgt.py b/waflib/extras/ticgt.py
deleted file mode 100644
index f43a7ea..0000000
--- a/waflib/extras/ticgt.py
+++ /dev/null
@@ -1,300 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-# Texas Instruments code generator support (experimental)
-# When reporting issues, please directly assign the bug to the maintainer.
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2012"
-
-"""
-TI cgt6x is a compiler suite for TI DSPs.
-
-The toolchain does pretty weird things, and I'm sure I'm missing some of them.
-But still, the tool saves time.
-
-What this tool does is:
-
-- create a TI compiler environment
-- create TI compiler features, to handle some specifics about this compiler
-  It has a few idiosyncrasies, such as not allowing a free choice of the .o file names
-- automatically activate them when using the TI compiler
-- handle the tconf tool
-
-TODO:
-
-- the set_platform_flags() function is not nice
-- more tests
-- broaden tool scope, if needed
-
-"""
-
-import os, re
-
-from waflib import Options, Utils, Task, TaskGen
-from waflib.Tools import c, ccroot, c_preproc
-from waflib.Configure import conf
-from waflib.TaskGen import feature, before_method
-from waflib.Tools.c import cprogram
-
-opj = os.path.join
-
-@conf
-def find_ticc(conf):
-	conf.find_program(['cl6x'], var='CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
-	conf.env.CC_NAME = 'ticc'
-
-@conf
-def find_tild(conf):
-	conf.find_program(['lnk6x'], var='LINK_CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
-	conf.env.LINK_CC_NAME = 'tild'
-
-@conf
-def find_tiar(conf):
-	conf.find_program(['ar6x'], var='AR', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
-	conf.env.AR_NAME = 'tiar'
-	conf.env.ARFLAGS = 'qru'
-
-@conf
-def ticc_common_flags(conf):
-	v = conf.env
-
-	if not v['LINK_CC']:
-		v['LINK_CC'] = v['CC']
-	v['CCLNK_SRC_F']	 = []
-	v['CCLNK_TGT_F']	 = ['-o']
-	v['CPPPATH_ST']	  = '-I%s'
-	v['DEFINES_ST']	  = '-d%s'
-
-	v['LIB_ST']	      = '-l%s' # template for adding libs
-	v['LIBPATH_ST']	  = '-i%s' # template for adding libpaths
-	v['STLIB_ST']	    = '-l=%s.lib'
-	v['STLIBPATH_ST']	= '-i%s'
-
-	# program
-	v['cprogram_PATTERN']    = '%s.out'
-
-	# static lib
-	#v['LINKFLAGS_cstlib']    = ['-Wl,-Bstatic']
-	v['cstlib_PATTERN']      = '%s.lib'
-
-def configure(conf):
-	v = conf.env
-	v.TI_CGT_DIR = getattr(Options.options, 'ti-cgt-dir', "")
-	v.TI_DSPLINK_DIR = getattr(Options.options, 'ti-dsplink-dir', "")
-	v.TI_BIOSUTILS_DIR = getattr(Options.options, 'ti-biosutils-dir', "")
-	v.TI_DSPBIOS_DIR = getattr(Options.options, 'ti-dspbios-dir', "")
-	v.TI_XDCTOOLS_DIR = getattr(Options.options, 'ti-xdctools-dir', "")
-	conf.find_ticc()
-	conf.find_tiar()
-	conf.find_tild()
-	conf.ticc_common_flags()
-	conf.cc_load_tools()
-	conf.cc_add_flags()
-	conf.link_add_flags()
-	conf.find_program(['tconf'], var='TCONF', path_list=v.TI_XDCTOOLS_DIR)
-
-	conf.env.TCONF_INCLUDES += [
-	 opj(conf.env.TI_DSPBIOS_DIR, 'packages'),
-	]
-
-	conf.env.INCLUDES += [
-	 opj(conf.env.TI_CGT_DIR, 'include'),
-	]
-
-	conf.env.LIBPATH += [
-	 opj(conf.env.TI_CGT_DIR, "lib"),
-	]
-
-	conf.env.INCLUDES_DSPBIOS += [
-	 opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'include'),
-	]
-
-	conf.env.LIBPATH_DSPBIOS += [
-	 opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'lib'),
-	]
-
-	conf.env.INCLUDES_DSPLINK += [
-	 opj(conf.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc'),
-	]
-
-@conf
-def ti_set_debug(cfg, debug=1):
-	"""
-	Sets debug flags for the compiler.
-
-	TODO:
-	- for each TI CFLAG/INCLUDES/LINKFLAGS/LIBPATH replace RELEASE by DEBUG
-	- -g --no_compress
-	"""
-	if debug:
-		cfg.env.CFLAGS += "-d_DEBUG -dDEBUG -dDDSP_DEBUG".split()
-
-@conf
-def ti_dsplink_set_platform_flags(cfg, splat, dsp, dspbios_ver, board):
-	"""
-	Sets the INCLUDES, LINKFLAGS for DSPLINK and TCONF_INCLUDES
-	For the specific hardware.
-
-	Assumes that DSPLINK was built in its own folder.
-
-	:param splat: short platform name (eg. OMAPL138)
-	:param dsp: DSP name (eg. 674X)
-	:param dspbios_ver: string identifying DspBios version (eg. 5.XX)
-	:param board: board name (eg. OMAPL138GEM)
-
-	"""
-	d1 = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver)
-	d = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver, board)
-	cfg.env.TCONF_INCLUDES += [d1, d]
-	cfg.env.INCLUDES_DSPLINK += [
-	 opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', dsp),
-	 d,
-	]
-
-	cfg.env.LINKFLAGS_DSPLINK += [
-	 opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'export', 'BIN', 'DspBios', splat, board+'_0', 'RELEASE', 'dsplink%s.lib' % x)
-	 for x in ('', 'pool', 'mpcs', 'mplist', 'msg', 'data', 'notify', 'ringio')
-	]
-
-
-def options(opt):
-	opt.add_option('--with-ti-cgt', type='string', dest='ti-cgt-dir', help = 'Specify alternate cgt root folder', default="")
-	opt.add_option('--with-ti-biosutils', type='string', dest='ti-biosutils-dir', help = 'Specify alternate biosutils folder', default="")
-	opt.add_option('--with-ti-dspbios', type='string', dest='ti-dspbios-dir', help = 'Specify alternate dspbios folder', default="")
-	opt.add_option('--with-ti-dsplink', type='string', dest='ti-dsplink-dir', help = 'Specify alternate dsplink folder', default="")
-	opt.add_option('--with-ti-xdctools', type='string', dest='ti-xdctools-dir', help = 'Specify alternate xdctools folder', default="")
-
-class ti_cprogram(cprogram):
-	"""
-	Link object files into a c program
-	
-	Changes:
-
-	- the linked executable to have a relative path (because we can)
-	- put the LIBPATH first
-	"""
-	run_str = '${LINK_CC} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].bldpath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} '
-
-@feature("c")
-@before_method('apply_link')
-def use_ti_cprogram(self):
-	"""
-	Automatically uses ti_cprogram link process
-	"""
-	if 'cprogram' in self.features and self.env.CC_NAME == 'ticc':
-		self.features.insert(0, "ti_cprogram")
-
-class ti_c(Task.Task):
-	"""
-	Compile task for the TI codegen compiler
-
-	This compiler does not allow specifying the output file name, only the output path.
-
-	"""
-	"Compile C files into object files"
-	run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} -c ${OUT} ${CPPFLAGS}'
-	vars    = ['CCDEPS'] # unused variable to depend on, just in case
-	ext_in  = ['.h'] # set the build order easily by using ext_out=['.h']
-	scan    = c_preproc.scan
-
-def create_compiled_task(self, name, node):
-	"""
-	Overrides ccroot.create_compiled_task to support ti_c
-	"""
-	out = '%s' % (node.change_ext('.obj').name)
-	if self.env.CC_NAME == 'ticc':
-		name = 'ti_c'
-	task = self.create_task(name, node, node.parent.find_or_declare(out))
-	self.env.OUT = '-fr%s' % (node.parent.get_bld().abspath())
-	try:
-		self.compiled_tasks.append(task)
-	except AttributeError:
-		self.compiled_tasks = [task]
-	return task
-
-@TaskGen.extension('.c')
-def c_hook(self, node):
-	"Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
-	if self.env.CC_NAME == 'ticc':
-		return create_compiled_task(self, 'ti_c', node)
-	else:
-		return self.create_compiled_task('c', node)
-
-
-@feature("ti-tconf")
-@before_method('process_source')
-def apply_tconf(self):
-	sources = [x.get_src() for x in self.to_nodes(self.source, path=self.path.get_src())]
-	node = sources[0]
-	assert(sources[0].name.endswith(".tcf"))
-	if len(sources) > 1:
-		assert(sources[1].name.endswith(".cmd"))
-
-	target = getattr(self, 'target', self.source)
-	target_node = node.get_bld().parent.find_or_declare(node.name)
-	
-	procid = "%d" % int(getattr(self, 'procid', 0))
-
-	importpaths = []
-	includes = Utils.to_list(getattr(self, 'includes', []))
-	for x in includes + self.env.TCONF_INCLUDES:
-		if x == os.path.abspath(x):
-			importpaths.append(x)
-		else:
-			relpath = self.path.find_node(x).path_from(target_node.parent)
-			importpaths.append(relpath)
-
-	task = self.create_task('ti_tconf', sources, target_node.change_ext('.cdb'))
-	task.path = self.path
-	task.includes = includes
-	task.cwd = target_node.parent.abspath()
-	task.env = self.env.derive()
-	task.env["TCONFSRC"] = node.path_from(target_node.parent)
-	task.env["TCONFINC"] = '-Dconfig.importPath=%s' % ";".join(importpaths)
-	task.env['TCONFPROGNAME'] = '-Dconfig.programName=%s' % target
-	task.env['PROCID'] = procid
-	task.outputs = [
-	 target_node.change_ext("cfg_c.c"),
-	 target_node.change_ext("cfg.s62"),
-	 target_node.change_ext("cfg.cmd"),
-	]
-
-	create_compiled_task(self, 'ti_c', task.outputs[1])
-	ctask = create_compiled_task(self, 'ti_c', task.outputs[0])
-	ctask.env = self.env.derive()
-
-	self.add_those_o_files(target_node.change_ext("cfg.cmd"))
-	if len(sources) > 1:
-		self.add_those_o_files(sources[1])
-	self.source = []
-
-re_tconf_include = re.compile(r'(?P<type>utils\.importFile)\("(?P<file>.*)"\)',re.M)
-class ti_tconf(Task.Task):
-	run_str = '${TCONF} ${TCONFINC} ${TCONFPROGNAME} ${TCONFSRC} ${PROCID}'
-	color   = 'PINK'
-
-	def scan(self):
-		includes = Utils.to_list(getattr(self, 'includes', []))
-
-		def deps(node):
-			nodes, names = [], []
-			if node:
-				code = Utils.readf(node.abspath())
-				for match in re_tconf_include.finditer(code):
-					path = match.group('file')
-					if path:
-						for x in includes:
-							filename = opj(x, path)
-							fi = self.path.find_resource(filename)
-							if fi:
-								subnodes, subnames = deps(fi)
-								nodes += subnodes
-								names += subnames
-								nodes.append(fi)
-								names.append(path)
-								break
-			return nodes, names
-		return deps(self.inputs[0])
-
diff --git a/waflib/extras/unity.py b/waflib/extras/unity.py
deleted file mode 100644
index 78128ed..0000000
--- a/waflib/extras/unity.py
+++ /dev/null
@@ -1,108 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Compile whole groups of C/C++ files at once
-(C and C++ files are processed independently though).
-
-To enable globally::
-
-	def options(opt):
-		opt.load('compiler_cxx')
-	def build(bld):
-		bld.load('compiler_cxx unity')
-
-To enable for specific task generators only::
-
-	def build(bld):
-		bld(features='c cprogram unity', source='main.c', ...)
-
-The file order is often significant in such builds, so it can be
-necessary to adjust the order of source files and the batch sizes.
-To control the number of files processed in a batch per target
-(the default is 50)::
-
-	def build(bld):
-		bld(features='c cprogram', unity_size=20)
-
-"""
-
-from waflib import Task, Options
-from waflib.Tools import c_preproc
-from waflib import TaskGen
-
-MAX_BATCH = 50
-
-EXTS_C = ('.c',)
-EXTS_CXX = ('.cpp','.cc','.cxx','.C','.c++')
-
-def options(opt):
-	global MAX_BATCH
-	opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH,
-		help='default unity batch size (0 disables unity builds)')
-
-@TaskGen.taskgen_method
-def batch_size(self):
-	default = getattr(Options.options, 'batchsize', MAX_BATCH)
-	if default < 1:
-		return 0
-	return getattr(self, 'unity_size', default)
-
-
-class unity(Task.Task):
-	color = 'BLUE'
-	scan = c_preproc.scan
-	def to_include(self, node):
-		ret = node.path_from(self.outputs[0].parent)
-		ret = ret.replace('\\', '\\\\').replace('"', '\\"')
-		return ret
-	def run(self):
-		lst = ['#include "%s"\n' % self.to_include(node) for node in self.inputs]
-		txt = ''.join(lst)
-		self.outputs[0].write(txt)
-	def __str__(self):
-		node = self.outputs[0]
-		return node.path_from(node.ctx.launch_node())
-
-def bind_unity(obj, cls_name, exts):
-	if 'mappings' not in obj.__dict__:
-		obj.mappings = dict(obj.mappings)
-
-	for j in exts:
-		fun = obj.mappings[j]
-		if fun.__name__ == 'unity_fun':
-			raise ValueError('Attempt to bind unity mappings multiple times %r' % j)
-
-		# bind fun as a default argument; a plain closure would capture the loop variable
-		def unity_fun(self, node, fun=fun):
-			cnt = self.batch_size()
-			if cnt <= 1:
-				return fun(self, node)
-			x = getattr(self, 'master_%s' % cls_name, None)
-			if not x or len(x.inputs) >= cnt:
-				x = self.create_task('unity')
-				setattr(self, 'master_%s' % cls_name, x)
-
-				cnt_cur = getattr(self, 'cnt_%s' % cls_name, 0)
-				c_node = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, cls_name))
-				x.outputs = [c_node]
-				setattr(self, 'cnt_%s' % cls_name, cnt_cur + 1)
-				fun(self, c_node)
-			x.inputs.append(node)
-
-		obj.mappings[j] = unity_fun
-
-@TaskGen.feature('unity')
-@TaskGen.before('process_source')
-def single_unity(self):
-	lst = self.to_list(self.features)
-	if 'c' in lst:
-		bind_unity(self, 'c', EXTS_C)
-	if 'cxx' in lst:
-		bind_unity(self, 'cxx', EXTS_CXX)
-
-def build(bld):
-	if bld.env.CC_NAME:
-		bind_unity(TaskGen.task_gen, 'c', EXTS_C)
-	if bld.env.CXX_NAME:
-		bind_unity(TaskGen.task_gen, 'cxx', EXTS_CXX)
-
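
Given the run() method above, a batch of C sources yields a generated unity file that is nothing but includes, e.g. (the paths are invented)::

    #include "main.c"
    #include "util.c"
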
diff --git a/waflib/extras/use_config.py b/waflib/extras/use_config.py
deleted file mode 100644
index ef5129f..0000000
--- a/waflib/extras/use_config.py
+++ /dev/null
@@ -1,185 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org
-
-"""
-When a project has a lot of options, the 'waf configure' command line can
-become very long and error-prone.
-This tool provides a convenient way to load a set of configuration parameters
-from a local file or from a remote url.
-
-The configuration parameters are stored in a Python file that is imported
-in the same way as an extra waf tool.
-
-Example:
-$ waf configure --use-config-dir=http://www.anywhere.org --use-config=myconf1 ...
-
-The file 'myconf1' will be downloaded from 'http://www.anywhere.org'
-(or 'http://www.anywhere.org/wafcfg').
-If the files are available locally, it could be:
-$ waf configure --use-config-dir=/somewhere/myconfigurations --use-config=myconf1 ...
-
-The configuration of 'myconf1.py' is automatically loaded by calling
-its 'configure' function. In this example, it defines environment variables and
-sets options:
-
-def configure(self):
-	self.env['CC'] = 'gcc-4.8'
-	self.env.append_value('LIBPATH', [...])
-	self.options.perlbinary = '/usr/local/bin/perl'
-	self.options.pyc = False
-
-The corresponding command line would have been:
-$ CC=gcc-4.8 LIBPATH=... waf configure --nopyc --with-perl-binary=/usr/local/bin/perl
-
-
-This is an extra tool, not bundled with the default waf binary.
-To add the use_config tool to the waf file:
-$ ./waf-light --tools=use_config
-
-When using this tool, the wscript will look like:
-
-	def options(opt):
-		opt.load('use_config')
-
-	def configure(conf):
-		conf.load('use_config')
-"""
-
-import sys
-import os.path as osp
-import os
-
-local_repo = ''
-"""Local repository containing additional Waf tools (plugins)"""
-remote_repo = 'https://gitlab.com/ita1024/waf/raw/master/'
-"""
-Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
-
-	$ waf configure --download
-"""
-
-remote_locs = ['waflib/extras', 'waflib/Tools']
-"""
-Remote directories for use with :py:const:`waflib.extras.use_config.remote_repo`
-"""
-
-
-try:
-	from urllib import request
-except ImportError:
-	from urllib import urlopen
-else:
-	urlopen = request.urlopen
-
-
-from waflib import Errors, Context, Logs, Utils, Options, Configure
-
-try:
-	from urllib.parse import urlparse
-except ImportError:
-	from urlparse import urlparse
-
-
-
-
-DEFAULT_DIR = 'wafcfg'
-# add first the current wafcfg subdirectory
-sys.path.append(osp.abspath(DEFAULT_DIR))
-
-def options(self):
-	group = self.add_option_group('configure options')
-	group.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
-
-	group.add_option('--use-config', action='store', default=None,
-					 metavar='CFG', dest='use_config',
-					 help='force the configuration parameters by importing '
-						  'CFG.py. Several modules may be provided (comma '
-						  'separated).')
-	group.add_option('--use-config-dir', action='store', default=DEFAULT_DIR,
-					 metavar='CFG_DIR', dest='use_config_dir',
-					 help='path or url where to find the configuration file')
-
-def download_check(node):
-	"""
-	Hook to check for the tools which are downloaded. Replace with your function if necessary.
-	"""
-	pass
-
-
-def download_tool(tool, force=False, ctx=None):
-	"""
-	Download a Waf tool from the remote repository defined in :py:const:`waflib.extras.use_config.remote_repo`::
-
-		$ waf configure --download
-	"""
-	for x in Utils.to_list(remote_repo):
-		for sub in Utils.to_list(remote_locs):
-			url = '/'.join((x, sub, tool + '.py'))
-			try:
-				web = urlopen(url)
-				try:
-					if web.getcode() != 200:
-						continue
-				except AttributeError:
-					pass
-			except Exception:
-				# on python3 urlopen throws an exception
-				# python 2.3 does not have getcode and throws an exception to fail
-				continue
-			else:
-				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
-				tmp.write(web.read(), 'wb')
-				Logs.warn('Downloaded %s from %s', tool, url)
-				download_check(tmp)
-				try:
-					module = Context.load_tool(tool)
-				except Exception:
-					Logs.warn('The tool %s from %s is unusable', tool, url)
-					try:
-						tmp.delete()
-					except Exception:
-						pass
-					continue
-				return module
-
-	raise Errors.WafError('Could not load the Waf tool')
-
-def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
-	try:
-		module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path)
-	except ImportError as e:
-		if not ctx or not hasattr(Options.options, 'download'):
-			Logs.error('Could not load %r during options phase (download unavailable at this point)' % tool)
-			raise
-		if Options.options.download:
-			module = download_tool(tool, ctx=ctx)
-			if not module:
-				ctx.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
-		else:
-			ctx.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
-	return module
-
-Context.load_tool_default = Context.load_tool
-Context.load_tool = load_tool
-Configure.download_tool = download_tool
-
-def configure(self):
-	opts = self.options
-	use_cfg = opts.use_config
-	if use_cfg is None:
-		return
-	url = urlparse(opts.use_config_dir)
-	kwargs = {}
-	if url.scheme:
-		kwargs['download'] = True
-		kwargs['remote_url'] = url.geturl()
-		# search first with the exact url, else try with +'/wafcfg'
-		kwargs['remote_locs'] = ['', DEFAULT_DIR]
-	tooldir = url.geturl() + ' ' + DEFAULT_DIR
-	for cfg in use_cfg.split(','):
-		Logs.pprint('NORMAL', "Searching configuration '%s'..." % cfg)
-		self.load(cfg, tooldir=tooldir, **kwargs)
-	self.start_msg('Checking for configuration')
-	self.end_msg(use_cfg)
-
diff --git a/waflib/extras/valadoc.py b/waflib/extras/valadoc.py
deleted file mode 100644
index c50f69e..0000000
--- a/waflib/extras/valadoc.py
+++ /dev/null
@@ -1,140 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Nicolas Joseph 2009
-
-"""
-ported from waf 1.5:
-TODO: tabs vs spaces
-"""
-
-from waflib import Task, Utils, Errors, Logs
-from waflib.TaskGen import feature
-
-VALADOC_STR = '${VALADOC}'
-
-class valadoc(Task.Task):
-	vars  = ['VALADOC', 'VALADOCFLAGS']
-	color = 'BLUE'
-	after = ['cprogram', 'cstlib', 'cshlib', 'cxxprogram', 'cxxstlib', 'cxxshlib']
-	quiet = True # no outputs .. this is weird
-
-	def __init__(self, *k, **kw):
-		Task.Task.__init__(self, *k, **kw)
-		self.output_dir = ''
-		self.doclet = ''
-		self.package_name = ''
-		self.package_version = ''
-		self.files = []
-		self.vapi_dirs = []
-		self.protected = True
-		self.private = False
-		self.inherit = False
-		self.deps = False
-		self.vala_defines = []
-		self.vala_target_glib = None
-		self.enable_non_null_experimental = False
-		self.force = False
-
-	def run(self):
-		if not self.env['VALADOCFLAGS']:
-			self.env['VALADOCFLAGS'] = ''
-		cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
-		cmd.append ('-o %s' % self.output_dir)
-		if getattr(self, 'doclet', None):
-			cmd.append ('--doclet %s' % self.doclet)
-		cmd.append ('--package-name %s' % self.package_name)
-		if getattr(self, 'package_version', None):
-			cmd.append ('--package-version %s' % self.package_version)
-		if getattr(self, 'packages', None):
-			for package in self.packages:
-				cmd.append ('--pkg %s' % package)
-		if getattr(self, 'vapi_dirs', None):
-			for vapi_dir in self.vapi_dirs:
-				cmd.append ('--vapidir %s' % vapi_dir)
-		if not getattr(self, 'protected', None):
-			cmd.append ('--no-protected')
-		if getattr(self, 'private', None):
-			cmd.append ('--private')
-		if getattr(self, 'inherit', None):
-			cmd.append ('--inherit')
-		if getattr(self, 'deps', None):
-			cmd.append ('--deps')
-		if getattr(self, 'vala_defines', None):
-			for define in self.vala_defines:
-				cmd.append ('--define %s' % define)
-		if getattr(self, 'vala_target_glib', None):
-			cmd.append ('--target-glib=%s' % self.vala_target_glib)
-		if getattr(self, 'enable_non_null_experimental', None):
-			cmd.append ('--enable-non-null-experimental')
-		if getattr(self, 'force', None):
-			cmd.append ('--force')
-		cmd.append (' '.join ([x.abspath() for x in self.files]))
-		return self.generator.bld.exec_command(' '.join(cmd))
-
-@feature('valadoc')
-def process_valadoc(self):
-	"""
-	Generate API documentation from Vala source code with valadoc
-
-	doc = bld(
-		features = 'valadoc',
-		output_dir = '../doc/html',
-		package_name = 'vala-gtk-example',
-		package_version = '1.0.0',
-		packages = 'gtk+-2.0',
-		vapi_dirs = '../vapi',
-		force = True
-	)
-
-	path = bld.path.find_dir ('../src')
-	doc.files = path.ant_glob (incl='**/*.vala')
-	"""
-
-	task = self.create_task('valadoc')
-	if getattr(self, 'output_dir', None):
-		task.output_dir = self.path.find_or_declare(self.output_dir).abspath()
-	else:
-		raise Errors.WafError('no output directory')
-	if getattr(self, 'doclet', None):
-		task.doclet = self.doclet
-	else:
-		raise Errors.WafError('no doclet')
-	if getattr(self, 'package_name', None):
-		task.package_name = self.package_name
-	else:
-		raise Errors.WafError('no package name')
-	if getattr(self, 'package_version', None):
-		task.package_version = self.package_version
-	if getattr(self, 'packages', None):
-		task.packages = Utils.to_list(self.packages)
-	if getattr(self, 'vapi_dirs', None):
-		vapi_dirs = Utils.to_list(self.vapi_dirs)
-		for vapi_dir in vapi_dirs:
-			try:
-				task.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
-			except AttributeError:
-				Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
-	if getattr(self, 'files', None):
-		task.files = self.files
-	else:
-		raise Errors.WafError('no input file')
-	if getattr(self, 'protected', None):
-		task.protected = self.protected
-	if getattr(self, 'private', None):
-		task.private = self.private
-	if getattr(self, 'inherit', None):
-		task.inherit = self.inherit
-	if getattr(self, 'deps', None):
-		task.deps = self.deps
-	if getattr(self, 'vala_defines', None):
-		task.vala_defines = Utils.to_list(self.vala_defines)
-	if getattr(self, 'vala_target_glib', None):
-		task.vala_target_glib = self.vala_target_glib
-	if getattr(self, 'enable_non_null_experimental', None):
-		task.enable_non_null_experimental = self.enable_non_null_experimental
-	if getattr(self, 'force', None):
-		task.force = self.force
-
-def configure(conf):
-	conf.find_program('valadoc', errmsg='You must install valadoc <http://live.gnome.org/Valadoc> to generate the API documentation')
-
diff --git a/waflib/extras/waf_xattr.py b/waflib/extras/waf_xattr.py
deleted file mode 100644
index 351dd63..0000000
--- a/waflib/extras/waf_xattr.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Use extended attributes instead of database files
-
-1. Input files will be made writable
-2. This is only for systems providing extended filesystem attributes
-3. By default, hashes are calculated only if timestamp/size change (HASH_CACHE below)
-4. The module enables "deep_inputs" on all tasks by propagating task signatures
-5. This module also skips task signature comparisons for task code changes due to point 4.
-6. This module is for Python3/Linux only, but it could be extended to Python2/other systems
-   using the xattr library
-7. For projects in which tasks always declare output files, it should be possible to
-   store the rest of build context attributes on output files (imp_sigs, raw_deps and node_deps)
-   but this is not done here
-
-On a simple C++ project benchmark, the following variations were observed before and after adding waf_xattr.py:
-total build time: 20s -> 22s
-no-op build time: 2.4s -> 1.8s
-pickle file size: 2.9MB -> 2.6MB
-"""
-
-import os
-from waflib import Logs, Node, Task, Utils, Errors
-from waflib.Task import SKIP_ME, RUN_ME, CANCEL_ME, ASK_LATER, SKIPPED, MISSING
-
-HASH_CACHE = True
-SIG_VAR = 'user.waf.sig'
-SEP = ','.encode()
-TEMPLATE = '%b%d,%d'.encode()
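-# Cached attribute layout: the 16-byte md5 digest, immediately followed by
-# "<mtime in ms>,<size>" in ASCII; h_file() below splits the tail on SEP.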
-
-try:
-	PermissionError
-except NameError:
-	PermissionError = IOError
-
-def getxattr(self):
-	return os.getxattr(self.abspath(), SIG_VAR)
-
-def setxattr(self, val):
-	os.setxattr(self.abspath(), SIG_VAR, val)
-
-def h_file(self):
-	try:
-		ret = getxattr(self)
-	except OSError:
-		if HASH_CACHE:
-			st = os.stat(self.abspath())
-			mtime = st.st_mtime
-			size = st.st_size
-	else:
-		if len(ret) == 16:
-			# for build directory files
-			return ret
-
-		if HASH_CACHE:
-			# check if mtime and size match to avoid re-hashing
-			st = os.stat(self.abspath())
-			mtime, size = ret[16:].split(SEP)
-			if int(1000 * st.st_mtime) == int(mtime) and st.st_size == int(size):
-				return ret[:16]
-
-	ret = Utils.h_file(self.abspath())
-	if HASH_CACHE:
-		val = TEMPLATE % (ret, int(1000 * st.st_mtime), int(st.st_size))
-		try:
-			setxattr(self, val)
-		except PermissionError:
-			os.chmod(self.abspath(), st.st_mode | 128)
-			setxattr(self, val)
-	return ret
-
-def runnable_status(self):
-	bld = self.generator.bld
-	if bld.is_install < 0:
-		return SKIP_ME
-
-	for t in self.run_after:
-		if not t.hasrun:
-			return ASK_LATER
-		elif t.hasrun < SKIPPED:
-			# a dependency has an error
-			return CANCEL_ME
-
-	# first compute the signature
-	try:
-		new_sig = self.signature()
-	except Errors.TaskNotReady:
-		return ASK_LATER
-
-	if not self.outputs:
-		# compare the signature to a signature computed previously
-		# this part is only for tasks with no output files
-		key = self.uid()
-		try:
-			prev_sig = bld.task_sigs[key]
-		except KeyError:
-			Logs.debug('task: task %r must run: it was never run before or the task code changed', self)
-			return RUN_ME
-		if new_sig != prev_sig:
-			Logs.debug('task: task %r must run: the task signature changed', self)
-			return RUN_ME
-
-	# compare the signatures of the outputs to make a decision
-	for node in self.outputs:
-		try:
-			sig = node.h_file()
-		except EnvironmentError:
-			Logs.debug('task: task %r must run: an output node does not exist', self)
-			return RUN_ME
-		if sig != new_sig:
-			Logs.debug('task: task %r must run: an output node is stale', self)
-			return RUN_ME
-
-	return (self.always_run and RUN_ME) or SKIP_ME
-
-def post_run(self):
-	bld = self.generator.bld
-	sig = self.signature()
-	for node in self.outputs:
-		if not node.exists():
-			self.hasrun = MISSING
-			self.err_msg = '-> missing file: %r' % node.abspath()
-			raise Errors.WafError(self.err_msg)
-		setxattr(node, sig)
-	if not self.outputs:
-		# only for task with no outputs
-		bld.task_sigs[self.uid()] = sig
-	if not self.keep_last_cmd:
-		try:
-			del self.last_cmd
-		except AttributeError:
-			pass
-
-try:
-	os.getxattr
-except AttributeError:
-	pass
-else:
-	h_file.__doc__ = Node.Node.h_file.__doc__
-
-	# keep file hashes as file attributes
-	Node.Node.h_file = h_file
-
-	# enable "deep_inputs" on all tasks
-	Task.Task.runnable_status = runnable_status
-	Task.Task.post_run = post_run
-	Task.Task.sig_deep_inputs = Utils.nada
-
diff --git a/waflib/extras/why.py b/waflib/extras/why.py
deleted file mode 100644
index 1bb941f..0000000
--- a/waflib/extras/why.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010 (ita)
-
-"""
-This tool modifies the task signature scheme to store and obtain
-information about the task execution (why it must run, etc)::
-
-	def configure(conf):
-		conf.load('why')
-
-After adding the tool, a full rebuild is necessary:
-waf clean build --zones=task
-"""
-
-from waflib import Task, Utils, Logs, Errors
-
-def signature(self):
-	# compute the result only once, assuming the scanner results will remain valid
-	try:
-		return self.cache_sig
-	except AttributeError:
-		pass
-
-	self.m = Utils.md5()
-	self.m.update(self.hcode)
-	id_sig = self.m.digest()
-
-	# explicit deps
-	self.m = Utils.md5()
-	self.sig_explicit_deps()
-	exp_sig = self.m.digest()
-
-	# env vars
-	self.m = Utils.md5()
-	self.sig_vars()
-	var_sig = self.m.digest()
-
-	# implicit deps / scanner results
-	self.m = Utils.md5()
-	if self.scan:
-		try:
-			self.sig_implicit_deps()
-		except Errors.TaskRescan:
-			return self.signature()
-	impl_sig = self.m.digest()
-
-	ret = self.cache_sig = impl_sig + id_sig + exp_sig + var_sig
-	return ret
-
-
-Task.Task.signature = signature
-
-old = Task.Task.runnable_status
-def runnable_status(self):
-	ret = old(self)
-	if ret == Task.RUN_ME:
-		try:
-			old_sigs = self.generator.bld.task_sigs[self.uid()]
-		except (KeyError, AttributeError):
-			Logs.debug("task: task must run as no previous signature exists")
-		else:
-			new_sigs = self.cache_sig
-			def v(x):
-				return Utils.to_hex(x)
-
-			Logs.debug('Task %r', self)
-			msgs = ['* Implicit or scanner dependency', '* Task code', '* Source file, explicit or manual dependency', '* Configuration data variable']
-			tmp = 'task: -> %s: %s %s'
-			for x in range(len(msgs)):
-				l = len(Utils.SIG_NIL)
-				a = new_sigs[x*l : (x+1)*l]
-				b = old_sigs[x*l : (x+1)*l]
-				if (a != b):
-					Logs.debug(tmp, msgs[x].ljust(35), v(a), v(b))
-	return ret
-Task.Task.runnable_status = runnable_status
-
diff --git a/waflib/extras/win32_opts.py b/waflib/extras/win32_opts.py
deleted file mode 100644
index 9f7443c..0000000
--- a/waflib/extras/win32_opts.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Windows-specific optimizations
-
-This module can help reduce the overhead of listing files on Windows
-(more than 10000 files). Note that Python 3.5 already provides the
-listdir optimization.
-"""
-
-import os
-from waflib import Utils, Build, Node, Logs
-
-try:
-	TP = '%s\\*'.decode('ascii')
-except AttributeError:
-	TP = '%s\\*'
-
-if Utils.is_win32:
-	from waflib.Tools import md5_tstamp
-	import ctypes, ctypes.wintypes
-
-	FindFirstFile        = ctypes.windll.kernel32.FindFirstFileW
-	FindNextFile         = ctypes.windll.kernel32.FindNextFileW
-	FindClose            = ctypes.windll.kernel32.FindClose
-	FILE_ATTRIBUTE_DIRECTORY = 0x10
-	INVALID_HANDLE_VALUE = -1
-	UPPER_FOLDERS = ('.', '..')
-	try:
-		UPPER_FOLDERS = [unicode(x) for x in UPPER_FOLDERS]
-	except NameError:
-		pass
-
-	def cached_hash_file(self):
-		try:
-			cache = self.ctx.cache_listdir_cache_hash_file
-		except AttributeError:
-			cache = self.ctx.cache_listdir_cache_hash_file = {}
-
-		if id(self.parent) in cache:
-			try:
-				t = cache[id(self.parent)][self.name]
-			except KeyError:
-				raise IOError('Not a file')
-		else:
-			# an opportunity to list the files and the timestamps at once
-			findData = ctypes.wintypes.WIN32_FIND_DATAW()
-			find     = FindFirstFile(TP % self.parent.abspath(), ctypes.byref(findData))
-
-			if find == INVALID_HANDLE_VALUE:
-				cache[id(self.parent)] = {}
-				raise IOError('Not a file')
-
-			cache[id(self.parent)] = lst_files = {}
-			try:
-				while True:
-					if findData.cFileName not in UPPER_FOLDERS:
-						thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
-						if not thatsadir:
-							ts = findData.ftLastWriteTime
-							# FILETIME packs the high 32 bits in dwHighDateTime
-							d = (ts.dwHighDateTime << 32) | ts.dwLowDateTime
-							lst_files[str(findData.cFileName)] = d
-					if not FindNextFile(find, ctypes.byref(findData)):
-						break
-			except Exception:
-				cache[id(self.parent)] = {}
-				raise IOError('Not a file')
-			finally:
-				FindClose(find)
-			t = lst_files[self.name]
-
-		fname = self.abspath()
-		if fname in Build.hashes_md5_tstamp:
-			if Build.hashes_md5_tstamp[fname][0] == t:
-				return Build.hashes_md5_tstamp[fname][1]
-
-		try:
-			fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
-		except OSError:
-			raise IOError('Cannot read from %r' % fname)
-		f = os.fdopen(fd, 'rb')
-		m = Utils.md5()
-		rb = 1
-		try:
-			while rb:
-				rb = f.read(200000)
-				m.update(rb)
-		finally:
-			f.close()
-
-		# ensure that the cache is overwritten
-		Build.hashes_md5_tstamp[fname] = (t, m.digest())
-		return m.digest()
-	Node.Node.cached_hash_file = cached_hash_file
-
-	def get_bld_sig_win32(self):
-		try:
-			return self.ctx.hash_cache[id(self)]
-		except KeyError:
-			pass
-		except AttributeError:
-			self.ctx.hash_cache = {}
-		self.ctx.hash_cache[id(self)] = ret = Utils.h_file(self.abspath())
-		return ret
-	Node.Node.get_bld_sig = get_bld_sig_win32
-
-	def isfile_cached(self):
-		# optimize for nt.stat calls, assuming there are many files for few folders
-		try:
-			cache = self.__class__.cache_isfile_cache
-		except AttributeError:
-			cache = self.__class__.cache_isfile_cache = {}
-
-		try:
-			c1 = cache[id(self.parent)]
-		except KeyError:
-			c1 = cache[id(self.parent)] = []
-
-			curpath = self.parent.abspath()
-			findData = ctypes.wintypes.WIN32_FIND_DATAW()
-			find     = FindFirstFile(TP % curpath, ctypes.byref(findData))
-
-			if find == INVALID_HANDLE_VALUE:
-				Logs.error("invalid win32 handle isfile_cached %r", self.abspath())
-				return os.path.isfile(self.abspath())
-
-			try:
-				while True:
-					if findData.cFileName not in UPPER_FOLDERS:
-						thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
-						if not thatsadir:
-							c1.append(str(findData.cFileName))
-					if not FindNextFile(find, ctypes.byref(findData)):
-						break
-			except Exception as e:
-				Logs.error('exception while listing a folder %r %r', self.abspath(), e)
-				return os.path.isfile(self.abspath())
-			finally:
-				FindClose(find)
-		return self.name in c1
-	Node.Node.isfile_cached = isfile_cached
-
-	def find_or_declare_win32(self, lst):
-		# assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
-		if isinstance(lst, str):
-			lst = [x for x in Utils.split_path(lst) if x and x != '.']
-
-		node = self.get_bld().search_node(lst)
-		if node:
-			if not node.isfile_cached():
-				try:
-					node.parent.mkdir()
-				except OSError:
-					pass
-			return node
-		self = self.get_src()
-		node = self.find_node(lst)
-		if node:
-			if not node.isfile_cached():
-				try:
-					node.parent.mkdir()
-				except OSError:
-					pass
-			return node
-		node = self.get_bld().make_node(lst)
-		node.parent.mkdir()
-		return node
-	Node.Node.find_or_declare = find_or_declare_win32
-
diff --git a/waflib/extras/wix.py b/waflib/extras/wix.py
deleted file mode 100644
index d87bfbb..0000000
--- a/waflib/extras/wix.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/python
-# encoding: utf-8
-# vim: tabstop=4 noexpandtab
-
-"""
-Windows Installer XML Tool (WiX)
-
-.wxs --- candle ---> .wxobj --- light ---> .msi
-
-bld(features='wix', source='some.wxs', gen='some.msi', candleflags=[..], lightflags=[..])
-
-bld(features='wix', source=['bundle.wxs','WixBalExtension'], gen='setup.exe', candleflags=[..])
-"""
-
-import os, copy
-from waflib import TaskGen
-from waflib import Task
-from waflib.Utils import winreg
-
-class candle(Task.Task):
-	run_str = '${CANDLE} -nologo ${CANDLEFLAGS} -out ${TGT} ${SRC[0].abspath()}'
-
-class light(Task.Task):
-	run_str = "${LIGHT} -nologo -b ${SRC[0].parent.abspath()} ${LIGHTFLAGS} -out ${TGT} ${SRC[0].abspath()}"
-
-@TaskGen.feature('wix')
-@TaskGen.before_method('process_source')
-def wix(self):
-	#X.wxs -> ${SRC} for CANDLE
-	#X.wxobj -> ${SRC} for LIGHT
-	#X.dll -> -ext X in ${LIGHTFLAGS}
-	#X.wxl -> wixui.wixlib -loc X.wxl in ${LIGHTFLAGS}
-	wxobj = []
-	wxs = []
-	exts = []
-	wxl = []
-	rest = []
-	for x in self.source:
-		if x.endswith('.wxobj'):
-			wxobj.append(x)
-		elif x.endswith('.wxs'):
-			wxobj.append(self.path.find_or_declare(x[:-4]+'.wxobj'))
-			wxs.append(x)
-		elif x.endswith('.dll'):
-			exts.append(x[:-4])
-		elif '.' not in x:
-			exts.append(x)
-		elif x.endswith('.wxl'):
-			wxl.append(x)
-		else:
-			rest.append(x)
-	self.source = self.to_nodes(rest) #.wxs
-
-	cndl = self.create_task('candle', self.to_nodes(wxs), self.to_nodes(wxobj))
-	lght = self.create_task('light', self.to_nodes(wxobj), self.path.find_or_declare(self.gen))
-
-	cndl.env.CANDLEFLAGS = copy.copy(getattr(self,'candleflags',[]))
-	lght.env.LIGHTFLAGS = copy.copy(getattr(self,'lightflags',[]))
-
-	for x in wxl:
-		lght.env.append_value('LIGHTFLAGS','wixui.wixlib')
-		lght.env.append_value('LIGHTFLAGS','-loc')
-		lght.env.append_value('LIGHTFLAGS',x)
-	for x in exts:
-		cndl.env.append_value('CANDLEFLAGS','-ext')
-		cndl.env.append_value('CANDLEFLAGS',x)
-		lght.env.append_value('LIGHTFLAGS','-ext')
-		lght.env.append_value('LIGHTFLAGS',x)
-
-#wix_bin_path()
-def wix_bin_path():
-	basekey = r"SOFTWARE\Microsoft\.NETFramework\AssemblyFolders"
-	query = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, basekey)
-	cnt=winreg.QueryInfoKey(query)[0]
-	thiskey = r'C:\Program Files (x86)\WiX Toolset v3.10\SDK'
-	for i in range(cnt-1,-1,-1):
-		thiskey = winreg.EnumKey(query,i)
-		if 'WiX' in thiskey:
-			break
-	winreg.CloseKey(query)
-	return os.path.normpath(winreg.QueryValue(winreg.HKEY_LOCAL_MACHINE, basekey+r'\\'+thiskey)+'..\\bin')
-
-def configure(ctx):
-	path_list=[wix_bin_path()]
-	ctx.find_program('candle', var='CANDLE', mandatory=True, path_list = path_list)
-	ctx.find_program('light', var='LIGHT', mandatory=True, path_list = path_list)
-
diff --git a/waflib/extras/xcode6.py b/waflib/extras/xcode6.py
deleted file mode 100644
index 91bbff1..0000000
--- a/waflib/extras/xcode6.py
+++ /dev/null
@@ -1,727 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Xcode 3/Xcode 4/Xcode 6/Xcode 7 generator for Waf
-# Based on work by Nicolas Mercier 2011
-# Extended by Simon Warg 2015, https://github.com/mimon
-# Xcode project file format based on http://www.monobjc.net/xcode-project-file-format.html
-
-"""
-See playground/xcode6/ for usage examples.
-
-"""
-
-from waflib import Context, TaskGen, Build, Utils, Errors, Logs
-import os, sys
-
-# FIXME too few extensions
-XCODE_EXTS = ['.c', '.cpp', '.m', '.mm']
-
-HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
-
-MAP_EXT = {
-	'': "folder",
-	'.h' :  "sourcecode.c.h",
-
-	'.hh':  "sourcecode.cpp.h",
-	'.inl': "sourcecode.cpp.h",
-	'.hpp': "sourcecode.cpp.h",
-
-	'.c':   "sourcecode.c.c",
-
-	'.m':   "sourcecode.c.objc",
-
-	'.mm':  "sourcecode.cpp.objcpp",
-
-	'.cc':  "sourcecode.cpp.cpp",
-
-	'.cpp': "sourcecode.cpp.cpp",
-	'.C':   "sourcecode.cpp.cpp",
-	'.cxx': "sourcecode.cpp.cpp",
-	'.c++': "sourcecode.cpp.cpp",
-
-	'.l':   "sourcecode.lex", # luthor
-	'.ll':  "sourcecode.lex",
-
-	'.y':   "sourcecode.yacc",
-	'.yy':  "sourcecode.yacc",
-
-	'.plist': "text.plist.xml",
-	".nib":   "wrapper.nib",
-	".xib":   "text.xib",
-}
-
-# Used in PBXNativeTarget elements
-PRODUCT_TYPE_APPLICATION = 'com.apple.product-type.application'
-PRODUCT_TYPE_FRAMEWORK = 'com.apple.product-type.framework'
-PRODUCT_TYPE_EXECUTABLE = 'com.apple.product-type.tool'
-PRODUCT_TYPE_LIB_STATIC = 'com.apple.product-type.library.static'
-PRODUCT_TYPE_LIB_DYNAMIC = 'com.apple.product-type.library.dynamic'
-PRODUCT_TYPE_EXTENSION = 'com.apple.product-type.kernel-extension'
-PRODUCT_TYPE_IOKIT = 'com.apple.product-type.kernel-extension.iokit'
-
-# Used in PBXFileReference elements
-FILE_TYPE_APPLICATION = 'wrapper.cfbundle'
-FILE_TYPE_FRAMEWORK = 'wrapper.framework'
-FILE_TYPE_LIB_DYNAMIC = 'compiled.mach-o.dylib'
-FILE_TYPE_LIB_STATIC = 'archive.ar'
-FILE_TYPE_EXECUTABLE = 'compiled.mach-o.executable'
-
-# Tuple packs of the above
-TARGET_TYPE_FRAMEWORK = (PRODUCT_TYPE_FRAMEWORK, FILE_TYPE_FRAMEWORK, '.framework')
-TARGET_TYPE_APPLICATION = (PRODUCT_TYPE_APPLICATION, FILE_TYPE_APPLICATION, '.app')
-TARGET_TYPE_DYNAMIC_LIB = (PRODUCT_TYPE_LIB_DYNAMIC, FILE_TYPE_LIB_DYNAMIC, '.dylib')
-TARGET_TYPE_STATIC_LIB = (PRODUCT_TYPE_LIB_STATIC, FILE_TYPE_LIB_STATIC, '.a')
-TARGET_TYPE_EXECUTABLE = (PRODUCT_TYPE_EXECUTABLE, FILE_TYPE_EXECUTABLE, '')
-
-# Maps target type string to its data
-TARGET_TYPES = {
-	'framework': TARGET_TYPE_FRAMEWORK,
-	'app': TARGET_TYPE_APPLICATION,
-	'dylib': TARGET_TYPE_DYNAMIC_LIB,
-	'stlib': TARGET_TYPE_STATIC_LIB,
-	'exe' :TARGET_TYPE_EXECUTABLE,
-}
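-# Hedged usage sketch: a task generator selects one of these types through the
-# 'target_type' attribute read by process_xcode below (names are illustrative):
-#
-#	bld(features='cxx cxxshlib', source='lib.cpp', target='mylib', target_type='dylib')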
-
-def delete_invalid_values(dct):
-	""" Deletes entries that are dictionaries or sets """
-	for k, v in list(dct.items()):
-		if isinstance(v, dict) or isinstance(v, set):
-			del dct[k]
-	return dct
-
-"""
-Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION'
-which is a dictionary of configuration name and buildsettings pair.
-E.g.:
-env.PROJ_CONFIGURATION = {
-	'Debug': {
-		'ARCHS': 'x86',
-		...
-	}
-	'Release': {
-		'ARCHS' x86_64'
-		...
-	}
-}
-The user can define a completely customized dictionary in configure() stage. Otherwise a default Debug/Release will be created
-based on env variable
-"""
-def configure(self):
-	if not self.env.PROJ_CONFIGURATION:
-		self.to_log("A default project configuration was created since no custom one was given in the configure(conf) stage. Define your custom project settings by adding PROJ_CONFIGURATION to env. The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.\n")
-
-	# Check for any config files added by the tool 'c_config'.
-	if 'cfg_files' in self.env:
-		self.env.INCLUDES = Utils.to_list(self.env.INCLUDES) + [os.path.abspath(os.path.dirname(f)) for f in self.env.cfg_files]
-
-	# Create default project configuration?
-	if 'PROJ_CONFIGURATION' not in self.env:
-		defaults = delete_invalid_values(self.env.get_merged_dict())
-		self.env.PROJ_CONFIGURATION = {
-			"Debug": defaults,
-			"Release": defaults,
-		}
-
-	# Error-check any customization before iterating over the configurations
-	if not isinstance(self.env.PROJ_CONFIGURATION, dict):
-		raise Errors.ConfigurationError("The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.")
-
-	# Some build settings are required to be present by Xcode. We will supply default values
-	# if the user hasn't defined any.
-	defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')]
-	for cfgname, settings in self.env.PROJ_CONFIGURATION.items():
-		for default_var, default_val in defaults_required:
-			if default_var not in settings:
-				settings[default_var] = default_val
-
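-# Xcode object identifiers are 24-character strings (normally 96-bit values in
-# hex); newid() below fabricates unique ones from a fixed prefix plus a
-# zero-padded decimal counter, which Xcode accepts just as well.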
-part1 = 0
-part2 = 10000
-part3 = 0
-id = 562000999
-def newid():
-	global id
-	id += 1
-	return "%04X%04X%04X%012d" % (part1, part2, part3, id)
-
-"""
-Represents a tree node in the XCode project plist file format.
-When written to a file, all attributes of XCodeNode are stringified together with
-its value. However, attributes starting with an underscore _ are ignored
-during that process and allows you to store arbitrary values that are not supposed
-to be written out.
-"""
-class XCodeNode(object):
-	def __init__(self):
-		self._id = newid()
-		self._been_written = False
-
-	def tostring(self, value):
-		if isinstance(value, dict):
-			result = "{\n"
-			for k,v in value.items():
-				result = result + "\t\t\t%s = %s;\n" % (k, self.tostring(v))
-			result = result + "\t\t}"
-			return result
-		elif isinstance(value, str):
-			return "\"%s\"" % value
-		elif isinstance(value, list):
-			result = "(\n"
-			for i in value:
-				result = result + "\t\t\t%s,\n" % self.tostring(i)
-			result = result + "\t\t)"
-			return result
-		elif isinstance(value, XCodeNode):
-			return value._id
-		else:
-			return str(value)
-
-	def write_recursive(self, value, file):
-		if isinstance(value, dict):
-			for k,v in value.items():
-				self.write_recursive(v, file)
-		elif isinstance(value, list):
-			for i in value:
-				self.write_recursive(i, file)
-		elif isinstance(value, XCodeNode):
-			value.write(file)
-
-	def write(self, file):
-		if not self._been_written:
-			self._been_written = True
-			for attribute,value in self.__dict__.items():
-				if attribute[0] != '_':
-					self.write_recursive(value, file)
-			w = file.write
-			w("\t%s = {\n" % self._id)
-			w("\t\tisa = %s;\n" % self.__class__.__name__)
-			for attribute,value in self.__dict__.items():
-				if attribute[0] != '_':
-					w("\t\t%s = %s;\n" % (attribute, self.tostring(value)))
-			w("\t};\n\n")
-
-# Configurations
-class XCBuildConfiguration(XCodeNode):
-	def __init__(self, name, settings=None, env=None):
-		XCodeNode.__init__(self)
-		# avoid a shared mutable default argument; the dictionary is modified below
-		settings = settings or {}
-		self.baseConfigurationReference = ""
-		self.buildSettings = settings
-		self.name = name
-		if env and env.ARCH:
-			settings['ARCHS'] = " ".join(env.ARCH)
-
-
-class XCConfigurationList(XCodeNode):
-	def __init__(self, configlst):
-		""" :param configlst: list of XCConfigurationList """
-		XCodeNode.__init__(self)
-		self.buildConfigurations = configlst
-		self.defaultConfigurationIsVisible = 0
-		self.defaultConfigurationName = configlst and configlst[0].name or ""
-
-# Group/Files
-class PBXFileReference(XCodeNode):
-	def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"):
-
-		XCodeNode.__init__(self)
-		self.fileEncoding = 4
-		if not filetype:
-			_, ext = os.path.splitext(name)
-			filetype = MAP_EXT.get(ext, 'text')
-		self.lastKnownFileType = filetype
-		self.explicitFileType = filetype
-		self.name = name
-		self.path = path
-		self.sourceTree = sourcetree
-
-	def __hash__(self):
-		return (self.path+self.name).__hash__()
-
-	def __eq__(self, other):
-		return (self.path, self.name) == (other.path, other.name)
-
-class PBXBuildFile(XCodeNode):
-	""" This element indicate a file reference that is used in a PBXBuildPhase (either as an include or resource). """
-	def __init__(self, fileRef, settings={}):
-		XCodeNode.__init__(self)
-
-		# fileRef is a reference to a PBXFileReference object
-		self.fileRef = fileRef
-
-		# A map of key/value pairs for additional settings.
-		self.settings = settings
-
-	def __hash__(self):
-		return (self.fileRef).__hash__()
-
-	def __eq__(self, other):
-		return self.fileRef == other.fileRef
-
-class PBXGroup(XCodeNode):
-	def __init__(self, name, sourcetree = 'SOURCE_TREE'):
-		XCodeNode.__init__(self)
-		self.children = []
-		self.name = name
-		self.sourceTree = sourcetree
-
-		# Maintain a lookup table for all PBXFileReferences
-		# that are contained in this group.
-		self._filerefs = {}
-
-	def add(self, sources):
-		"""
-		Add a list of PBXFileReferences to this group
-
-		:param sources: list of PBXFileReferences objects
-		"""
-		self._filerefs.update(dict(zip(sources, sources)))
-		self.children.extend(sources)
-
-	def get_sub_groups(self):
-		"""
-		Returns all child PBXGroup objects contained in this group
-		"""
-		return list(filter(lambda x: isinstance(x, PBXGroup), self.children))
-
-	def find_fileref(self, fileref):
-		"""
-		Recursively search this group for an existing PBXFileReference. Returns None
-		if none is found.
-
-		The reason you'd want to reuse existing PBXFileReferences from a PBXGroup is
-		that Xcode doesn't like PBXFileReferences that aren't part of a PBXGroup
-		hierarchy: if a reference is outside one, certain UI features like
-		'Reveal in Finder' stop working.
-		"""
-		if fileref in self._filerefs:
-			return self._filerefs[fileref]
-		elif self.children:
-			for childgroup in self.get_sub_groups():
-				f = childgroup.find_fileref(fileref)
-				if f:
-					return f
-		return None
-
-class PBXContainerItemProxy(XCodeNode):
-	""" This is the element for to decorate a target item. """
-	def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1):
-		XCodeNode.__init__(self)
-		self.containerPortal = containerPortal # PBXProject
-		self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget
-		self.remoteInfo = remoteInfo # Target name
-		self.proxyType = proxyType
-
-class PBXTargetDependency(XCodeNode):
-	""" This is the element for referencing other target through content proxies. """
-	def __init__(self, native_target, proxy):
-		XCodeNode.__init__(self)
-		self.target = native_target
-		self.targetProxy = proxy
-
-class PBXFrameworksBuildPhase(XCodeNode):
-	""" This is the element for the framework link build phase, i.e. linking to frameworks """
-	def __init__(self, pbxbuildfiles):
-		XCodeNode.__init__(self)
-		self.buildActionMask = 2147483647
-		self.runOnlyForDeploymentPostprocessing = 0
-		self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
-
-class PBXHeadersBuildPhase(XCodeNode):
-	""" This is the element for adding header files to be packaged into the .framework """
-	def __init__(self, pbxbuildfiles):
-		XCodeNode.__init__(self)
-		self.buildActionMask = 2147483647
-		self.runOnlyForDeploymentPostprocessing = 0
-		self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
-
-class PBXCopyFilesBuildPhase(XCodeNode):
-	"""
-	Represents the PBXCopyFilesBuildPhase section. PBXBuildFile
-	can be added to this node to copy files after build is done.
-	"""
-	def __init__(self, pbxbuildfiles, dstpath, dstSubpathSpec=0, *args, **kwargs):
-		XCodeNode.__init__(self)
-		self.files = pbxbuildfiles
-		self.dstPath = dstpath
-		self.dstSubfolderSpec = dstSubpathSpec
-
-class PBXSourcesBuildPhase(XCodeNode):
-	""" Represents the 'Compile Sources' build phase in a Xcode target """
-	def __init__(self, buildfiles):
-		XCodeNode.__init__(self)
-		self.files = buildfiles # List of PBXBuildFile objects
-
-class PBXLegacyTarget(XCodeNode):
-	def __init__(self, action, target=''):
-		XCodeNode.__init__(self)
-		self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})])
-		if not target:
-			self.buildArgumentsString = "%s %s" % (sys.argv[0], action)
-		else:
-			self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target)
-		self.buildPhases = []
-		self.buildToolPath = sys.executable
-		self.buildWorkingDirectory = ""
-		self.dependencies = []
-		self.name = target or action
-		self.productName = target or action
-		self.passBuildSettingsInEnvironment = 0
-
-class PBXShellScriptBuildPhase(XCodeNode):
-	def __init__(self, action, target):
-		XCodeNode.__init__(self)
-		self.buildActionMask = 2147483647
-		self.files = []
-		self.inputPaths = []
-		self.outputPaths = []
-		self.runOnlyForDeploymentPostprocessing = 0
-		self.shellPath = "/bin/sh"
-		self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target)
-
-class PBXNativeTarget(XCodeNode):
-	""" Represents a target in XCode, e.g. App, DyLib, Framework etc. """
-	def __init__(self, target, node, target_type=TARGET_TYPE_APPLICATION, configlist=[], buildphases=[]):
-		XCodeNode.__init__(self)
-		product_type = target_type[0]
-		file_type = target_type[1]
-
-		self.buildConfigurationList = XCConfigurationList(configlist)
-		self.buildPhases = buildphases
-		self.buildRules = []
-		self.dependencies = []
-		self.name = target
-		self.productName = target
-		self.productType = product_type # See TARGET_TYPE_ tuples constants
-		self.productReference = PBXFileReference(node.name, node.abspath(), file_type, '')
-
-	def add_configuration(self, cf):
-		""" :type cf: XCBuildConfiguration """
-		self.buildConfigurationList.buildConfigurations.append(cf)
-
-	def add_build_phase(self, phase):
-		# Some build phase types may appear only once. If a phase type already exists, then merge them.
-		if ( (phase.__class__ == PBXFrameworksBuildPhase)
-			or (phase.__class__ == PBXSourcesBuildPhase) ):
-			for b in self.buildPhases:
-				if b.__class__ == phase.__class__:
-					b.files.extend(phase.files)
-					return
-		self.buildPhases.append(phase)
-
-	def add_dependency(self, depnd):
-		self.dependencies.append(depnd)
-
-# Root project object
-class PBXProject(XCodeNode):
-	def __init__(self, name, version, env):
-		XCodeNode.__init__(self)
-
-		if not isinstance(env.PROJ_CONFIGURATION, dict):
-			raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. However, did you load the xcode module at the end of your wscript configure() ?")
-
-		# Retrieve project configuration
-		configurations = []
-		for config_name, settings in env.PROJ_CONFIGURATION.items():
-			cf = XCBuildConfiguration(config_name, settings)
-			configurations.append(cf)
-
-		self.buildConfigurationList = XCConfigurationList(configurations)
-		self.compatibilityVersion = version[0]
-		self.hasScannedForEncodings = 1
-		self.mainGroup = PBXGroup(name)
-		self.projectRoot = ""
-		self.projectDirPath = ""
-		self.targets = []
-		self._objectVersion = version[1]
-
-	def create_target_dependency(self, target, name):
-		""" : param target : PXBNativeTarget """
-		proxy = PBXContainerItemProxy(self, target, name)
-		dependency = PBXTargetDependency(target, proxy)
-		return dependency
-
-	def write(self, file):
-
-		# Make sure this is written only once
-		if self._been_written:
-			return
-
-		w = file.write
-		w("// !$*UTF8*$!\n")
-		w("{\n")
-		w("\tarchiveVersion = 1;\n")
-		w("\tclasses = {\n")
-		w("\t};\n")
-		w("\tobjectVersion = %d;\n" % self._objectVersion)
-		w("\tobjects = {\n\n")
-
-		XCodeNode.write(self, file)
-
-		w("\t};\n")
-		w("\trootObject = %s;\n" % self._id)
-		w("}\n")
-
-	def add_target(self, target):
-		self.targets.append(target)
-
-	def get_target(self, name):
-		""" Get a reference to PBXNativeTarget if it exists """
-		for t in self.targets:
-			if t.name == name:
-				return t
-		return None
-
-@TaskGen.feature('c', 'cxx')
-@TaskGen.after('propagate_uselib_vars', 'apply_incpaths')
-def process_xcode(self):
-	bld = self.bld
-	try:
-		p = bld.project
-	except AttributeError:
-		return
-
-	if not hasattr(self, 'target_type'):
-		return
-
-	products_group = bld.products_group
-
-	target_group = PBXGroup(self.name)
-	p.mainGroup.children.append(target_group)
-
-	# Determine what type to build - framework, app bundle etc.
-	target_type = getattr(self, 'target_type', 'app')
-	if target_type not in TARGET_TYPES:
-		raise Errors.WafError("Target type '%s' does not exists. Available options are '%s'. In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), self.name))
-	else:
-		target_type = TARGET_TYPES[target_type]
-	file_ext = target_type[2]
-
-	# Create the output node
-	target_node = self.path.find_or_declare(self.name+file_ext)
-	target = PBXNativeTarget(self.name, target_node, target_type, [], [])
-
-	products_group.children.append(target.productReference)
-
-	# Pull source files from the 'source' attribute and assign them to a UI group.
-	# Use a default UI group named 'Source' unless the user
-	# provides a 'group_files' dictionary to customize the UI grouping.
-	sources = getattr(self, 'source', [])
-	if hasattr(self, 'group_files'):
-		group_files = getattr(self, 'group_files', [])
-		for grpname,files in group_files.items():
-			group = bld.create_group(grpname, files)
-			target_group.children.append(group)
-	else:
-		group = bld.create_group('Source', sources)
-		target_group.children.append(group)
-
-	# Create a PBXFileReference for each source file.
-	# If the source file already exists as a PBXFileReference in any of the UI groups, then
-	# reuse that PBXFileReference object (XCode does not like it if we don't reuse)
-	for idx, path in enumerate(sources):
-		fileref = PBXFileReference(path.name, path.abspath())
-		existing_fileref = target_group.find_fileref(fileref)
-		if existing_fileref:
-			sources[idx] = existing_fileref
-		else:
-			sources[idx] = fileref
-
-	# If the 'source' attribute contains any file extension that XCode can't work with,
-	# then remove it. The allowed file extensions are defined in XCODE_EXTS.
-	is_valid_file_extension = lambda file: os.path.splitext(file.path)[1] in XCODE_EXTS
-	sources = list(filter(is_valid_file_extension, sources))
-
-	buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources]
-	target.add_build_phase(PBXSourcesBuildPhase(buildfiles))
-
-	# Check if any framework to link against is some other target we've made
-	libs = getattr(self, 'tmp_use_seen', [])
-	for lib in libs:
-		use_target = p.get_target(lib)
-		if use_target:
-			# Create an XCode dependency so that XCode knows to build the other target before this target
-			dependency = p.create_target_dependency(use_target, use_target.name)
-			target.add_dependency(dependency)
-
-			buildphase = PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)])
-			target.add_build_phase(buildphase)
-			if lib in self.env.LIB:
-				self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB))
-
-	# If 'export_headers' is present, add files to the Headers build phase in xcode.
-	# These are files that'll get packed into the Framework for instance.
-	exp_hdrs = getattr(self, 'export_headers', [])
-	hdrs = bld.as_nodes(Utils.to_list(exp_hdrs))
-	files = [p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath())) for n in hdrs]
-	files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files if f]
-	buildphase = PBXHeadersBuildPhase(files)
-	target.add_build_phase(buildphase)
-
-	# Merge frameworks and libs into one list, and prefix the frameworks
-	frameworks = Utils.to_list(self.env.FRAMEWORK)
-	frameworks = ' '.join(['-framework %s' % (f.split('.framework')[0]) for f in frameworks])
-
-	libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB)
-	libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs)
-
-	# Override target specific build settings
-	bldsettings = {
-		'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
-		'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR) ,
-		'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
-		'OTHER_LDFLAGS': libs + ' ' + frameworks,
-		'OTHER_LIBTOOLFLAGS': bld.env['LINKFLAGS'],
-		'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
-		'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
-		'INSTALL_PATH': []
-	}
-
-	# Install path
-	installpaths = Utils.to_list(getattr(self, 'install', []))
-	prodbuildfile = PBXBuildFile(target.productReference)
-	for instpath in installpaths:
-		bldsettings['INSTALL_PATH'].append(instpath)
-		target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath))
-
-	if not bldsettings['INSTALL_PATH']:
-		del bldsettings['INSTALL_PATH']
-
-	# Create build settings which can override the project settings. Defaults to none if user
-	# did not pass argument. This will be filled up with target specific
-	# search paths, libs to link etc.
-	settings = getattr(self, 'settings', {})
-
-	# The keys represent different build configurations, e.g. Debug, Release and so on.
-	# Insert our generated build settings into all configuration names.
-	# use a set union so this works on both Python 2 and 3 (dict views cannot be added)
-	keys = set(settings.keys()) | set(bld.env.PROJ_CONFIGURATION.keys())
-	for k in keys:
-		if k in settings:
-			settings[k].update(bldsettings)
-		else:
-			# copy, so that later per-configuration tweaks do not alias each other
-			settings[k] = dict(bldsettings)
-
-	for k,v in settings.items():
-		target.add_configuration(XCBuildConfiguration(k, v))
-
-	p.add_target(target)
-
-
-class xcode(Build.BuildContext):
-	cmd = 'xcode6'
-	fun = 'build'
-
-	def as_nodes(self, files):
-		""" Returns a list of waflib.Nodes from a list of string of file paths """
-		nodes = []
-		for x in files:
-			if not isinstance(x, str):
-				d = x
-			else:
-				d = self.srcnode.find_node(x)
-				if not d:
-					raise Errors.WafError('File \'%s\' was not found' % x)
-			nodes.append(d)
-		return nodes
-
-	def create_group(self, name, files):
-		"""
-		Returns a new PBXGroup containing the files (paths) passed in the files arg
-		:param files: file paths, as a string or a list of strings
-		"""
-		group = PBXGroup(name)
-		"""
-		Do not use unique file reference here, since XCode seem to allow only one file reference
-		to be referenced by a group.
-		"""
-		files_ = []
-		for d in self.as_nodes(Utils.to_list(files)):
-			fileref = PBXFileReference(d.name, d.abspath())
-			files_.append(fileref)
-		group.add(files_)
-		return group
-
-	def unique_buildfile(self, buildfile):
-		"""
-		Returns a unique buildfile, possibly an existing one.
-		Use this after you've constructed a PBXBuildFile to make sure there is
-		only one PBXBuildFile for the same file in the same project.
-		"""
-		try:
-			build_files = self.build_files
-		except AttributeError:
-			build_files = self.build_files = {}
-
-		if buildfile not in build_files:
-			build_files[buildfile] = buildfile
-		return build_files[buildfile]
-
-	def execute(self):
-		"""
-		Entry point
-		"""
-		self.restore()
-		if not self.all_envs:
-			self.load_envs()
-		self.recurse([self.run_dir])
-
-		appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
-
-		p = PBXProject(appname, ('Xcode 3.2', 46), self.env)
-
-		# If we don't create a Products group, then
-		# XCode will create one, which entails that
-		# we'll start to see duplicate files in the UI
-		# for some reason.
-		products_group = PBXGroup('Products')
-		p.mainGroup.children.append(products_group)
-
-		self.project = p
-		self.products_group = products_group
-
-		# post all task generators
-		# the process_xcode method above will be called for each target
-		if self.targets and self.targets != '*':
-			(self._min_grp, self._exact_tg) = self.get_targets()
-
-		self.current_group = 0
-		while self.current_group < len(self.groups):
-			self.post_group()
-			self.current_group += 1
-
-		node = self.bldnode.make_node('%s.xcodeproj' % appname)
-		node.mkdir()
-		node = node.make_node('project.pbxproj')
-		with open(node.abspath(), 'w') as f:
-			p.write(f)
-		Logs.pprint('GREEN', 'Wrote %r' % node.abspath())
-
-def bind_fun(tgtype):
-	def fun(self, *k, **kw):
-		tgtype = fun.__name__
-		if tgtype == 'shlib' or tgtype == 'dylib':
-			features = 'cxx cxxshlib'
-			tgtype = 'dylib'
-		elif tgtype == 'framework':
-			features = 'cxx cxxshlib'
-			tgtype = 'framework'
-		elif tgtype == 'program':
-			features = 'cxx cxxprogram'
-			tgtype = 'exe'
-		elif tgtype == 'app':
-			features = 'cxx cxxprogram'
-			tgtype = 'app'
-		elif tgtype == 'stlib':
-			features = 'cxx cxxstlib'
-			tgtype = 'stlib'
-		lst = kw['features'] = Utils.to_list(kw.get('features', []))
-		for x in features.split():
-			if x not in kw['features']:
-				lst.append(x)
-
-		kw['target_type'] = tgtype
-		return self(*k, **kw)
-	fun.__name__ = tgtype
-	setattr(Build.BuildContext, tgtype, fun)
-	return fun
-
-for xx in 'app framework dylib shlib stlib program'.split():
-	bind_fun(xx)
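-# Hedged usage sketch: with the helpers bound above, a wscript can declare
-# targets and then generate the project with the 'xcode6' command (names are
-# illustrative):
-#
-#	def build(bld):
-#		bld.program(source='main.cpp', target='demo')
-#
-# then run: ./waf configure xcode6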
-
-- 
cgit v1.2.1