diff options
author | David Robillard <d@drobilla.net> | 2019-04-21 22:53:32 +0200 |
---|---|---|
committer | David Robillard <d@drobilla.net> | 2019-04-21 22:55:04 +0200 |
commit | 4be819c27f627d0fad0f63cb5817bfca8460541a (patch) | |
tree | 46e2d6880484bc5621a7b148775a2aa47b838d9b /waflib | |
parent | 9b7bfdd92d9a12b0d7db59f0ec0bb790fb827406 (diff) | |
download | lv2-4be819c27f627d0fad0f63cb5817bfca8460541a.tar.xz |
Switch to using a submodule for autowaf
Diffstat (limited to 'waflib')
184 files changed, 0 insertions, 41937 deletions
diff --git a/waflib b/waflib new file mode 160000 +Subproject 2314e236ca6e7d94a26c3c17091da0f25f5867f diff --git a/waflib/.gitignore b/waflib/.gitignore deleted file mode 100644 index 8d35cb3..0000000 --- a/waflib/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -__pycache__ -*.pyc diff --git a/waflib/Build.py b/waflib/Build.py deleted file mode 100644 index 8143dbc..0000000 --- a/waflib/Build.py +++ /dev/null @@ -1,1496 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -""" -Classes related to the build phase (build, clean, install, step, etc) - -The inheritance tree is the following: - -""" - -import os, sys, errno, re, shutil, stat -try: - import cPickle -except ImportError: - import pickle as cPickle -from waflib import Node, Runner, TaskGen, Utils, ConfigSet, Task, Logs, Options, Context, Errors - -CACHE_DIR = 'c4che' -"""Name of the cache directory""" - -CACHE_SUFFIX = '_cache.py' -"""ConfigSet cache files for variants are written under :py:attr:´waflib.Build.CACHE_DIR´ in the form ´variant_name´_cache.py""" - -INSTALL = 1337 -"""Positive value '->' install, see :py:attr:`waflib.Build.BuildContext.is_install`""" - -UNINSTALL = -1337 -"""Negative value '<-' uninstall, see :py:attr:`waflib.Build.BuildContext.is_install`""" - -SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split() -"""Build class members to save between the runs; these should be all dicts -except for `root` which represents a :py:class:`waflib.Node.Node` instance -""" - -CFG_FILES = 'cfg_files' -"""Files from the build directory to hash before starting the build (``config.h`` written during the configuration)""" - -POST_AT_ONCE = 0 -"""Post mode: all task generators are posted before any task executed""" - -POST_LAZY = 1 -"""Post mode: post the task generators group after group, the tasks in the next group are created when the tasks in the previous groups are done""" - -PROTOCOL = -1 -if sys.platform == 'cli': - PROTOCOL = 0 - -class 
BuildContext(Context.Context): - '''executes the build''' - - cmd = 'build' - variant = '' - - def __init__(self, **kw): - super(BuildContext, self).__init__(**kw) - - self.is_install = 0 - """Non-zero value when installing or uninstalling file""" - - self.top_dir = kw.get('top_dir', Context.top_dir) - """See :py:attr:`waflib.Context.top_dir`; prefer :py:attr:`waflib.Build.BuildContext.srcnode`""" - - self.out_dir = kw.get('out_dir', Context.out_dir) - """See :py:attr:`waflib.Context.out_dir`; prefer :py:attr:`waflib.Build.BuildContext.bldnode`""" - - self.run_dir = kw.get('run_dir', Context.run_dir) - """See :py:attr:`waflib.Context.run_dir`""" - - self.launch_dir = Context.launch_dir - """See :py:attr:`waflib.Context.out_dir`; prefer :py:meth:`waflib.Build.BuildContext.launch_node`""" - - self.post_mode = POST_LAZY - """Whether to post the task generators at once or group-by-group (default is group-by-group)""" - - self.cache_dir = kw.get('cache_dir') - if not self.cache_dir: - self.cache_dir = os.path.join(self.out_dir, CACHE_DIR) - - self.all_envs = {} - """Map names to :py:class:`waflib.ConfigSet.ConfigSet`, the empty string must map to the default environment""" - - # ======================================= # - # cache variables - - self.node_sigs = {} - """Dict mapping build nodes to task identifier (uid), it indicates whether a task created a particular file (persists across builds)""" - - self.task_sigs = {} - """Dict mapping task identifiers (uid) to task signatures (persists across builds)""" - - self.imp_sigs = {} - """Dict mapping task identifiers (uid) to implicit task dependencies used for scanning targets (persists across builds)""" - - self.node_deps = {} - """Dict mapping task identifiers (uid) to node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists across builds)""" - - self.raw_deps = {} - """Dict mapping task identifiers (uid) to custom data returned by :py:meth:`waflib.Task.Task.scan` (persists across builds)""" - - 
self.task_gen_cache_names = {} - - self.jobs = Options.options.jobs - """Amount of jobs to run in parallel""" - - self.targets = Options.options.targets - """List of targets to build (default: \\*)""" - - self.keep = Options.options.keep - """Whether the build should continue past errors""" - - self.progress_bar = Options.options.progress_bar - """ - Level of progress status: - - 0. normal output - 1. progress bar - 2. IDE output - 3. No output at all - """ - - # Manual dependencies. - self.deps_man = Utils.defaultdict(list) - """Manual dependencies set by :py:meth:`waflib.Build.BuildContext.add_manual_dependency`""" - - # just the structure here - self.current_group = 0 - """ - Current build group - """ - - self.groups = [] - """ - List containing lists of task generators - """ - - self.group_names = {} - """ - Map group names to the group lists. See :py:meth:`waflib.Build.BuildContext.add_group` - """ - - for v in SAVED_ATTRS: - if not hasattr(self, v): - setattr(self, v, {}) - - def get_variant_dir(self): - """Getter for the variant_dir attribute""" - if not self.variant: - return self.out_dir - return os.path.join(self.out_dir, os.path.normpath(self.variant)) - variant_dir = property(get_variant_dir, None) - - def __call__(self, *k, **kw): - """ - Create a task generator and add it to the current build group. 
The following forms are equivalent:: - - def build(bld): - tg = bld(a=1, b=2) - - def build(bld): - tg = bld() - tg.a = 1 - tg.b = 2 - - def build(bld): - tg = TaskGen.task_gen(a=1, b=2) - bld.add_to_group(tg, None) - - :param group: group name to add the task generator to - :type group: string - """ - kw['bld'] = self - ret = TaskGen.task_gen(*k, **kw) - self.task_gen_cache_names = {} # reset the cache, each time - self.add_to_group(ret, group=kw.get('group')) - return ret - - def __copy__(self): - """ - Build contexts cannot be copied - - :raises: :py:class:`waflib.Errors.WafError` - """ - raise Errors.WafError('build contexts cannot be copied') - - def load_envs(self): - """ - The configuration command creates files of the form ``build/c4che/NAMEcache.py``. This method - creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each ``NAME`` by reading those - files and stores them in :py:attr:`waflib.Build.BuildContext.allenvs`. - """ - node = self.root.find_node(self.cache_dir) - if not node: - raise Errors.WafError('The project was not configured: run "waf configure" first!') - lst = node.ant_glob('**/*%s' % CACHE_SUFFIX, quiet=True) - - if not lst: - raise Errors.WafError('The cache directory is empty: reconfigure the project') - - for x in lst: - name = x.path_from(node).replace(CACHE_SUFFIX, '').replace('\\', '/') - env = ConfigSet.ConfigSet(x.abspath()) - self.all_envs[name] = env - for f in env[CFG_FILES]: - newnode = self.root.find_resource(f) - if not newnode or not newnode.exists(): - raise Errors.WafError('Missing configuration file %r, reconfigure the project!' % f) - - def init_dirs(self): - """ - Initialize the project directory and the build directory by creating the nodes - :py:attr:`waflib.Build.BuildContext.srcnode` and :py:attr:`waflib.Build.BuildContext.bldnode` - corresponding to ``top_dir`` and ``variant_dir`` respectively. The ``bldnode`` directory is - created if necessary. 
- """ - if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)): - raise Errors.WafError('The project was not configured: run "waf configure" first!') - - self.path = self.srcnode = self.root.find_dir(self.top_dir) - self.bldnode = self.root.make_node(self.variant_dir) - self.bldnode.mkdir() - - def execute(self): - """ - Restore data from previous builds and call :py:meth:`waflib.Build.BuildContext.execute_build`. - Overrides from :py:func:`waflib.Context.Context.execute` - """ - self.restore() - if not self.all_envs: - self.load_envs() - self.execute_build() - - def execute_build(self): - """ - Execute the build by: - - * reading the scripts (see :py:meth:`waflib.Context.Context.recurse`) - * calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions - * calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks - * calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions - """ - - Logs.info("Waf: Entering directory `%s'", self.variant_dir) - self.recurse([self.run_dir]) - self.pre_build() - - # display the time elapsed in the progress bar - self.timer = Utils.Timer() - - try: - self.compile() - finally: - if self.progress_bar == 1 and sys.stderr.isatty(): - c = self.producer.processed or 1 - m = self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL) - Logs.info(m, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2' : Logs.colors.cursor_on}) - Logs.info("Waf: Leaving directory `%s'", self.variant_dir) - try: - self.producer.bld = None - del self.producer - except AttributeError: - pass - self.post_build() - - def restore(self): - """ - Load data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS` - """ - try: - env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py')) - except EnvironmentError: - pass - else: - if env.version < Context.HEXVERSION: - raise Errors.WafError('Project was configured with a different 
version of Waf, please reconfigure it') - - for t in env.tools: - self.setup(**t) - - dbfn = os.path.join(self.variant_dir, Context.DBFILE) - try: - data = Utils.readf(dbfn, 'rb') - except (EnvironmentError, EOFError): - # handle missing file/empty file - Logs.debug('build: Could not load the build cache %s (missing)', dbfn) - else: - try: - Node.pickle_lock.acquire() - Node.Nod3 = self.node_class - try: - data = cPickle.loads(data) - except Exception as e: - Logs.debug('build: Could not pickle the build cache %s: %r', dbfn, e) - else: - for x in SAVED_ATTRS: - setattr(self, x, data.get(x, {})) - finally: - Node.pickle_lock.release() - - self.init_dirs() - - def store(self): - """ - Store data for next runs, set the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary - file to avoid problems on ctrl+c. - """ - data = {} - for x in SAVED_ATTRS: - data[x] = getattr(self, x) - db = os.path.join(self.variant_dir, Context.DBFILE) - - try: - Node.pickle_lock.acquire() - Node.Nod3 = self.node_class - x = cPickle.dumps(data, PROTOCOL) - finally: - Node.pickle_lock.release() - - Utils.writef(db + '.tmp', x, m='wb') - - try: - st = os.stat(db) - os.remove(db) - if not Utils.is_win32: # win32 has no chown but we're paranoid - os.chown(db + '.tmp', st.st_uid, st.st_gid) - except (AttributeError, OSError): - pass - - # do not use shutil.move (copy is not thread-safe) - os.rename(db + '.tmp', db) - - def compile(self): - """ - Run the build by creating an instance of :py:class:`waflib.Runner.Parallel` - The cache file is written when at least a task was executed. 
- - :raises: :py:class:`waflib.Errors.BuildError` in case the build fails - """ - Logs.debug('build: compile()') - - # delegate the producer-consumer logic to another object to reduce the complexity - self.producer = Runner.Parallel(self, self.jobs) - self.producer.biter = self.get_build_iterator() - try: - self.producer.start() - except KeyboardInterrupt: - if self.is_dirty(): - self.store() - raise - else: - if self.is_dirty(): - self.store() - - if self.producer.error: - raise Errors.BuildError(self.producer.error) - - def is_dirty(self): - return self.producer.dirty - - def setup(self, tool, tooldir=None, funs=None): - """ - Import waf tools defined during the configuration:: - - def configure(conf): - conf.load('glib2') - - def build(bld): - pass # glib2 is imported implicitly - - :param tool: tool list - :type tool: list - :param tooldir: optional tool directory (sys.path) - :type tooldir: list of string - :param funs: unused variable - """ - if isinstance(tool, list): - for i in tool: - self.setup(i, tooldir) - return - - module = Context.load_tool(tool, tooldir) - if hasattr(module, "setup"): - module.setup(self) - - def get_env(self): - """Getter for the env property""" - try: - return self.all_envs[self.variant] - except KeyError: - return self.all_envs[''] - def set_env(self, val): - """Setter for the env property""" - self.all_envs[self.variant] = val - - env = property(get_env, set_env) - - def add_manual_dependency(self, path, value): - """ - Adds a dependency from a node object to a value:: - - def build(bld): - bld.add_manual_dependency( - bld.path.find_resource('wscript'), - bld.root.find_resource('/etc/fstab')) - - :param path: file path - :type path: string or :py:class:`waflib.Node.Node` - :param value: value to depend - :type value: :py:class:`waflib.Node.Node`, byte object, or function returning a byte object - """ - if not path: - raise ValueError('Invalid input path %r' % path) - - if isinstance(path, Node.Node): - node = path - elif 
os.path.isabs(path): - node = self.root.find_resource(path) - else: - node = self.path.find_resource(path) - if not node: - raise ValueError('Could not find the path %r' % path) - - if isinstance(value, list): - self.deps_man[node].extend(value) - else: - self.deps_man[node].append(value) - - def launch_node(self): - """Returns the launch directory as a :py:class:`waflib.Node.Node` object (cached)""" - try: - # private cache - return self.p_ln - except AttributeError: - self.p_ln = self.root.find_dir(self.launch_dir) - return self.p_ln - - def hash_env_vars(self, env, vars_lst): - """ - Hashes configuration set variables:: - - def build(bld): - bld.hash_env_vars(bld.env, ['CXX', 'CC']) - - This method uses an internal cache. - - :param env: Configuration Set - :type env: :py:class:`waflib.ConfigSet.ConfigSet` - :param vars_lst: list of variables - :type vars_list: list of string - """ - - if not env.table: - env = env.parent - if not env: - return Utils.SIG_NIL - - idx = str(id(env)) + str(vars_lst) - try: - cache = self.cache_env - except AttributeError: - cache = self.cache_env = {} - else: - try: - return self.cache_env[idx] - except KeyError: - pass - - lst = [env[a] for a in vars_lst] - cache[idx] = ret = Utils.h_list(lst) - Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst) - return ret - - def get_tgen_by_name(self, name): - """ - Fetches a task generator by its name or its target attribute; - the name must be unique in a build:: - - def build(bld): - tg = bld(name='foo') - tg == bld.get_tgen_by_name('foo') - - This method use a private internal cache. 
- - :param name: Task generator name - :raises: :py:class:`waflib.Errors.WafError` in case there is no task genenerator by that name - """ - cache = self.task_gen_cache_names - if not cache: - # create the index lazily - for g in self.groups: - for tg in g: - try: - cache[tg.name] = tg - except AttributeError: - # raised if not a task generator, which should be uncommon - pass - try: - return cache[name] - except KeyError: - raise Errors.WafError('Could not find a task generator for the name %r' % name) - - def progress_line(self, idx, total, col1, col2): - """ - Computes a progress bar line displayed when running ``waf -p`` - - :returns: progress bar line - :rtype: string - """ - if not sys.stderr.isatty(): - return '' - - n = len(str(total)) - - Utils.rot_idx += 1 - ind = Utils.rot_chr[Utils.rot_idx % 4] - - pc = (100. * idx)/total - fs = "[%%%dd/%%d][%%s%%2d%%%%%%s][%s][" % (n, ind) - left = fs % (idx, total, col1, pc, col2) - right = '][%s%s%s]' % (col1, self.timer, col2) - - cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2) - if cols < 7: - cols = 7 - - ratio = ((cols * idx)//total) - 1 - - bar = ('='*ratio+'>').ljust(cols) - msg = Logs.indicator % (left, bar, right) - - return msg - - def declare_chain(self, *k, **kw): - """ - Wraps :py:func:`waflib.TaskGen.declare_chain` for convenience - """ - return TaskGen.declare_chain(*k, **kw) - - def pre_build(self): - """Executes user-defined methods before the build starts, see :py:meth:`waflib.Build.BuildContext.add_pre_fun`""" - for m in getattr(self, 'pre_funs', []): - m(self) - - def post_build(self): - """Executes user-defined methods after the build is successful, see :py:meth:`waflib.Build.BuildContext.add_post_fun`""" - for m in getattr(self, 'post_funs', []): - m(self) - - def add_pre_fun(self, meth): - """ - Binds a callback method to execute after the scripts are read and before the build starts:: - - def mycallback(bld): - print("Hello, world!") - - def build(bld): - 
bld.add_pre_fun(mycallback) - """ - try: - self.pre_funs.append(meth) - except AttributeError: - self.pre_funs = [meth] - - def add_post_fun(self, meth): - """ - Binds a callback method to execute immediately after the build is successful:: - - def call_ldconfig(bld): - bld.exec_command('/sbin/ldconfig') - - def build(bld): - if bld.cmd == 'install': - bld.add_pre_fun(call_ldconfig) - """ - try: - self.post_funs.append(meth) - except AttributeError: - self.post_funs = [meth] - - def get_group(self, x): - """ - Returns the build group named `x`, or the current group if `x` is None - - :param x: name or number or None - :type x: string, int or None - """ - if not self.groups: - self.add_group() - if x is None: - return self.groups[self.current_group] - if x in self.group_names: - return self.group_names[x] - return self.groups[x] - - def add_to_group(self, tgen, group=None): - """Adds a task or a task generator to the build; there is no attempt to remove it if it was already added.""" - assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.Task)) - tgen.bld = self - self.get_group(group).append(tgen) - - def get_group_name(self, g): - """ - Returns the name of the input build group - - :param g: build group object or build group index - :type g: integer or list - :return: name - :rtype: string - """ - if not isinstance(g, list): - g = self.groups[g] - for x in self.group_names: - if id(self.group_names[x]) == id(g): - return x - return '' - - def get_group_idx(self, tg): - """ - Returns the index of the group containing the task generator given as argument:: - - def build(bld): - tg = bld(name='nada') - 0 == bld.get_group_idx(tg) - - :param tg: Task generator object - :type tg: :py:class:`waflib.TaskGen.task_gen` - :rtype: int - """ - se = id(tg) - for i, tmp in enumerate(self.groups): - for t in tmp: - if id(t) == se: - return i - return None - - def add_group(self, name=None, move=True): - """ - Adds a new group of tasks/task generators. 
By default the new group becomes - the default group for new task generators (make sure to create build groups in order). - - :param name: name for this group - :type name: string - :param move: set this new group as default group (True by default) - :type move: bool - :raises: :py:class:`waflib.Errors.WafError` if a group by the name given already exists - """ - if name and name in self.group_names: - raise Errors.WafError('add_group: name %s already present', name) - g = [] - self.group_names[name] = g - self.groups.append(g) - if move: - self.current_group = len(self.groups) - 1 - - def set_group(self, idx): - """ - Sets the build group at position idx as current so that newly added - task generators are added to this one by default:: - - def build(bld): - bld(rule='touch ${TGT}', target='foo.txt') - bld.add_group() # now the current group is 1 - bld(rule='touch ${TGT}', target='bar.txt') - bld.set_group(0) # now the current group is 0 - bld(rule='touch ${TGT}', target='truc.txt') # build truc.txt before bar.txt - - :param idx: group name or group index - :type idx: string or int - """ - if isinstance(idx, str): - g = self.group_names[idx] - for i, tmp in enumerate(self.groups): - if id(g) == id(tmp): - self.current_group = i - break - else: - self.current_group = idx - - def total(self): - """ - Approximate task count: this value may be inaccurate if task generators - are posted lazily (see :py:attr:`waflib.Build.BuildContext.post_mode`). - The value :py:attr:`waflib.Runner.Parallel.total` is updated during the task execution. - - :rtype: int - """ - total = 0 - for group in self.groups: - for tg in group: - try: - total += len(tg.tasks) - except AttributeError: - total += 1 - return total - - def get_targets(self): - """ - This method returns a pair containing the index of the last build group to post, - and the list of task generator objects corresponding to the target names. 
- - This is used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator` - to perform partial builds:: - - $ waf --targets=myprogram,myshlib - - :return: the minimum build group index, and list of task generators - :rtype: tuple - """ - to_post = [] - min_grp = 0 - for name in self.targets.split(','): - tg = self.get_tgen_by_name(name) - m = self.get_group_idx(tg) - if m > min_grp: - min_grp = m - to_post = [tg] - elif m == min_grp: - to_post.append(tg) - return (min_grp, to_post) - - def get_all_task_gen(self): - """ - Returns a list of all task generators for troubleshooting purposes. - """ - lst = [] - for g in self.groups: - lst.extend(g) - return lst - - def post_group(self): - """ - Post task generators from the group indexed by self.current_group; used internally - by :py:meth:`waflib.Build.BuildContext.get_build_iterator` - """ - def tgpost(tg): - try: - f = tg.post - except AttributeError: - pass - else: - f() - - if self.targets == '*': - for tg in self.groups[self.current_group]: - tgpost(tg) - elif self.targets: - if self.current_group < self._min_grp: - for tg in self.groups[self.current_group]: - tgpost(tg) - else: - for tg in self._exact_tg: - tg.post() - else: - ln = self.launch_node() - if ln.is_child_of(self.bldnode): - Logs.warn('Building from the build directory, forcing --targets=*') - ln = self.srcnode - elif not ln.is_child_of(self.srcnode): - Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)', ln.abspath(), self.srcnode.abspath()) - ln = self.srcnode - - def is_post(tg, ln): - try: - p = tg.path - except AttributeError: - pass - else: - if p.is_child_of(ln): - return True - - def is_post_group(): - for i, g in enumerate(self.groups): - if i > self.current_group: - for tg in g: - if is_post(tg, ln): - return True - - if self.post_mode == POST_LAZY and ln != self.srcnode: - # partial folder builds require all targets from a previous build group - if is_post_group(): - ln = self.srcnode - - for tg in 
self.groups[self.current_group]: - if is_post(tg, ln): - tgpost(tg) - - def get_tasks_group(self, idx): - """ - Returns all task instances for the build group at position idx, - used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator` - - :rtype: list of :py:class:`waflib.Task.Task` - """ - tasks = [] - for tg in self.groups[idx]: - try: - tasks.extend(tg.tasks) - except AttributeError: # not a task generator - tasks.append(tg) - return tasks - - def get_build_iterator(self): - """ - Creates a Python generator object that returns lists of tasks that may be processed in parallel. - - :return: tasks which can be executed immediately - :rtype: generator returning lists of :py:class:`waflib.Task.Task` - """ - if self.targets and self.targets != '*': - (self._min_grp, self._exact_tg) = self.get_targets() - - if self.post_mode != POST_LAZY: - for self.current_group, _ in enumerate(self.groups): - self.post_group() - - for self.current_group, _ in enumerate(self.groups): - # first post the task generators for the group - if self.post_mode != POST_AT_ONCE: - self.post_group() - - # then extract the tasks - tasks = self.get_tasks_group(self.current_group) - - # if the constraints are set properly (ext_in/ext_out, before/after) - # the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds) - # (but leave set_file_constraints for the installation step) - # - # if the tasks have only files, set_file_constraints is required but set_precedence_constraints is not necessary - # - Task.set_file_constraints(tasks) - Task.set_precedence_constraints(tasks) - - self.cur_tasks = tasks - if tasks: - yield tasks - - while 1: - # the build stops once there are no tasks to process - yield [] - - def install_files(self, dest, files, **kw): - """ - Creates a task generator to install files on the system:: - - def build(bld): - bld.install_files('${DATADIR}', self.path.find_resource('wscript')) - - :param dest: path representing the destination 
directory - :type dest: :py:class:`waflib.Node.Node` or string (absolute path) - :param files: input files - :type files: list of strings or list of :py:class:`waflib.Node.Node` - :param env: configuration set to expand *dest* - :type env: :py:class:`waflib.ConfigSet.ConfigSet` - :param relative_trick: preserve the folder hierarchy when installing whole folders - :type relative_trick: bool - :param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node` - :type cwd: :py:class:`waflib.Node.Node` - :param postpone: execute the task immediately to perform the installation (False by default) - :type postpone: bool - """ - assert(dest) - tg = self(features='install_task', install_to=dest, install_from=files, **kw) - tg.dest = tg.install_to - tg.type = 'install_files' - if not kw.get('postpone', True): - tg.post() - return tg - - def install_as(self, dest, srcfile, **kw): - """ - Creates a task generator to install a file on the system with a different name:: - - def build(bld): - bld.install_as('${PREFIX}/bin', 'myapp', chmod=Utils.O755) - - :param dest: destination file - :type dest: :py:class:`waflib.Node.Node` or string (absolute path) - :param srcfile: input file - :type srcfile: string or :py:class:`waflib.Node.Node` - :param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node` - :type cwd: :py:class:`waflib.Node.Node` - :param env: configuration set for performing substitutions in dest - :type env: :py:class:`waflib.ConfigSet.ConfigSet` - :param postpone: execute the task immediately to perform the installation (False by default) - :type postpone: bool - """ - assert(dest) - tg = self(features='install_task', install_to=dest, install_from=srcfile, **kw) - tg.dest = tg.install_to - tg.type = 'install_as' - if not kw.get('postpone', True): - tg.post() - return tg - - def symlink_as(self, dest, src, **kw): - """ - Creates a task generator to install a symlink:: - - def 
build(bld): - bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') - - :param dest: absolute path of the symlink - :type dest: :py:class:`waflib.Node.Node` or string (absolute path) - :param src: link contents, which is a relative or absolute path which may exist or not - :type src: string - :param env: configuration set for performing substitutions in dest - :type env: :py:class:`waflib.ConfigSet.ConfigSet` - :param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started - :type add: bool - :param postpone: execute the task immediately to perform the installation - :type postpone: bool - :param relative_trick: make the symlink relative (default: ``False``) - :type relative_trick: bool - """ - assert(dest) - tg = self(features='install_task', install_to=dest, install_from=src, **kw) - tg.dest = tg.install_to - tg.type = 'symlink_as' - tg.link = src - # TODO if add: self.add_to_group(tsk) - if not kw.get('postpone', True): - tg.post() - return tg - -@TaskGen.feature('install_task') -@TaskGen.before_method('process_rule', 'process_source') -def process_install_task(self): - """Creates the installation task for the current task generator; uses :py:func:`waflib.Build.add_install_task` internally.""" - self.add_install_task(**self.__dict__) - -@TaskGen.taskgen_method -def add_install_task(self, **kw): - """ - Creates the installation task for the current task generator, and executes it immediately if necessary - - :returns: An installation task - :rtype: :py:class:`waflib.Build.inst` - """ - if not self.bld.is_install: - return - if not kw['install_to']: - return - - if kw['type'] == 'symlink_as' and Utils.is_win32: - if kw.get('win32_install'): - kw['type'] = 'install_as' - else: - # just exit - return - - tsk = self.install_task = self.create_task('inst') - tsk.chmod = kw.get('chmod', Utils.O644) - tsk.link = kw.get('link', '') or kw.get('install_from', '') - tsk.relative_trick = 
kw.get('relative_trick', False) - tsk.type = kw['type'] - tsk.install_to = tsk.dest = kw['install_to'] - tsk.install_from = kw['install_from'] - tsk.relative_base = kw.get('cwd') or kw.get('relative_base', self.path) - tsk.install_user = kw.get('install_user') - tsk.install_group = kw.get('install_group') - tsk.init_files() - if not kw.get('postpone', True): - tsk.run_now() - return tsk - -@TaskGen.taskgen_method -def add_install_files(self, **kw): - """ - Creates an installation task for files - - :returns: An installation task - :rtype: :py:class:`waflib.Build.inst` - """ - kw['type'] = 'install_files' - return self.add_install_task(**kw) - -@TaskGen.taskgen_method -def add_install_as(self, **kw): - """ - Creates an installation task for a single file - - :returns: An installation task - :rtype: :py:class:`waflib.Build.inst` - """ - kw['type'] = 'install_as' - return self.add_install_task(**kw) - -@TaskGen.taskgen_method -def add_symlink_as(self, **kw): - """ - Creates an installation task for a symbolic link - - :returns: An installation task - :rtype: :py:class:`waflib.Build.inst` - """ - kw['type'] = 'symlink_as' - return self.add_install_task(**kw) - -class inst(Task.Task): - """Task that installs files or symlinks; it is typically executed by :py:class:`waflib.Build.InstallContext` and :py:class:`waflib.Build.UnInstallContext`""" - def __str__(self): - """Returns an empty string to disable the standard task display""" - return '' - - def uid(self): - """Returns a unique identifier for the task""" - lst = self.inputs + self.outputs + [self.link, self.generator.path.abspath()] - return Utils.h_list(lst) - - def init_files(self): - """ - Initializes the task input and output nodes - """ - if self.type == 'symlink_as': - inputs = [] - else: - inputs = self.generator.to_nodes(self.install_from) - if self.type == 'install_as': - assert len(inputs) == 1 - self.set_inputs(inputs) - - dest = self.get_install_path() - outputs = [] - if self.type == 'symlink_as': - if 
self.relative_trick: - self.link = os.path.relpath(self.link, os.path.dirname(dest)) - outputs.append(self.generator.bld.root.make_node(dest)) - elif self.type == 'install_as': - outputs.append(self.generator.bld.root.make_node(dest)) - else: - for y in inputs: - if self.relative_trick: - destfile = os.path.join(dest, y.path_from(self.relative_base)) - else: - destfile = os.path.join(dest, y.name) - outputs.append(self.generator.bld.root.make_node(destfile)) - self.set_outputs(outputs) - - def runnable_status(self): - """ - Installation tasks are always executed, so this method returns either :py:const:`waflib.Task.ASK_LATER` or :py:const:`waflib.Task.RUN_ME`. - """ - ret = super(inst, self).runnable_status() - if ret == Task.SKIP_ME and self.generator.bld.is_install: - return Task.RUN_ME - return ret - - def post_run(self): - """ - Disables any post-run operations - """ - pass - - def get_install_path(self, destdir=True): - """ - Returns the destination path where files will be installed, pre-pending `destdir`. - - Relative paths will be interpreted relative to `PREFIX` if no `destdir` is given. - - :rtype: string - """ - if isinstance(self.install_to, Node.Node): - dest = self.install_to.abspath() - else: - dest = os.path.normpath(Utils.subst_vars(self.install_to, self.env)) - if not os.path.isabs(dest): - dest = os.path.join(self.env.PREFIX, dest) - if destdir and Options.options.destdir: - dest = os.path.join(Options.options.destdir, os.path.splitdrive(dest)[1].lstrip(os.sep)) - return dest - - def copy_fun(self, src, tgt): - """ - Copies a file from src to tgt, preserving permissions and trying to work - around path limitations on Windows platforms. 
On Unix-like platforms, - the owner/group of the target file may be set through install_user/install_group - - :param src: absolute path - :type src: string - :param tgt: absolute path - :type tgt: string - """ - # override this if you want to strip executables - # kw['tsk'].source is the task that created the files in the build - if Utils.is_win32 and len(tgt) > 259 and not tgt.startswith('\\\\?\\'): - tgt = '\\\\?\\' + tgt - shutil.copy2(src, tgt) - self.fix_perms(tgt) - - def rm_empty_dirs(self, tgt): - """ - Removes empty folders recursively when uninstalling. - - :param tgt: absolute path - :type tgt: string - """ - while tgt: - tgt = os.path.dirname(tgt) - try: - os.rmdir(tgt) - except OSError: - break - - def run(self): - """ - Performs file or symlink installation - """ - is_install = self.generator.bld.is_install - if not is_install: # unnecessary? - return - - for x in self.outputs: - if is_install == INSTALL: - x.parent.mkdir() - if self.type == 'symlink_as': - fun = is_install == INSTALL and self.do_link or self.do_unlink - fun(self.link, self.outputs[0].abspath()) - else: - fun = is_install == INSTALL and self.do_install or self.do_uninstall - launch_node = self.generator.bld.launch_node() - for x, y in zip(self.inputs, self.outputs): - fun(x.abspath(), y.abspath(), x.path_from(launch_node)) - - def run_now(self): - """ - Try executing the installation task right now - - :raises: :py:class:`waflib.Errors.TaskNotReady` - """ - status = self.runnable_status() - if status not in (Task.RUN_ME, Task.SKIP_ME): - raise Errors.TaskNotReady('Could not process %r: status %r' % (self, status)) - self.run() - self.hasrun = Task.SUCCESS - - def do_install(self, src, tgt, lbl, **kw): - """ - Copies a file from src to tgt with given file permissions. The actual copy is only performed - if the source and target file sizes or timestamps differ. When the copy occurs, - the file is always first removed and then copied so as to prevent stale inodes. 
- - :param src: file name as absolute path - :type src: string - :param tgt: file destination, as absolute path - :type tgt: string - :param lbl: file source description - :type lbl: string - :param chmod: installation mode - :type chmod: int - :raises: :py:class:`waflib.Errors.WafError` if the file cannot be written - """ - if not Options.options.force: - # check if the file is already there to avoid a copy - try: - st1 = os.stat(tgt) - st2 = os.stat(src) - except OSError: - pass - else: - # same size and identical timestamps -> make no copy - if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size: - if not self.generator.bld.progress_bar: - Logs.info('- install %s (from %s)', tgt, lbl) - return False - - if not self.generator.bld.progress_bar: - Logs.info('+ install %s (from %s)', tgt, lbl) - - # Give best attempt at making destination overwritable, - # like the 'install' utility used by 'make install' does. - try: - os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode)) - except EnvironmentError: - pass - - # following is for shared libs and stale inodes (-_-) - try: - os.remove(tgt) - except OSError: - pass - - try: - self.copy_fun(src, tgt) - except EnvironmentError as e: - if not os.path.exists(src): - Logs.error('File %r does not exist', src) - elif not os.path.isfile(src): - Logs.error('Input %r is not a file', src) - raise Errors.WafError('Could not install the file %r' % tgt, e) - - def fix_perms(self, tgt): - """ - Change the ownership of the file/folder/link pointed by the given path - This looks up for `install_user` or `install_group` attributes - on the task or on the task generator:: - - def build(bld): - bld.install_as('${PREFIX}/wscript', - 'wscript', - install_user='nobody', install_group='nogroup') - bld.symlink_as('${PREFIX}/wscript_link', - Utils.subst_vars('${PREFIX}/wscript', bld.env), - install_user='nobody', install_group='nogroup') - """ - if not Utils.is_win32: - user = getattr(self, 'install_user', None) or 
getattr(self.generator, 'install_user', None) - group = getattr(self, 'install_group', None) or getattr(self.generator, 'install_group', None) - if user or group: - Utils.lchown(tgt, user or -1, group or -1) - if not os.path.islink(tgt): - os.chmod(tgt, self.chmod) - - def do_link(self, src, tgt, **kw): - """ - Creates a symlink from tgt to src. - - :param src: file name as absolute path - :type src: string - :param tgt: file destination, as absolute path - :type tgt: string - """ - if os.path.islink(tgt) and os.readlink(tgt) == src: - if not self.generator.bld.progress_bar: - Logs.info('- symlink %s (to %s)', tgt, src) - else: - try: - os.remove(tgt) - except OSError: - pass - if not self.generator.bld.progress_bar: - Logs.info('+ symlink %s (to %s)', tgt, src) - os.symlink(src, tgt) - self.fix_perms(tgt) - - def do_uninstall(self, src, tgt, lbl, **kw): - """ - See :py:meth:`waflib.Build.inst.do_install` - """ - if not self.generator.bld.progress_bar: - Logs.info('- remove %s', tgt) - - #self.uninstall.append(tgt) - try: - os.remove(tgt) - except OSError as e: - if e.errno != errno.ENOENT: - if not getattr(self, 'uninstall_error', None): - self.uninstall_error = True - Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)') - if Logs.verbose > 1: - Logs.warn('Could not remove %s (error code %r)', e.filename, e.errno) - self.rm_empty_dirs(tgt) - - def do_unlink(self, src, tgt, **kw): - """ - See :py:meth:`waflib.Build.inst.do_link` - """ - try: - if not self.generator.bld.progress_bar: - Logs.info('- remove %s', tgt) - os.remove(tgt) - except OSError: - pass - self.rm_empty_dirs(tgt) - -class InstallContext(BuildContext): - '''installs the targets on the system''' - cmd = 'install' - - def __init__(self, **kw): - super(InstallContext, self).__init__(**kw) - self.is_install = INSTALL - -class UninstallContext(InstallContext): - '''removes the targets installed''' - cmd = 'uninstall' - - def __init__(self, **kw): - 
super(UninstallContext, self).__init__(**kw) - self.is_install = UNINSTALL - -class CleanContext(BuildContext): - '''cleans the project''' - cmd = 'clean' - def execute(self): - """ - See :py:func:`waflib.Build.BuildContext.execute`. - """ - self.restore() - if not self.all_envs: - self.load_envs() - - self.recurse([self.run_dir]) - try: - self.clean() - finally: - self.store() - - def clean(self): - """ - Remove most files from the build directory, and reset all caches. - - Custom lists of files to clean can be declared as `bld.clean_files`. - For example, exclude `build/program/myprogram` from getting removed:: - - def build(bld): - bld.clean_files = bld.bldnode.ant_glob('**', - excl='.lock* config.log c4che/* config.h program/myprogram', - quiet=True, generator=True) - """ - Logs.debug('build: clean called') - - if hasattr(self, 'clean_files'): - for n in self.clean_files: - n.delete() - elif self.bldnode != self.srcnode: - # would lead to a disaster if top == out - lst = [] - for env in self.all_envs.values(): - lst.extend(self.root.find_or_declare(f) for f in env[CFG_FILES]) - excluded_dirs = '.lock* *conf_check_*/** config.log %s/*' % CACHE_DIR - for n in self.bldnode.ant_glob('**/*', excl=excluded_dirs, quiet=True): - if n in lst: - continue - n.delete() - self.root.children = {} - - for v in SAVED_ATTRS: - if v == 'root': - continue - setattr(self, v, {}) - -class ListContext(BuildContext): - '''lists the targets to execute''' - cmd = 'list' - - def execute(self): - """ - In addition to printing the name of each build target, - a description column will include text for each task - generator which has a "description" field set. - - See :py:func:`waflib.Build.BuildContext.execute`. 
- """ - self.restore() - if not self.all_envs: - self.load_envs() - - self.recurse([self.run_dir]) - self.pre_build() - - # display the time elapsed in the progress bar - self.timer = Utils.Timer() - - for g in self.groups: - for tg in g: - try: - f = tg.post - except AttributeError: - pass - else: - f() - - try: - # force the cache initialization - self.get_tgen_by_name('') - except Errors.WafError: - pass - - targets = sorted(self.task_gen_cache_names) - - # figure out how much to left-justify, for largest target name - line_just = max(len(t) for t in targets) if targets else 0 - - for target in targets: - tgen = self.task_gen_cache_names[target] - - # Support displaying the description for the target - # if it was set on the tgen - descript = getattr(tgen, 'description', '') - if descript: - target = target.ljust(line_just) - descript = ': %s' % descript - - Logs.pprint('GREEN', target, label=descript) - -class StepContext(BuildContext): - '''executes tasks in a step-by-step fashion, for debugging''' - cmd = 'step' - - def __init__(self, **kw): - super(StepContext, self).__init__(**kw) - self.files = Options.options.files - - def compile(self): - """ - Overrides :py:meth:`waflib.Build.BuildContext.compile` to perform a partial build - on tasks matching the input/output pattern given (regular expression matching):: - - $ waf step --files=foo.c,bar.c,in:truc.c,out:bar.o - $ waf step --files=in:foo.cpp.1.o # link task only - - """ - if not self.files: - Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"') - BuildContext.compile(self) - return - - targets = [] - if self.targets and self.targets != '*': - targets = self.targets.split(',') - - for g in self.groups: - for tg in g: - if targets and tg.name not in targets: - continue - - try: - f = tg.post - except AttributeError: - pass - else: - f() - - for pat in self.files.split(','): - matcher = self.get_matcher(pat) - for tg in g: - if isinstance(tg, Task.Task): - lst = [tg] - 
else: - lst = tg.tasks - for tsk in lst: - do_exec = False - for node in tsk.inputs: - if matcher(node, output=False): - do_exec = True - break - for node in tsk.outputs: - if matcher(node, output=True): - do_exec = True - break - if do_exec: - ret = tsk.run() - Logs.info('%s -> exit %r', tsk, ret) - - def get_matcher(self, pat): - """ - Converts a step pattern into a function - - :param: pat: pattern of the form in:truc.c,out:bar.o - :returns: Python function that uses Node objects as inputs and returns matches - :rtype: function - """ - # this returns a function - inn = True - out = True - if pat.startswith('in:'): - out = False - pat = pat.replace('in:', '') - elif pat.startswith('out:'): - inn = False - pat = pat.replace('out:', '') - - anode = self.root.find_node(pat) - pattern = None - if not anode: - if not pat.startswith('^'): - pat = '^.+?%s' % pat - if not pat.endswith('$'): - pat = '%s$' % pat - pattern = re.compile(pat) - - def match(node, output): - if output and not out: - return False - if not output and not inn: - return False - - if anode: - return anode == node - else: - return pattern.match(node.abspath()) - return match - -class EnvContext(BuildContext): - """Subclass EnvContext to create commands that require configuration data in 'env'""" - fun = cmd = None - def execute(self): - """ - See :py:func:`waflib.Build.BuildContext.execute`. - """ - self.restore() - if not self.all_envs: - self.load_envs() - self.recurse([self.run_dir]) - diff --git a/waflib/COPYING b/waflib/COPYING deleted file mode 100644 index a4147d2..0000000 --- a/waflib/COPYING +++ /dev/null @@ -1,25 +0,0 @@ -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. 
Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. The name of the author may not be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR -IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, -INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, -STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING -IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/waflib/ConfigSet.py b/waflib/ConfigSet.py deleted file mode 100644 index 901fba6..0000000 --- a/waflib/ConfigSet.py +++ /dev/null @@ -1,361 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -""" - -ConfigSet: a special dict - -The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings) -""" - -import copy, re, os -from waflib import Logs, Utils -re_imp = re.compile(r'^(#)*?([^#=]*?)\ =\ (.*?)$', re.M) - -class ConfigSet(object): - """ - A copy-on-write dict with human-readable serialized format. The serialization format - is human-readable (python-like) and performed by using eval() and repr(). - For high performance prefer pickle. Do not store functions as they are not serializable. 
- - The values can be accessed by attributes or by keys:: - - from waflib.ConfigSet import ConfigSet - env = ConfigSet() - env.FOO = 'test' - env['FOO'] = 'test' - """ - __slots__ = ('table', 'parent') - def __init__(self, filename=None): - self.table = {} - """ - Internal dict holding the object values - """ - #self.parent = None - - if filename: - self.load(filename) - - def __contains__(self, key): - """ - Enables the *in* syntax:: - - if 'foo' in env: - print(env['foo']) - """ - if key in self.table: - return True - try: - return self.parent.__contains__(key) - except AttributeError: - return False # parent may not exist - - def keys(self): - """Dict interface""" - keys = set() - cur = self - while cur: - keys.update(cur.table.keys()) - cur = getattr(cur, 'parent', None) - keys = list(keys) - keys.sort() - return keys - - def __iter__(self): - return iter(self.keys()) - - def __str__(self): - """Text representation of the ConfigSet (for debugging purposes)""" - return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()]) - - def __getitem__(self, key): - """ - Dictionary interface: get value from key:: - - def configure(conf): - conf.env['foo'] = {} - print(env['foo']) - """ - try: - while 1: - x = self.table.get(key) - if not x is None: - return x - self = self.parent - except AttributeError: - return [] - - def __setitem__(self, key, value): - """ - Dictionary interface: set value from key - """ - self.table[key] = value - - def __delitem__(self, key): - """ - Dictionary interface: mark the value as missing - """ - self[key] = [] - - def __getattr__(self, name): - """ - Attribute access provided for convenience. The following forms are equivalent:: - - def configure(conf): - conf.env.value - conf.env['value'] - """ - if name in self.__slots__: - return object.__getattribute__(self, name) - else: - return self[name] - - def __setattr__(self, name, value): - """ - Attribute access provided for convenience. 
The following forms are equivalent:: - - def configure(conf): - conf.env.value = x - env['value'] = x - """ - if name in self.__slots__: - object.__setattr__(self, name, value) - else: - self[name] = value - - def __delattr__(self, name): - """ - Attribute access provided for convenience. The following forms are equivalent:: - - def configure(conf): - del env.value - del env['value'] - """ - if name in self.__slots__: - object.__delattr__(self, name) - else: - del self[name] - - def derive(self): - """ - Returns a new ConfigSet deriving from self. The copy returned - will be a shallow copy:: - - from waflib.ConfigSet import ConfigSet - env = ConfigSet() - env.append_value('CFLAGS', ['-O2']) - child = env.derive() - child.CFLAGS.append('test') # warning! this will modify 'env' - child.CFLAGS = ['-O3'] # new list, ok - child.append_value('CFLAGS', ['-O3']) # ok - - Use :py:func:`ConfigSet.detach` to detach the child from the parent. - """ - newenv = ConfigSet() - newenv.parent = self - return newenv - - def detach(self): - """ - Detaches this instance from its parent (if present) - - Modifying the parent :py:class:`ConfigSet` will not change the current object - Modifying this :py:class:`ConfigSet` will not modify the parent one. - """ - tbl = self.get_merged_dict() - try: - delattr(self, 'parent') - except AttributeError: - pass - else: - keys = tbl.keys() - for x in keys: - tbl[x] = copy.deepcopy(tbl[x]) - self.table = tbl - return self - - def get_flat(self, key): - """ - Returns a value as a string. If the input is a list, the value returned is space-separated. - - :param key: key to use - :type key: string - """ - s = self[key] - if isinstance(s, str): - return s - return ' '.join(s) - - def _get_list_value_for_modification(self, key): - """ - Returns a list value for further modification. 
- - The list may be modified inplace and there is no need to do this afterwards:: - - self.table[var] = value - """ - try: - value = self.table[key] - except KeyError: - try: - value = self.parent[key] - except AttributeError: - value = [] - else: - if isinstance(value, list): - # force a copy - value = value[:] - else: - value = [value] - self.table[key] = value - else: - if not isinstance(value, list): - self.table[key] = value = [value] - return value - - def append_value(self, var, val): - """ - Appends a value to the specified config key:: - - def build(bld): - bld.env.append_value('CFLAGS', ['-O2']) - - The value must be a list or a tuple - """ - if isinstance(val, str): # if there were string everywhere we could optimize this - val = [val] - current_value = self._get_list_value_for_modification(var) - current_value.extend(val) - - def prepend_value(self, var, val): - """ - Prepends a value to the specified item:: - - def configure(conf): - conf.env.prepend_value('CFLAGS', ['-O2']) - - The value must be a list or a tuple - """ - if isinstance(val, str): - val = [val] - self.table[var] = val + self._get_list_value_for_modification(var) - - def append_unique(self, var, val): - """ - Appends a value to the specified item only if it's not already present:: - - def build(bld): - bld.env.append_unique('CFLAGS', ['-O2', '-g']) - - The value must be a list or a tuple - """ - if isinstance(val, str): - val = [val] - current_value = self._get_list_value_for_modification(var) - - for x in val: - if x not in current_value: - current_value.append(x) - - def get_merged_dict(self): - """ - Computes the merged dictionary from the fusion of self and all its parent - - :rtype: a ConfigSet object - """ - table_list = [] - env = self - while 1: - table_list.insert(0, env.table) - try: - env = env.parent - except AttributeError: - break - merged_table = {} - for table in table_list: - merged_table.update(table) - return merged_table - - def store(self, filename): - """ - 
Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load` for reading such files. - - :param filename: file to use - :type filename: string - """ - try: - os.makedirs(os.path.split(filename)[0]) - except OSError: - pass - - buf = [] - merged_table = self.get_merged_dict() - keys = list(merged_table.keys()) - keys.sort() - - try: - fun = ascii - except NameError: - fun = repr - - for k in keys: - if k != 'undo_stack': - buf.append('%s = %s\n' % (k, fun(merged_table[k]))) - Utils.writef(filename, ''.join(buf)) - - def load(self, filename): - """ - Restores contents from a file (current values are not cleared). Files are written using :py:meth:`ConfigSet.store`. - - :param filename: file to use - :type filename: string - """ - tbl = self.table - code = Utils.readf(filename, m='r') - for m in re_imp.finditer(code): - g = m.group - tbl[g(2)] = eval(g(3)) - Logs.debug('env: %s', self.table) - - def update(self, d): - """ - Dictionary interface: replace values with the ones from another dict - - :param d: object to use the value from - :type d: dict-like object - """ - self.table.update(d) - - def stash(self): - """ - Stores the object state to provide transactionality semantics:: - - env = ConfigSet() - env.stash() - try: - env.append_value('CFLAGS', '-O3') - call_some_method(env) - finally: - env.revert() - - The history is kept in a stack, and is lost during the serialization by :py:meth:`ConfigSet.store` - """ - orig = self.table - tbl = self.table = self.table.copy() - for x in tbl.keys(): - tbl[x] = copy.deepcopy(tbl[x]) - self.undo_stack = self.undo_stack + [orig] - - def commit(self): - """ - Commits transactional changes. See :py:meth:`ConfigSet.stash` - """ - self.undo_stack.pop(-1) - - def revert(self): - """ - Reverts the object to a previous state. 
See :py:meth:`ConfigSet.stash` - """ - self.table = self.undo_stack.pop(-1) - diff --git a/waflib/Configure.py b/waflib/Configure.py deleted file mode 100644 index db09c0e..0000000 --- a/waflib/Configure.py +++ /dev/null @@ -1,639 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -""" -Configuration system - -A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``waf configure`` is called, it is used to: - -* create data dictionaries (ConfigSet instances) -* store the list of modules to import -* hold configuration routines such as ``find_program``, etc -""" - -import os, re, shlex, shutil, sys, time, traceback -from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors - -WAF_CONFIG_LOG = 'config.log' -"""Name of the configuration log file""" - -autoconfig = False -"""Execute the configuration automatically""" - -conf_template = '''# project %(app)s configured on %(now)s by -# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s) -# using %(args)s -#''' - -class ConfigurationContext(Context.Context): - '''configures the project''' - - cmd = 'configure' - - error_handlers = [] - """ - Additional functions to handle configuration errors - """ - - def __init__(self, **kw): - super(ConfigurationContext, self).__init__(**kw) - self.environ = dict(os.environ) - self.all_envs = {} - - self.top_dir = None - self.out_dir = None - - self.tools = [] # tools loaded in the configuration, and that will be loaded when building - - self.hash = 0 - self.files = [] - - self.tool_cache = [] - - self.setenv('') - - def setenv(self, name, env=None): - """ - Set a new config set for conf.env. If a config set of that name already exists, - recall it without modification. - - The name is the filename prefix to save to ``c4che/NAME_cache.py``, and it - is also used as *variants* by the build commands. 
- Though related to variants, whatever kind of data may be stored in the config set:: - - def configure(cfg): - cfg.env.ONE = 1 - cfg.setenv('foo') - cfg.env.ONE = 2 - - def build(bld): - 2 == bld.env_of_name('foo').ONE - - :param name: name of the configuration set - :type name: string - :param env: ConfigSet to copy, or an empty ConfigSet is created - :type env: :py:class:`waflib.ConfigSet.ConfigSet` - """ - if name not in self.all_envs or env: - if not env: - env = ConfigSet.ConfigSet() - self.prepare_env(env) - else: - env = env.derive() - self.all_envs[name] = env - self.variant = name - - def get_env(self): - """Getter for the env property""" - return self.all_envs[self.variant] - def set_env(self, val): - """Setter for the env property""" - self.all_envs[self.variant] = val - - env = property(get_env, set_env) - - def init_dirs(self): - """ - Initialize the project directory and the build directory - """ - - top = self.top_dir - if not top: - top = Options.options.top - if not top: - top = getattr(Context.g_module, Context.TOP, None) - if not top: - top = self.path.abspath() - top = os.path.abspath(top) - - self.srcnode = (os.path.isabs(top) and self.root or self.path).find_dir(top) - assert(self.srcnode) - - out = self.out_dir - if not out: - out = Options.options.out - if not out: - out = getattr(Context.g_module, Context.OUT, None) - if not out: - out = Options.lockfile.replace('.lock-waf_%s_' % sys.platform, '').replace('.lock-waf', '') - - # someone can be messing with symlinks - out = os.path.realpath(out) - - self.bldnode = (os.path.isabs(out) and self.root or self.path).make_node(out) - self.bldnode.mkdir() - - if not os.path.isdir(self.bldnode.abspath()): - self.fatal('Could not create the build directory %s' % self.bldnode.abspath()) - - def execute(self): - """ - See :py:func:`waflib.Context.Context.execute` - """ - self.init_dirs() - - self.cachedir = self.bldnode.make_node(Build.CACHE_DIR) - self.cachedir.mkdir() - - path = 
os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG) - self.logger = Logs.make_logger(path, 'cfg') - - app = getattr(Context.g_module, 'APPNAME', '') - if app: - ver = getattr(Context.g_module, 'VERSION', '') - if ver: - app = "%s (%s)" % (app, ver) - - params = {'now': time.ctime(), 'pyver': sys.hexversion, 'systype': sys.platform, 'args': " ".join(sys.argv), 'wafver': Context.WAFVERSION, 'abi': Context.ABI, 'app': app} - self.to_log(conf_template % params) - self.msg('Setting top to', self.srcnode.abspath()) - self.msg('Setting out to', self.bldnode.abspath()) - - if id(self.srcnode) == id(self.bldnode): - Logs.warn('Setting top == out') - elif id(self.path) != id(self.srcnode): - if self.srcnode.is_child_of(self.path): - Logs.warn('Are you certain that you do not want to set top="." ?') - - super(ConfigurationContext, self).execute() - - self.store() - - Context.top_dir = self.srcnode.abspath() - Context.out_dir = self.bldnode.abspath() - - # this will write a configure lock so that subsequent builds will - # consider the current path as the root directory (see prepare_impl). 
- # to remove: use 'waf distclean' - env = ConfigSet.ConfigSet() - env.argv = sys.argv - env.options = Options.options.__dict__ - env.config_cmd = self.cmd - - env.run_dir = Context.run_dir - env.top_dir = Context.top_dir - env.out_dir = Context.out_dir - - # conf.hash & conf.files hold wscript files paths and hash - # (used only by Configure.autoconfig) - env.hash = self.hash - env.files = self.files - env.environ = dict(self.environ) - env.launch_dir = Context.launch_dir - - if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')): - env.store(os.path.join(Context.run_dir, Options.lockfile)) - if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')): - env.store(os.path.join(Context.top_dir, Options.lockfile)) - if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')): - env.store(os.path.join(Context.out_dir, Options.lockfile)) - - def prepare_env(self, env): - """ - Insert *PREFIX*, *BINDIR* and *LIBDIR* values into ``env`` - - :type env: :py:class:`waflib.ConfigSet.ConfigSet` - :param env: a ConfigSet, usually ``conf.env`` - """ - if not env.PREFIX: - if Options.options.prefix or Utils.is_win32: - env.PREFIX = Options.options.prefix - else: - env.PREFIX = '/' - if not env.BINDIR: - if Options.options.bindir: - env.BINDIR = Options.options.bindir - else: - env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env) - if not env.LIBDIR: - if Options.options.libdir: - env.LIBDIR = Options.options.libdir - else: - env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env) - - def store(self): - """Save the config results into the cache file""" - n = self.cachedir.make_node('build.config.py') - n.write('version = 0x%x\ntools = %r\n' % (Context.HEXVERSION, self.tools)) - - if not self.all_envs: - self.fatal('nothing to store in the configuration context!') - - for key in self.all_envs: - 
tmpenv = self.all_envs[key] - tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX)) - - def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False): - """ - Load Waf tools, which will be imported whenever a build is started. - - :param tool_list: waf tools to import - :type tool_list: list of string - :param tooldir: paths for the imports - :type tooldir: list of string - :param funs: functions to execute from the waf tools - :type funs: list of string - :param cache: whether to prevent the tool from running twice - :type cache: bool - """ - - tools = Utils.to_list(tool_list) - if tooldir: - tooldir = Utils.to_list(tooldir) - for tool in tools: - # avoid loading the same tool more than once with the same functions - # used by composite projects - - if cache: - mag = (tool, id(self.env), tooldir, funs) - if mag in self.tool_cache: - self.to_log('(tool %s is already loaded, skipping)' % tool) - continue - self.tool_cache.append(mag) - - module = None - try: - module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path) - except ImportError as e: - self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e)) - except Exception as e: - self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs)) - self.to_log(traceback.format_exc()) - raise - - if funs is not None: - self.eval_rules(funs) - else: - func = getattr(module, 'configure', None) - if func: - if type(func) is type(Utils.readf): - func(self) - else: - self.eval_rules(func) - - self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs}) - - def post_recurse(self, node): - """ - Records the path and a hash of the scripts visited, see :py:meth:`waflib.Context.Context.post_recurse` - - :param node: script - :type node: :py:class:`waflib.Node.Node` - """ - super(ConfigurationContext, self).post_recurse(node) - self.hash = Utils.h_list((self.hash, node.read('rb'))) - self.files.append(node.abspath()) - - 
def eval_rules(self, rules): - """ - Execute configuration tests provided as list of functions to run - - :param rules: list of configuration method names - :type rules: list of string - """ - self.rules = Utils.to_list(rules) - for x in self.rules: - f = getattr(self, x) - if not f: - self.fatal('No such configuration function %r' % x) - f() - -def conf(f): - """ - Decorator: attach new configuration functions to :py:class:`waflib.Build.BuildContext` and - :py:class:`waflib.Configure.ConfigurationContext`. The methods bound will accept a parameter - named 'mandatory' to disable the configuration errors:: - - def configure(conf): - conf.find_program('abc', mandatory=False) - - :param f: method to bind - :type f: function - """ - def fun(*k, **kw): - mandatory = kw.pop('mandatory', True) - try: - return f(*k, **kw) - except Errors.ConfigurationError: - if mandatory: - raise - - fun.__name__ = f.__name__ - setattr(ConfigurationContext, f.__name__, fun) - setattr(Build.BuildContext, f.__name__, fun) - return f - -@conf -def add_os_flags(self, var, dest=None, dup=False): - """ - Import operating system environment values into ``conf.env`` dict:: - - def configure(conf): - conf.add_os_flags('CFLAGS') - - :param var: variable to use - :type var: string - :param dest: destination variable, by default the same as var - :type dest: string - :param dup: add the same set of flags again - :type dup: bool - """ - try: - flags = shlex.split(self.environ[var]) - except KeyError: - return - if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])): - self.env.append_value(dest or var, flags) - -@conf -def cmd_to_list(self, cmd): - """ - Detect if a command is written in pseudo shell like ``ccache g++`` and return a list. 
- - :param cmd: command - :type cmd: a string or a list of string - """ - if isinstance(cmd, str): - if os.path.isfile(cmd): - # do not take any risk - return [cmd] - if os.sep == '/': - return shlex.split(cmd) - else: - try: - return shlex.split(cmd, posix=False) - except TypeError: - # Python 2.5 on windows? - return shlex.split(cmd) - return cmd - -@conf -def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw): - """ - Raise a Configuration error if the Waf version does not strictly match the given bounds:: - - conf.check_waf_version(mini='1.9.99', maxi='2.1.0') - - :type mini: number, tuple or string - :param mini: Minimum required version - :type maxi: number, tuple or string - :param maxi: Maximum allowed version - """ - self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)), **kw) - ver = Context.HEXVERSION - if Utils.num2ver(mini) > ver: - self.fatal('waf version should be at least %r (%r found)' % (Utils.num2ver(mini), ver)) - if Utils.num2ver(maxi) < ver: - self.fatal('waf version should be at most %r (%r found)' % (Utils.num2ver(maxi), ver)) - self.end_msg('ok', **kw) - -@conf -def find_file(self, filename, path_list=[]): - """ - Find a file in a list of paths - - :param filename: name of the file to search for - :param path_list: list of directories to search - :return: the first matching filename; else a configuration exception is raised - """ - for n in Utils.to_list(filename): - for d in Utils.to_list(path_list): - p = os.path.expanduser(os.path.join(d, n)) - if os.path.exists(p): - return p - self.fatal('Could not find %r' % filename) - -@conf -def find_program(self, filename, **kw): - """ - Search for a program on the operating system - - When var is used, you may set os.environ[var] to help find a specific program version, for example:: - - $ CC='ccache gcc' waf configure - - :param path_list: paths to use for searching - :type param_list: list of string - :param var: store the result to conf.env[var] where var defaults 
to filename.upper() if not provided; the result is stored as a list of strings - :type var: string - :param value: obtain the program from the value passed exclusively - :type value: list or string (list is preferred) - :param exts: list of extensions for the binary (do not add an extension for portability) - :type exts: list of string - :param msg: name to display in the log, by default filename is used - :type msg: string - :param interpreter: interpreter for the program - :type interpreter: ConfigSet variable key - :raises: :py:class:`waflib.Errors.ConfigurationError` - """ - - exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py') - - environ = kw.get('environ', getattr(self, 'environ', os.environ)) - - ret = '' - - filename = Utils.to_list(filename) - msg = kw.get('msg', ', '.join(filename)) - - var = kw.get('var', '') - if not var: - var = re.sub(r'[-.]', '_', filename[0].upper()) - - path_list = kw.get('path_list', '') - if path_list: - path_list = Utils.to_list(path_list) - else: - path_list = environ.get('PATH', '').split(os.pathsep) - - if kw.get('value'): - # user-provided in command-line options and passed to find_program - ret = self.cmd_to_list(kw['value']) - elif environ.get(var): - # user-provided in the os environment - ret = self.cmd_to_list(environ[var]) - elif self.env[var]: - # a default option in the wscript file - ret = self.cmd_to_list(self.env[var]) - else: - if not ret: - ret = self.find_binary(filename, exts.split(','), path_list) - if not ret and Utils.winreg: - ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename) - if not ret and Utils.winreg: - ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename) - ret = self.cmd_to_list(ret) - - if ret: - if len(ret) == 1: - retmsg = ret[0] - else: - retmsg = ret - else: - retmsg = False - - self.msg('Checking for program %r' % msg, retmsg, **kw) - if not kw.get('quiet'): - self.to_log('find program=%r paths=%r var=%r -> %r' % 
(filename, path_list, var, ret)) - - if not ret: - self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename) - - interpreter = kw.get('interpreter') - if interpreter is None: - if not Utils.check_exe(ret[0], env=environ): - self.fatal('Program %r is not executable' % ret) - self.env[var] = ret - else: - self.env[var] = self.env[interpreter] + ret - - return ret - -@conf -def find_binary(self, filenames, exts, paths): - for f in filenames: - for ext in exts: - exe_name = f + ext - if os.path.isabs(exe_name): - if os.path.isfile(exe_name): - return exe_name - else: - for path in paths: - x = os.path.expanduser(os.path.join(path, exe_name)) - if os.path.isfile(x): - return x - return None - -@conf -def run_build(self, *k, **kw): - """ - Create a temporary build context to execute a build. A reference to that build - context is kept on self.test_bld for debugging purposes, and you should not rely - on it too much (read the note on the cache below). - The parameters given in the arguments to this function are passed as arguments for - a single task generator created in the build. Only three parameters are obligatory: - - :param features: features to pass to a task generator created in the build - :type features: list of string - :param compile_filename: file to create for the compilation (default: *test.c*) - :type compile_filename: string - :param code: code to write in the filename to compile - :type code: string - - Though this function returns *0* by default, the build may set an attribute named *retval* on the - build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example. - - This function also provides a limited cache. 
To use it, provide the following option:: - - def options(opt): - opt.add_option('--confcache', dest='confcache', default=0, - action='count', help='Use a configuration cache') - - And execute the configuration with the following command-line:: - - $ waf configure --confcache - - """ - lst = [str(v) for (p, v) in kw.items() if p != 'env'] - h = Utils.h_list(lst) - dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h) - - try: - os.makedirs(dir) - except OSError: - pass - - try: - os.stat(dir) - except OSError: - self.fatal('cannot use the configuration test folder %r' % dir) - - cachemode = getattr(Options.options, 'confcache', None) - if cachemode == 1: - try: - proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build')) - except EnvironmentError: - pass - else: - ret = proj['cache_run_build'] - if isinstance(ret, str) and ret.startswith('Test does not build'): - self.fatal(ret) - return ret - - bdir = os.path.join(dir, 'testbuild') - - if not os.path.exists(bdir): - os.makedirs(bdir) - - cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build') - self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir) - bld.init_dirs() - bld.progress_bar = 0 - bld.targets = '*' - - bld.logger = self.logger - bld.all_envs.update(self.all_envs) # not really necessary - bld.env = kw['env'] - - bld.kw = kw - bld.conf = self - kw['build_fun'](bld) - ret = -1 - try: - try: - bld.compile() - except Errors.WafError: - ret = 'Test does not build: %s' % traceback.format_exc() - self.fatal(ret) - else: - ret = getattr(bld, 'retval', 0) - finally: - if cachemode == 1: - # cache the results each time - proj = ConfigSet.ConfigSet() - proj['cache_run_build'] = ret - proj.store(os.path.join(dir, 'cache_run_build')) - else: - shutil.rmtree(dir) - return ret - -@conf -def ret_msg(self, msg, args): - if isinstance(msg, str): - return msg - return msg(args) - -@conf -def test(self, *k, **kw): - 
- if not 'env' in kw: - kw['env'] = self.env.derive() - - # validate_c for example - if kw.get('validate'): - kw['validate'](kw) - - self.start_msg(kw['msg'], **kw) - ret = None - try: - ret = self.run_build(*k, **kw) - except self.errors.ConfigurationError: - self.end_msg(kw['errmsg'], 'YELLOW', **kw) - if Logs.verbose > 1: - raise - else: - self.fatal('The configuration failed') - else: - kw['success'] = ret - - if kw.get('post_check'): - ret = kw['post_check'](kw) - - if ret: - self.end_msg(kw['errmsg'], 'YELLOW', **kw) - self.fatal('The configuration failed %r' % ret) - else: - self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw) - return ret - diff --git a/waflib/Context.py b/waflib/Context.py deleted file mode 100644 index 876ea46..0000000 --- a/waflib/Context.py +++ /dev/null @@ -1,737 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2010-2018 (ita) - -""" -Classes and functions enabling the command system -""" - -import os, re, imp, sys -from waflib import Utils, Errors, Logs -import waflib.Node - -# the following 3 constants are updated on each new release (do not touch) -HEXVERSION=0x2000f00 -"""Constant updated on new releases""" - -WAFVERSION="2.0.15" -"""Constant updated on new releases""" - -WAFREVISION="503db290b73ef738a495e0d116d6f8ee0b98dcc2" -"""Git revision when the waf version is updated""" - -ABI = 20 -"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)""" - -DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI) -"""Name of the pickle file for storing the build data""" - -APPNAME = 'APPNAME' -"""Default application name (used by ``waf dist``)""" - -VERSION = 'VERSION' -"""Default application version (used by ``waf dist``)""" - -TOP = 'top' -"""The variable name for the top-level directory in wscript files""" - -OUT = 'out' -"""The variable name for the output directory in wscript files""" - -WSCRIPT_FILE = 'wscript' -"""Name of the waf script files""" - -launch_dir = '' 
-"""Directory from which waf has been called""" -run_dir = '' -"""Location of the wscript file to use as the entry point""" -top_dir = '' -"""Location of the project directory (top), if the project was configured""" -out_dir = '' -"""Location of the build directory (out), if the project was configured""" -waf_dir = '' -"""Directory containing the waf modules""" - -default_encoding = Utils.console_encoding() -"""Encoding to use when reading outputs from other processes""" - -g_module = None -""" -Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`) -""" - -STDOUT = 1 -STDERR = -1 -BOTH = 0 - -classes = [] -""" -List of :py:class:`waflib.Context.Context` subclasses that can be used as waf commands. The classes -are added automatically by a metaclass. -""" - -def create_context(cmd_name, *k, **kw): - """ - Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command. - Used in particular by :py:func:`waflib.Scripting.run_command` - - :param cmd_name: command name - :type cmd_name: string - :param k: arguments to give to the context class initializer - :type k: list - :param k: keyword arguments to give to the context class initializer - :type k: dict - :return: Context object - :rtype: :py:class:`waflib.Context.Context` - """ - for x in classes: - if x.cmd == cmd_name: - return x(*k, **kw) - ctx = Context(*k, **kw) - ctx.fun = cmd_name - return ctx - -class store_context(type): - """ - Metaclass that registers command classes into the list :py:const:`waflib.Context.classes` - Context classes must provide an attribute 'cmd' representing the command name, and a function - attribute 'fun' representing the function name that the command uses. 
- """ - def __init__(cls, name, bases, dct): - super(store_context, cls).__init__(name, bases, dct) - name = cls.__name__ - - if name in ('ctx', 'Context'): - return - - try: - cls.cmd - except AttributeError: - raise Errors.WafError('Missing command for the context class %r (cmd)' % name) - - if not getattr(cls, 'fun', None): - cls.fun = cls.cmd - - classes.insert(0, cls) - -ctx = store_context('ctx', (object,), {}) -"""Base class for all :py:class:`waflib.Context.Context` classes""" - -class Context(ctx): - """ - Default context for waf commands, and base class for new command contexts. - - Context objects are passed to top-level functions:: - - def foo(ctx): - print(ctx.__class__.__name__) # waflib.Context.Context - - Subclasses must define the class attributes 'cmd' and 'fun': - - :param cmd: command to execute as in ``waf cmd`` - :type cmd: string - :param fun: function name to execute when the command is called - :type fun: string - - .. inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext - - """ - - errors = Errors - """ - Shortcut to :py:mod:`waflib.Errors` provided for convenience - """ - - tools = {} - """ - A module cache for wscript files; see :py:meth:`Context.Context.load` - """ - - def __init__(self, **kw): - try: - rd = kw['run_dir'] - except KeyError: - rd = run_dir - - # binds the context to the nodes in use to avoid a context singleton - self.node_class = type('Nod3', (waflib.Node.Node,), {}) - self.node_class.__module__ = 'waflib.Node' - self.node_class.ctx = self - - self.root = self.node_class('', None) - self.cur_script = None - self.path = self.root.find_dir(rd) - - self.stack_path = [] - self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self} - self.logger = None - - def finalize(self): - """ - 
Called to free resources such as logger files - """ - try: - logger = self.logger - except AttributeError: - pass - else: - Logs.free_logger(logger) - delattr(self, 'logger') - - def load(self, tool_list, *k, **kw): - """ - Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun` - from it. A ``tooldir`` argument may be provided as a list of module paths. - - :param tool_list: list of Waf tool names to load - :type tool_list: list of string or space-separated string - """ - tools = Utils.to_list(tool_list) - path = Utils.to_list(kw.get('tooldir', '')) - with_sys_path = kw.get('with_sys_path', True) - - for t in tools: - module = load_tool(t, path, with_sys_path=with_sys_path) - fun = getattr(module, kw.get('name', self.fun), None) - if fun: - fun(self) - - def execute(self): - """ - Here, it calls the function name in the top-level wscript file. Most subclasses - redefine this method to provide additional functionality. - """ - self.recurse([os.path.dirname(g_module.root_path)]) - - def pre_recurse(self, node): - """ - Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`. - The current script is bound as a Node object on ``self.cur_script``, and the current path - is bound to ``self.path`` - - :param node: script - :type node: :py:class:`waflib.Node.Node` - """ - self.stack_path.append(self.cur_script) - - self.cur_script = node - self.path = node.parent - - def post_recurse(self, node): - """ - Restores ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates. - - :param node: script - :type node: :py:class:`waflib.Node.Node` - """ - self.cur_script = self.stack_path.pop() - if self.cur_script: - self.path = self.cur_script.parent - - def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None): - """ - Runs user-provided functions from the supplied list of directories. 
- The directories can be either absolute, or relative to the directory - of the wscript file - - The methods :py:meth:`waflib.Context.Context.pre_recurse` and - :py:meth:`waflib.Context.Context.post_recurse` are called immediately before - and after a script has been executed. - - :param dirs: List of directories to visit - :type dirs: list of string or space-separated string - :param name: Name of function to invoke from the wscript - :type name: string - :param mandatory: whether sub wscript files are required to exist - :type mandatory: bool - :param once: read the script file once for a particular context - :type once: bool - """ - try: - cache = self.recurse_cache - except AttributeError: - cache = self.recurse_cache = {} - - for d in Utils.to_list(dirs): - - if not os.path.isabs(d): - # absolute paths only - d = os.path.join(self.path.abspath(), d) - - WSCRIPT = os.path.join(d, WSCRIPT_FILE) - WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun) - - node = self.root.find_node(WSCRIPT_FUN) - if node and (not once or node not in cache): - cache[node] = True - self.pre_recurse(node) - try: - function_code = node.read('r', encoding) - exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict) - finally: - self.post_recurse(node) - elif not node: - node = self.root.find_node(WSCRIPT) - tup = (node, name or self.fun) - if node and (not once or tup not in cache): - cache[tup] = True - self.pre_recurse(node) - try: - wscript_module = load_module(node.abspath(), encoding=encoding) - user_function = getattr(wscript_module, (name or self.fun), None) - if not user_function: - if not mandatory: - continue - raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath())) - user_function(self) - finally: - self.post_recurse(node) - elif not node: - if not mandatory: - continue - try: - os.listdir(d) - except OSError: - raise Errors.WafError('Cannot read the folder %r' % d) - raise Errors.WafError('No wscript file in directory %s' % d) - - 
def log_command(self, cmd, kw): - if Logs.verbose: - fmt = os.environ.get('WAF_CMD_FORMAT') - if fmt == 'string': - if not isinstance(cmd, str): - cmd = Utils.shell_escape(cmd) - Logs.debug('runner: %r', cmd) - Logs.debug('runner_env: kw=%s', kw) - - def exec_command(self, cmd, **kw): - """ - Runs an external process and returns the exit status:: - - def run(tsk): - ret = tsk.generator.bld.exec_command('touch foo.txt') - return ret - - If the context has the attribute 'log', then captures and logs the process stderr/stdout. - Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the - stdout/stderr values captured. - - :param cmd: command argument for subprocess.Popen - :type cmd: string or list - :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate. - :type kw: dict - :returns: process exit status - :rtype: integer - :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process - :raises: :py:class:`waflib.Errors.WafError` in case of execution failure - """ - subprocess = Utils.subprocess - kw['shell'] = isinstance(cmd, str) - self.log_command(cmd, kw) - - if self.logger: - self.logger.info(cmd) - - if 'stdout' not in kw: - kw['stdout'] = subprocess.PIPE - if 'stderr' not in kw: - kw['stderr'] = subprocess.PIPE - - if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): - raise Errors.WafError('Program %s not found!' 
% cmd[0]) - - cargs = {} - if 'timeout' in kw: - if sys.hexversion >= 0x3030000: - cargs['timeout'] = kw['timeout'] - if not 'start_new_session' in kw: - kw['start_new_session'] = True - del kw['timeout'] - if 'input' in kw: - if kw['input']: - cargs['input'] = kw['input'] - kw['stdin'] = subprocess.PIPE - del kw['input'] - - if 'cwd' in kw: - if not isinstance(kw['cwd'], str): - kw['cwd'] = kw['cwd'].abspath() - - encoding = kw.pop('decode_as', default_encoding) - - try: - ret, out, err = Utils.run_process(cmd, kw, cargs) - except Exception as e: - raise Errors.WafError('Execution failure: %s' % str(e), ex=e) - - if out: - if not isinstance(out, str): - out = out.decode(encoding, errors='replace') - if self.logger: - self.logger.debug('out: %s', out) - else: - Logs.info(out, extra={'stream':sys.stdout, 'c1': ''}) - if err: - if not isinstance(err, str): - err = err.decode(encoding, errors='replace') - if self.logger: - self.logger.error('err: %s' % err) - else: - Logs.info(err, extra={'stream':sys.stderr, 'c1': ''}) - - return ret - - def cmd_and_log(self, cmd, **kw): - """ - Executes a process and returns stdout/stderr if the execution is successful. - An exception is thrown when the exit status is non-0. In that case, both stderr and stdout - will be bound to the WafError object (configuration tests):: - - def configure(conf): - out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH) - (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH) - (out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT) - try: - conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH) - except Errors.WafError as e: - print(e.stdout, e.stderr) - - :param cmd: args for subprocess.Popen - :type cmd: list or string - :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate. 
- :type kw: dict - :returns: a tuple containing the contents of stdout and stderr - :rtype: string - :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process - :raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object - """ - subprocess = Utils.subprocess - kw['shell'] = isinstance(cmd, str) - self.log_command(cmd, kw) - - quiet = kw.pop('quiet', None) - to_ret = kw.pop('output', STDOUT) - - if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]): - raise Errors.WafError('Program %r not found!' % cmd[0]) - - kw['stdout'] = kw['stderr'] = subprocess.PIPE - if quiet is None: - self.to_log(cmd) - - cargs = {} - if 'timeout' in kw: - if sys.hexversion >= 0x3030000: - cargs['timeout'] = kw['timeout'] - if not 'start_new_session' in kw: - kw['start_new_session'] = True - del kw['timeout'] - if 'input' in kw: - if kw['input']: - cargs['input'] = kw['input'] - kw['stdin'] = subprocess.PIPE - del kw['input'] - - if 'cwd' in kw: - if not isinstance(kw['cwd'], str): - kw['cwd'] = kw['cwd'].abspath() - - encoding = kw.pop('decode_as', default_encoding) - - try: - ret, out, err = Utils.run_process(cmd, kw, cargs) - except Exception as e: - raise Errors.WafError('Execution failure: %s' % str(e), ex=e) - - if not isinstance(out, str): - out = out.decode(encoding, errors='replace') - if not isinstance(err, str): - err = err.decode(encoding, errors='replace') - - if out and quiet != STDOUT and quiet != BOTH: - self.to_log('out: %s' % out) - if err and quiet != STDERR and quiet != BOTH: - self.to_log('err: %s' % err) - - if ret: - e = Errors.WafError('Command %r returned %r' % (cmd, ret)) - e.returncode = ret - e.stderr = err - e.stdout = out - raise e - - if to_ret == BOTH: - return (out, err) - elif to_ret == STDERR: - return err - return out - - def fatal(self, msg, ex=None): - """ - Prints an error message in red and stops command execution; 
this is - usually used in the configuration section:: - - def configure(conf): - conf.fatal('a requirement is missing') - - :param msg: message to display - :type msg: string - :param ex: optional exception object - :type ex: exception - :raises: :py:class:`waflib.Errors.ConfigurationError` - """ - if self.logger: - self.logger.info('from %s: %s' % (self.path.abspath(), msg)) - try: - logfile = self.logger.handlers[0].baseFilename - except AttributeError: - pass - else: - if os.environ.get('WAF_PRINT_FAILURE_LOG'): - # see #1930 - msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile)) - else: - msg = '%s\n(complete log in %s)' % (msg, logfile) - raise self.errors.ConfigurationError(msg, ex=ex) - - def to_log(self, msg): - """ - Logs information to the logger (if present), or to stderr. - Empty messages are not printed:: - - def build(bld): - bld.to_log('starting the build') - - Provide a logger on the context class or override this method if necessary. - - :param msg: message - :type msg: string - """ - if not msg: - return - if self.logger: - self.logger.info(msg) - else: - sys.stderr.write(str(msg)) - sys.stderr.flush() - - - def msg(self, *k, **kw): - """ - Prints a configuration message of the form ``msg: result``. - The second part of the message will be in colors. 
The output - can be disabled easly by setting ``in_msg`` to a positive value:: - - def configure(conf): - self.in_msg = 1 - conf.msg('Checking for library foo', 'ok') - # no output - - :param msg: message to display to the user - :type msg: string - :param result: result to display - :type result: string or boolean - :param color: color to use, see :py:const:`waflib.Logs.colors_lst` - :type color: string - """ - try: - msg = kw['msg'] - except KeyError: - msg = k[0] - - self.start_msg(msg, **kw) - - try: - result = kw['result'] - except KeyError: - result = k[1] - - color = kw.get('color') - if not isinstance(color, str): - color = result and 'GREEN' or 'YELLOW' - - self.end_msg(result, color, **kw) - - def start_msg(self, *k, **kw): - """ - Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg` - """ - if kw.get('quiet'): - return - - msg = kw.get('msg') or k[0] - try: - if self.in_msg: - self.in_msg += 1 - return - except AttributeError: - self.in_msg = 0 - self.in_msg += 1 - - try: - self.line_just = max(self.line_just, len(msg)) - except AttributeError: - self.line_just = max(40, len(msg)) - for x in (self.line_just * '-', msg): - self.to_log(x) - Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='') - - def end_msg(self, *k, **kw): - """Prints the end of a 'Checking for' message. 
See :py:meth:`waflib.Context.Context.msg`""" - if kw.get('quiet'): - return - self.in_msg -= 1 - if self.in_msg: - return - - result = kw.get('result') or k[0] - - defcolor = 'GREEN' - if result is True: - msg = 'ok' - elif not result: - msg = 'not found' - defcolor = 'YELLOW' - else: - msg = str(result) - - self.to_log(msg) - try: - color = kw['color'] - except KeyError: - if len(k) > 1 and k[1] in Logs.colors_lst: - # compatibility waf 1.7 - color = k[1] - else: - color = defcolor - Logs.pprint(color, msg) - - def load_special_tools(self, var, ban=[]): - """ - Loads third-party extensions modules for certain programming languages - by trying to list certain files in the extras/ directory. This method - is typically called once for a programming language group, see for - example :py:mod:`waflib.Tools.compiler_c` - - :param var: glob expression, for example 'cxx\\_\\*.py' - :type var: string - :param ban: list of exact file names to exclude - :type ban: list of string - """ - if os.path.isdir(waf_dir): - lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) - for x in lst: - if not x.name in ban: - load_tool(x.name.replace('.py', '')) - else: - from zipfile import PyZipFile - waflibs = PyZipFile(waf_dir) - lst = waflibs.namelist() - for x in lst: - if not re.match('waflib/extras/%s' % var.replace('*', '.*'), var): - continue - f = os.path.basename(x) - doban = False - for b in ban: - r = b.replace('*', '.*') - if re.match(r, f): - doban = True - if not doban: - f = f.replace('.py', '') - load_tool(f) - -cache_modules = {} -""" -Dictionary holding already loaded modules (wscript), indexed by their absolute path. -The modules are added automatically by :py:func:`waflib.Context.load_module` -""" - -def load_module(path, encoding=None): - """ - Loads a wscript file as a python module. 
This method caches results in :py:attr:`waflib.Context.cache_modules` - - :param path: file path - :type path: string - :return: Loaded Python module - :rtype: module - """ - try: - return cache_modules[path] - except KeyError: - pass - - module = imp.new_module(WSCRIPT_FILE) - try: - code = Utils.readf(path, m='r', encoding=encoding) - except EnvironmentError: - raise Errors.WafError('Could not read the file %r' % path) - - module_dir = os.path.dirname(path) - sys.path.insert(0, module_dir) - try: - exec(compile(code, path, 'exec'), module.__dict__) - finally: - sys.path.remove(module_dir) - - cache_modules[path] = module - return module - -def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True): - """ - Imports a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools` - - :type tool: string - :param tool: Name of the tool - :type tooldir: list - :param tooldir: List of directories to search for the tool module - :type with_sys_path: boolean - :param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs - """ - if tool == 'java': - tool = 'javaw' # jython - else: - tool = tool.replace('++', 'xx') - - if not with_sys_path: - back_path = sys.path - sys.path = [] - try: - if tooldir: - assert isinstance(tooldir, list) - sys.path = tooldir + sys.path - try: - __import__(tool) - except ImportError as e: - e.waf_sys_path = list(sys.path) - raise - finally: - for d in tooldir: - sys.path.remove(d) - ret = sys.modules[tool] - Context.tools[tool] = ret - return ret - else: - if not with_sys_path: - sys.path.insert(0, waf_dir) - try: - for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'): - try: - __import__(x % tool) - break - except ImportError: - x = None - else: # raise an exception - __import__(tool) - except ImportError as e: - e.waf_sys_path = list(sys.path) - raise - finally: - if not with_sys_path: - sys.path.remove(waf_dir) - ret = sys.modules[x % 
tool] - Context.tools[tool] = ret - return ret - finally: - if not with_sys_path: - sys.path += back_path - diff --git a/waflib/Errors.py b/waflib/Errors.py deleted file mode 100644 index bf75c1b..0000000 --- a/waflib/Errors.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2010-2018 (ita) - -""" -Exceptions used in the Waf code -""" - -import traceback, sys - -class WafError(Exception): - """Base class for all Waf errors""" - def __init__(self, msg='', ex=None): - """ - :param msg: error message - :type msg: string - :param ex: exception causing this error (optional) - :type ex: exception - """ - Exception.__init__(self) - self.msg = msg - assert not isinstance(msg, Exception) - - self.stack = [] - if ex: - if not msg: - self.msg = str(ex) - if isinstance(ex, WafError): - self.stack = ex.stack - else: - self.stack = traceback.extract_tb(sys.exc_info()[2]) - self.stack += traceback.extract_stack()[:-1] - self.verbose_msg = ''.join(traceback.format_list(self.stack)) - - def __str__(self): - return str(self.msg) - -class BuildError(WafError): - """Error raised during the build and install phases""" - def __init__(self, error_tasks=[]): - """ - :param error_tasks: tasks that could not complete normally - :type error_tasks: list of task objects - """ - self.tasks = error_tasks - WafError.__init__(self, self.format_error()) - - def format_error(self): - """Formats the error messages from the tasks that failed""" - lst = ['Build failed'] - for tsk in self.tasks: - txt = tsk.format_error() - if txt: - lst.append(txt) - return '\n'.join(lst) - -class ConfigurationError(WafError): - """Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`""" - pass - -class TaskRescan(WafError): - """Task-specific exception type signalling required signature recalculations""" - pass - -class TaskNotReady(WafError): - """Task-specific exception type signalling that task signatures cannot be computed""" - pass - diff 
import os, re, traceback, sys
from waflib import Utils, ansiterm

# Wrap the standard streams with ANSI-aware proxies unless NOSYNC is set;
# synchronized output is nearly mandatory to prevent garbled output.
if not os.environ.get('NOSYNC', False):
	if sys.stdout.isatty() and id(sys.stdout) == id(sys.__stdout__):
		sys.stdout = ansiterm.AnsiTerm(sys.stdout)
	if sys.stderr.isatty() and id(sys.stderr) == id(sys.__stderr__):
		sys.stderr = ansiterm.AnsiTerm(sys.stderr)

# logging is imported only after the streams are wrapped, because the
# logging module keeps a reference to sys.stderr for the root logger
import logging

LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')

zones = []
"""
See :py:class:`waflib.Logs.log_filter`
"""

verbose = 0
"""
Global verbosity level, see :py:func:`waflib.Logs.debug` and :py:func:`waflib.Logs.error`
"""

colors_lst = {
'USE' : True,
'BOLD'  :'\x1b[01;1m',
'RED'   :'\x1b[01;31m',
'GREEN' :'\x1b[32m',
'YELLOW':'\x1b[33m',
'PINK'  :'\x1b[35m',
'BLUE'  :'\x1b[01;34m',
'CYAN'  :'\x1b[36m',
'GREY'  :'\x1b[37m',
'NORMAL':'\x1b[0m',
'cursor_on'  :'\x1b[?25h',
'cursor_off' :'\x1b[?25l',
}

indicator = '\r\x1b[K%s%s%s'

# python2 compatibility shim: `unicode` is None on python3
try:
	unicode
except NameError:
	unicode = None

def enable_colors(use):
	"""
	Enables or disables colored output.

	A value of *1* performs a few sanity checks before enabling colors
	(whether a terminal is attached, whether TERM supports colors); a value
	of zero disables colors, and a value above *1* forces colors on.

	:param use: whether to enable colors or not
	:type use: integer
	"""
	if use == 1:
		has_tty = sys.stderr.isatty() or sys.stdout.isatty()
		if not has_tty:
			use = 0
		if Utils.is_win32 and os.name != 'java':
			term = os.environ.get('TERM', '') # has ansiterm
		else:
			term = os.environ.get('TERM', 'dumb')
		if term in ('dumb', 'emacs'):
			use = 0
	if use >= 1:
		os.environ['TERM'] = 'vt100'
	colors_lst['USE'] = use

# If console packages are available, replace the dummy function with a real
# implementation
try:
	get_term_cols = ansiterm.get_term_cols
except AttributeError:
	def get_term_cols():
		return 80

get_term_cols.__doc__ = """
	Returns the console width in characters.

	:return: the number of characters per line
	:rtype: int
	"""

def get_color(cl):
	"""
	Returns the ansi sequence corresponding to the given color name.
	An empty string is returned when coloring is globally disabled.

	:param cl: color name in capital letters
	:type cl: string
	"""
	return colors_lst.get(cl, '') if colors_lst['USE'] else ''

class color_dict(object):
	"""attribute-based color access, eg: colors.PINK"""
	def __getattr__(self, a):
		return get_color(a)
	def __call__(self, a):
		return get_color(a)

colors = color_dict()

re_log = re.compile(r'(\w+): (.*)', re.M)
class log_filter(logging.Filter):
	"""
	Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
	For example, the following::

		from waflib import Logs
		Logs.debug('test: here is a message')

	Will be displayed only when executing::

		$ waf --zones=test
	"""
	def __init__(self, name=''):
		logging.Filter.__init__(self, name)

	def filter(self, rec):
		"""
		Filters log records by zone and by logging level

		:param rec: log entry
		"""
		rec.zone = rec.module
		if rec.levelno >= logging.INFO:
			# INFO and above always pass; zone filtering only applies to debug output
			return True

		# split 'zone: message' so the zone can be matched against --zones
		m = re_log.match(rec.msg)
		if m:
			rec.zone = m.group(1)
			rec.msg = m.group(2)

		if zones:
			return getattr(rec, 'zone', '') in zones or '*' in zones
		elif not verbose > 2:
			# without explicit zones, debug records require verbosity > 2
			return False
		return True

class log_handler(logging.StreamHandler):
	"""Dispatches messages to stderr/stdout depending on the severity level"""
	def emit(self, record):
		"""
		Delegates the functionality to :py:meth:`waflib.Logs.log_handler.emit_override`
		"""
		# default implementation
		try:
			try:
				# a record may carry an explicit target stream
				self.stream = record.stream
			except AttributeError:
				# otherwise route warnings/errors to stderr, the rest to stdout
				if record.levelno >= logging.WARNING:
					record.stream = self.stream = sys.stderr
				else:
					record.stream = self.stream = sys.stdout
			self.emit_override(record)
			self.flush()
		except (KeyboardInterrupt, SystemExit):
			raise
		except: # from the python library -_-
			self.handleError(record)

	def emit_override(self, record, **kw):
		"""
		Writes the log record to the desired stream (stderr/stdout)
		"""
		self.terminator = getattr(record, 'terminator', '\n')
		stream = self.stream
		if unicode:
			# python2: handle unicode/str encoding mismatches explicitly
			msg = self.formatter.format(record)
			fs = '%s' + self.terminator
			try:
				if (isinstance(msg, unicode) and getattr(stream, 'encoding', None)):
					fs = fs.decode(stream.encoding)
					try:
						stream.write(fs % msg)
					except UnicodeEncodeError:
						# fall back to an explicit encode for the target stream
						stream.write((fs % msg).encode(stream.encoding))
				else:
					stream.write(fs % msg)
			except UnicodeError:
				stream.write((fs % msg).encode('utf-8'))
		else:
			# python3: the standard handler is sufficient
			logging.StreamHandler.emit(self, record)
formatter which handles colors""" - def __init__(self): - logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT) - - def format(self, rec): - """ - Formats records and adds colors as needed. The records do not get - a leading hour format if the logging level is above *INFO*. - """ - try: - msg = rec.msg.decode('utf-8') - except Exception: - msg = rec.msg - - use = colors_lst['USE'] - if (use == 1 and rec.stream.isatty()) or use == 2: - - c1 = getattr(rec, 'c1', None) - if c1 is None: - c1 = '' - if rec.levelno >= logging.ERROR: - c1 = colors.RED - elif rec.levelno >= logging.WARNING: - c1 = colors.YELLOW - elif rec.levelno >= logging.INFO: - c1 = colors.GREEN - c2 = getattr(rec, 'c2', colors.NORMAL) - msg = '%s%s%s' % (c1, msg, c2) - else: - # remove single \r that make long lines in text files - # and other terminal commands - msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg) - - if rec.levelno >= logging.INFO: - # the goal of this is to format without the leading "Logs, hour" prefix - if rec.args: - return msg % rec.args - return msg - - rec.msg = msg - rec.c1 = colors.PINK - rec.c2 = colors.NORMAL - return logging.Formatter.format(self, rec) - -log = None -"""global logger for Logs.debug, Logs.error, etc""" - -def debug(*k, **kw): - """ - Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0 - """ - if verbose: - k = list(k) - k[0] = k[0].replace('\n', ' ') - log.debug(*k, **kw) - -def error(*k, **kw): - """ - Wrap logging.errors, adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` ≥ 2 - """ - log.error(*k, **kw) - if verbose > 2: - st = traceback.extract_stack() - if st: - st = st[:-1] - buf = [] - for filename, lineno, name, line in st: - buf.append(' File %r, line %d, in %s' % (filename, lineno, name)) - if line: - buf.append(' %s' % line.strip()) - if buf: - log.error('\n'.join(buf)) - -def warn(*k, **kw): - """ - Wraps logging.warning - """ - log.warning(*k, **kw) - -def 
def info(*k, **kw):
	"""
	Wraps logging.info
	"""
	log.info(*k, **kw)

def init_log():
	"""
	Initializes the logger :py:attr:`waflib.Logs.log`
	"""
	global log
	log = logging.getLogger('waflib')
	# drop whatever a previous initialization may have left behind
	log.handlers = []
	log.filters = []
	handler = log_handler()
	handler.setFormatter(formatter())
	log.addHandler(handler)
	log.addFilter(log_filter())
	log.setLevel(logging.DEBUG)

def make_logger(path, name):
	"""
	Creates a simple logger, which is often used to redirect the context command output::

		from waflib import Logs
		bld.logger = Logs.make_logger('test.log', 'build')
		bld.check(header_name='sadlib.h', features='cxx cprogram', mandatory=False)

		# have the file closed immediately
		Logs.free_logger(bld.logger)

		# stop logging
		bld.logger = None

	The method finalize() of the command will try to free the logger, if any

	:param path: file name to write the log output to
	:type path: string
	:param name: logger name (loggers are reused)
	:type name: string
	"""
	logger = logging.getLogger(name)
	# python3 writes the file with the console encoding; python2 keeps bytes
	encoding = sys.stdout.encoding if sys.hexversion > 0x3000000 else None
	file_handler = logging.FileHandler(path, 'w', encoding=encoding)
	file_handler.setFormatter(logging.Formatter('%(message)s'))
	logger.addHandler(file_handler)
	logger.setLevel(logging.DEBUG)
	return logger

def make_mem_logger(name, to_log, size=8192):
	"""
	Creates a memory logger to avoid writing concurrently to the main logger
	"""
	from logging.handlers import MemoryHandler
	logger = logging.getLogger(name)
	mem_handler = MemoryHandler(size, target=to_log)
	mem_handler.setFormatter(logging.Formatter('%(message)s'))
	logger.addHandler(mem_handler)
	# keep a reference so free_logger() can flush/close it later
	logger.memhandler = mem_handler
	logger.setLevel(logging.DEBUG)
	return logger
- """ - try: - for x in logger.handlers: - x.close() - logger.removeHandler(x) - except Exception: - pass - -def pprint(col, msg, label='', sep='\n'): - """ - Prints messages in color immediately on stderr:: - - from waflib import Logs - Logs.pprint('RED', 'Something bad just happened') - - :param col: color name to use in :py:const:`Logs.colors_lst` - :type col: string - :param msg: message to display - :type msg: string or a value that can be printed by %s - :param label: a message to add after the colored output - :type label: string - :param sep: a string to append at the end (line separator) - :type sep: string - """ - info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep}) - diff --git a/waflib/Node.py b/waflib/Node.py deleted file mode 100644 index 4ac1ea8..0000000 --- a/waflib/Node.py +++ /dev/null @@ -1,970 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -""" -Node: filesystem structure - -#. Each file/folder is represented by exactly one node. - -#. Some potential class properties are stored on :py:class:`waflib.Build.BuildContext` : nodes to depend on, etc. - Unused class members can increase the `.wafpickle` file size sensibly. - -#. Node objects should never be created directly, use - the methods :py:func:`Node.make_node` or :py:func:`Node.find_node` for the low-level operations - -#. The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` :py:func:`Node.find_or_declare` must be - used when a build context is present - -#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass required for serialization. - (:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). 
exclude_regs = '''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/*.swp
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/BitKeeper
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzrignore
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/.arch-ids
**/{arch}
**/_darcs
**/_darcs/**
**/.intlcache
**/.DS_Store'''
"""
Ant patterns for files and folders to exclude while doing the
recursive traversal in :py:meth:`waflib.Node.Node.ant_glob`
"""

def ant_matcher(s, ignorecase):
	"""
	Compiles ant-style patterns into per-pattern lists of path-component
	regular expressions ('**' components are kept as the literal string).
	"""
	flags = re.I if ignorecase else 0
	compiled = []
	for pattern in Utils.to_list(s):
		pattern = pattern.replace('\\', '/').replace('//', '/')
		if pattern.endswith('/'):
			# a trailing slash means "everything below this folder"
			pattern += '**'
		parts = []
		for token in pattern.split('/'):
			if token == '**':
				parts.append(token)
				continue
			token = token.replace('.', '[.]').replace('*', '.*').replace('?', '.').replace('+', '\\+')
			token = '^%s$' % token
			try:
				parts.append(re.compile(token, flags=flags))
			except Exception as e:
				raise Errors.WafError('Invalid pattern: %s' % token, e)
		compiled.append(parts)
	return compiled

def ant_sub_filter(name, nn):
	"""
	Advances each pattern (list of component regexes) by one path component;
	a resulting empty list means the pattern matched completely.
	"""
	ret = []
	for lst in nn:
		if not lst:
			continue
		head = lst[0]
		if head == '**':
			# '**' may stay in place, or consume itself plus one component
			ret.append(lst)
			if len(lst) == 1:
				ret.append([])
			elif lst[1].match(name):
				ret.append(lst[2:])
		elif head.match(name):
			ret.append(lst[1:])
	return ret

def ant_sub_matcher(name, pats):
	"""
	Applies the accept and reject pattern sets to one path component;
	a complete rejection clears the accepted set.
	"""
	accepted = ant_sub_filter(name, pats[0])
	rejected = ant_sub_filter(name, pats[1])
	if [] in rejected:
		accepted = []
	return [accepted, rejected]
	__slots__ = ('name', 'parent', 'children', 'cache_abspath', 'cache_isdir')
	def __init__(self, name, parent):
		"""
		.. note:: Use :py:func:`Node.make_node` or :py:func:`Node.find_node` instead of calling this constructor

		:param name: file or folder name of this node
		:param parent: parent node, or None for a root node
		"""
		self.name = name
		self.parent = parent
		if parent:
			# a name must be unique among the siblings
			if name in parent.children:
				raise Errors.WafError('node %s exists in the parent files %r already' % (name, parent))
			parent.children[name] = self

	def __setstate__(self, data):
		"Deserializes node information, used for persistence"
		self.name = data[0]
		self.parent = data[1]
		if data[2] is not None:
			# Issue 1480
			self.children = self.dict_class(data[2])

	def __getstate__(self):
		"Serializes node information, used for persistence"
		return (self.name, self.parent, getattr(self, 'children', None))

	def __str__(self):
		"""
		String representation (abspath), for debugging purposes

		:rtype: string
		"""
		return self.abspath()

	def __repr__(self):
		"""
		String representation (abspath), for debugging purposes

		:rtype: string
		"""
		return self.abspath()

	def __copy__(self):
		"""
		Provided to prevent nodes from being copied

		:raises: :py:class:`waflib.Errors.WafError`
		"""
		raise Errors.WafError('nodes are not supposed to be copied')

	def read(self, flags='r', encoding='latin-1'):
		"""
		Reads and returns the contents of the file represented by this node, see :py:func:`waflib.Utils.readf`::

			def build(bld):
				bld.path.find_node('wscript').read()

		:param flags: Open mode
		:type flags: string
		:param encoding: encoding value for Python3
		:type encoding: string
		:rtype: string or bytes
		:return: File contents
		"""
		return Utils.readf(self.abspath(), flags, encoding)

	def write(self, data, flags='w', encoding='latin-1'):
		"""
		Writes data to the file represented by this node, see :py:func:`waflib.Utils.writef`::

			def build(bld):
				bld.path.make_node('foo.txt').write('Hello, world!')

		:param data: data to write
		:type data: string
		:param flags: Write mode
		:type flags: string
		:param encoding: encoding value for Python3
		:type encoding: string
		"""
		Utils.writef(self.abspath(), data, flags, encoding)

	def read_json(self, convert=True, encoding='utf-8'):
		"""
		Reads and parses the contents of this node as JSON (Python ≥ 2.6)::

			def build(bld):
				bld.path.find_node('abc.json').read_json()

		Note that this by default automatically decodes unicode strings on Python2, unlike what the Python JSON module does.

		:type convert: boolean
		:param convert: Prevents decoding of unicode strings on Python2
		:type encoding: string
		:param encoding: The encoding of the file to read. This default to UTF8 as per the JSON standard
		:rtype: object
		:return: Parsed file contents
		"""
		import json # Python 2.6 and up
		object_pairs_hook = None
		if convert and sys.hexversion < 0x3000000:
			# python2 only: convert unicode keys/values to plain str
			try:
				_type = unicode
			except NameError:
				_type = str

			def convert(value):
				if isinstance(value, list):
					return [convert(element) for element in value]
				elif isinstance(value, _type):
					return str(value)
				else:
					return value

			def object_pairs(pairs):
				return dict((str(pair[0]), convert(pair[1])) for pair in pairs)

			object_pairs_hook = object_pairs

		return json.loads(self.read(encoding=encoding), object_pairs_hook=object_pairs_hook)

	def write_json(self, data, pretty=True):
		"""
		Writes a python object as JSON to disk (Python ≥ 2.6) as UTF-8 data (JSON standard)::

			def build(bld):
				bld.path.find_node('xyz.json').write_json(199)

		:type data: object
		:param data: The data to write to disk
		:type pretty: boolean
		:param pretty: Determines if the JSON will be nicely space separated
		"""
		import json # Python 2.6 and up
		indent = 2
		separators = (',', ': ')
		sort_keys = pretty
		newline = os.linesep
		if not pretty:
			indent = None
			separators = (',', ':')
			newline = ''
		output = json.dumps(data, indent=indent, separators=separators, sort_keys=sort_keys) + newline
		self.write(output, encoding='utf-8')

	def exists(self):
		"""
		Returns whether the Node is present on the filesystem

		:rtype: bool
		"""
		return os.path.exists(self.abspath())

	def isdir(self):
		"""
		Returns whether the Node represents a folder

		:rtype: bool
		"""
		return os.path.isdir(self.abspath())

	def chmod(self, val):
		"""
		Changes the file/dir permissions::

			def build(bld):
				bld.path.chmod(493) # 0755
		"""
		os.chmod(self.abspath(), val)

	def delete(self, evict=True):
		"""
		Removes the file/folder from the filesystem (equivalent to `rm -rf`), and remove this object from the Node tree.
		Do not use this object after calling this method.
		"""
		try:
			try:
				if os.path.isdir(self.abspath()):
					shutil.rmtree(self.abspath())
				else:
					os.remove(self.abspath())
			except OSError:
				# ignore removal races; re-raise only if the path is still there
				if os.path.exists(self.abspath()):
					raise
		finally:
			if evict:
				self.evict()

	def evict(self):
		"""
		Removes this node from the Node tree
		"""
		del self.parent.children[self.name]

	def suffix(self):
		"""
		Returns the file rightmost extension, for example `a.b.c.d → .d`

		:rtype: string
		"""
		# NOTE(review): for a name without any '.', rfind() yields -1 and
		# max(0, -1) makes this return the whole name — long-standing behavior
		k = max(0, self.name.rfind('.'))
		return self.name[k:]

	def height(self):
		"""
		Returns the depth in the folder hierarchy from the filesystem root or from all the file drives

		:returns: filesystem depth
		:rtype: integer
		"""
		d = self
		val = -1
		while d:
			d = d.parent
			val += 1
		return val

	def listdir(self):
		"""
		Lists the folder contents

		:returns: list of file/folder names ordered alphabetically
		:rtype: list of string
		"""
		lst = Utils.listdir(self.abspath())
		lst.sort()
		return lst
	def mkdir(self):
		"""
		Creates a folder represented by this node. Intermediate folders are created as needed.

		:raises: :py:class:`waflib.Errors.WafError` when the folder cannot be created
		"""
		if self.isdir():
			return

		try:
			# create the parents first; ignore races with concurrent creation
			self.parent.mkdir()
		except OSError:
			pass

		if self.name:
			try:
				os.makedirs(self.abspath())
			except OSError:
				pass

		if not self.isdir():
			raise Errors.WafError('Could not create the directory %r' % self)

		# a folder node must have a children mapping
		try:
			self.children
		except AttributeError:
			self.children = self.dict_class()

	def find_node(self, lst):
		"""
		Finds a node on the file system (files or folders), and creates the corresponding Node objects if it exists

		:param lst: relative path
		:type lst: string or list of string
		:returns: The corresponding Node object or None if no entry was found on the filesystem
		:rtype: :py:class:´waflib.Node.Node´
		"""

		if isinstance(lst, str):
			lst = [x for x in Utils.split_path(lst) if x and x != '.']

		# windows UNC paths (\\\\server\\share) get a dedicated root child
		if lst and lst[0].startswith('\\\\') and not self.parent:
			node = self.ctx.root.make_node(lst[0])
			node.cache_isdir = True
			return node.find_node(lst[1:])

		cur = self
		for x in lst:
			if x == '..':
				cur = cur.parent or cur
				continue

			try:
				ch = cur.children
			except AttributeError:
				cur.children = self.dict_class()
			else:
				try:
					cur = ch[x]
					continue
				except KeyError:
					pass

			# optimistic: create the node first then look if it was correct to do so
			cur = self.__class__(x, cur)
			if not cur.exists():
				cur.evict()
				return None

		if not cur.exists():
			cur.evict()
			return None

		return cur

	def make_node(self, lst):
		"""
		Returns or creates a Node object corresponding to the input path without considering the filesystem.

		:param lst: relative path
		:type lst: string or list of string
		:rtype: :py:class:´waflib.Node.Node´
		"""
		if isinstance(lst, str):
			lst = [x for x in Utils.split_path(lst) if x and x != '.']

		cur = self
		for x in lst:
			if x == '..':
				cur = cur.parent or cur
				continue

			try:
				cur = cur.children[x]
			except AttributeError:
				cur.children = self.dict_class()
			except KeyError:
				pass
			else:
				continue
			cur = self.__class__(x, cur)
		return cur

	def search_node(self, lst):
		"""
		Returns a Node previously defined in the data structure. The filesystem is not considered.

		:param lst: relative path
		:type lst: string or list of string
		:rtype: :py:class:´waflib.Node.Node´ or None if there is no entry in the Node datastructure
		"""
		if isinstance(lst, str):
			lst = [x for x in Utils.split_path(lst) if x and x != '.']

		cur = self
		for x in lst:
			if x == '..':
				cur = cur.parent or cur
			else:
				try:
					cur = cur.children[x]
				except (AttributeError, KeyError):
					return None
		return cur

	def path_from(self, node):
		"""
		Path of this node seen from the other::

			def build(bld):
				n1 = bld.path.find_node('foo/bar/xyz.txt')
				n2 = bld.path.find_node('foo/stuff/')
				n1.path_from(n2) # '../bar/xyz.txt'

		:param node: path to use as a reference
		:type node: :py:class:`waflib.Node.Node`
		:returns: a relative path or an absolute one if that is better
		:rtype: string
		"""
		c1 = self
		c2 = node

		c1h = c1.height()
		c2h = c2.height()

		lst = []
		up = 0

		# bring both nodes to the same depth, recording the path components
		while c1h > c2h:
			lst.append(c1.name)
			c1 = c1.parent
			c1h -= 1

		while c2h > c1h:
			up += 1
			c2 = c2.parent
			c2h -= 1

		# then climb both in lockstep until the common ancestor is found
		while not c1 is c2:
			lst.append(c1.name)
			up += 1

			c1 = c1.parent
			c2 = c2.parent

		if c1.parent:
			lst.extend(['..'] * up)
			lst.reverse()
			return os.sep.join(lst) or '.'
		else:
			# no common ancestor below the root: fall back to the absolute path
			return self.abspath()
	def abspath(self):
		"""
		Returns the absolute path. A cache is kept in the context as ``cache_node_abspath``

		:rtype: string
		"""
		try:
			return self.cache_abspath
		except AttributeError:
			pass
		# think twice before touching this (performance + complexity + correctness)

		if not self.parent:
			val = os.sep
		elif not self.parent.name:
			val = os.sep + self.name
		else:
			val = self.parent.abspath() + os.sep + self.name
		self.cache_abspath = val
		return val

	if Utils.is_win32:
		# on win32 the root node has no name, so the path is built differently
		def abspath(self):
			try:
				return self.cache_abspath
			except AttributeError:
				pass
			if not self.parent:
				val = ''
			elif not self.parent.name:
				val = self.name + os.sep
			else:
				val = self.parent.abspath().rstrip(os.sep) + os.sep + self.name
			self.cache_abspath = val
			return val

	def is_child_of(self, node):
		"""
		Returns whether the object belongs to a subtree of the input node::

			def build(bld):
				node = bld.path.find_node('wscript')
				node.is_child_of(bld.path) # True

		:param node: path to use as a reference
		:type node: :py:class:`waflib.Node.Node`
		:rtype: bool
		"""
		p = self
		diff = self.height() - node.height()
		while diff > 0:
			diff -= 1
			p = p.parent
		return p is node

	def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
		"""
		Recursive method used by :py:meth:`waflib.Node.ant_glob`.

		:param accept: function used for accepting/rejecting a node, returns the patterns that can be still accepted in recursion
		:type accept: function
		:param maxdepth: maximum depth in the filesystem (25)
		:type maxdepth: int
		:param pats: list of patterns to accept and list of patterns to exclude
		:type pats: tuple
		:param dir: return folders too (False by default)
		:type dir: bool
		:param src: return files (True by default)
		:type src: bool
		:param remove: remove files/folders that do not exist (True by default)
		:type remove: bool
		:param quiet: disable build directory traversal warnings (verbose mode)
		:type quiet: bool
		:returns: A generator object to iterate from
		:rtype: iterator
		"""
		dircont = self.listdir()
		dircont.sort()

		try:
			lst = set(self.children.keys())
		except AttributeError:
			self.children = self.dict_class()
		else:
			if remove:
				# evict nodes whose file/folder no longer exists on disk
				for x in lst - set(dircont):
					self.children[x].evict()

		for name in dircont:
			npats = accept(name, pats)
			if npats and npats[0]:
				# an empty pattern in the accept set means a complete match
				accepted = [] in npats[0]

				node = self.make_node([name])

				isdir = node.isdir()
				if accepted:
					if isdir:
						if dir:
							yield node
					elif src:
						yield node

				if isdir:
					node.cache_isdir = True
					if maxdepth:
						for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove, quiet=quiet):
							yield k
	def ant_glob(self, *k, **kw):
		"""
		Finds files across folders and returns Node objects:

		* ``**/*`` find all files recursively
		* ``**/*.class`` find all files ending by .class
		* ``..`` find files having two dot characters

		For example::

			def configure(cfg):
				# find all .cpp files
				cfg.path.ant_glob('**/*.cpp')
				# find particular files from the root filesystem (can be slow)
				cfg.root.ant_glob('etc/*.txt')
				# simple exclusion rule example
				cfg.path.ant_glob('*.c*', excl=['*.c'], src=True, dir=False)

		For more information about the patterns, consult http://ant.apache.org/manual/dirtasks.html
		Please remember that the '..' sequence does not represent the parent directory::

			def configure(cfg):
				cfg.path.ant_glob('../*.h') # incorrect
				cfg.path.parent.ant_glob('*.h') # correct

		The Node structure is itself a filesystem cache, so certain precautions must
		be taken while matching files in the build or installation phases.
		Node objects that do not have a corresponding file or folder are garbage-collected
		by default; this may remove Node objects of files that are yet-to-be built.
		Run ``waf -v`` to display any warnings, and consider passing ``remove=False``
		when matching files in the build directory. It is best practice to call this
		method only from the most specific node possible, e.g.
		``bld.path.find_node('project/dir').ant_glob('**/*.h')``, to avoid accidental
		build directory traversal. When matching files in the build folders, consider
		passing ``generator=True`` so that the listing is deferred to a later stage::

			def build(bld):
				bld(rule='tar xvf ${SRC}', source='arch.tar')
				bld.add_group()
				gen = bld.bldnode.ant_glob("*.h", generator=True, remove=True)
				# files will be listed only after the arch.tar is unpacked
				bld(rule='ls ${SRC}', source=gen, name='XYZ')

		:param incl: ant patterns or list of patterns to include
		:type incl: string or list of strings
		:param excl: ant patterns or list of patterns to exclude
		:type excl: string or list of strings
		:param dir: return folders too (False by default)
		:type dir: bool
		:param src: return files (True by default)
		:type src: bool
		:param maxdepth: maximum depth of recursion
		:type maxdepth: int
		:param ignorecase: ignore case while matching (False by default)
		:type ignorecase: bool
		:param generator: Whether to evaluate the Nodes lazily
		:type generator: bool
		:param remove: remove files/folders that do not exist (True by default)
		:type remove: bool
		:param quiet: disable build directory traversal warnings (verbose mode)
		:type quiet: bool
		:returns: The corresponding Node objects as a list or as a generator object (generator=True)
		:rtype: by default, list of :py:class:`waflib.Node.Node` instances
		"""
		src = kw.get('src', True)
		dir = kw.get('dir')
		excl = kw.get('excl', exclude_regs)
		# the include pattern may be given positionally or by keyword
		incl = k and k[0] or kw.get('incl', '**')
		remove = kw.get('remove', True)
		maxdepth = kw.get('maxdepth', 25)
		ignorecase = kw.get('ignorecase', False)
		quiet = kw.get('quiet', False)
		pats = (ant_matcher(incl, ignorecase), ant_matcher(excl, ignorecase))

		if kw.get('generator'):
			return Utils.lazy_generator(self.ant_iter, (ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet))

		it = self.ant_iter(ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet)
		if kw.get('flat'):
			# returns relative paths as a space-delimited string
			# prefer Node objects whenever possible
			return ' '.join(x.path_from(self) for x in it)
		return list(it)
	# ----------------------------------------------------------------------------
	# the methods below require the source/build folders (bld.srcnode/bld.bldnode)

	def is_src(self):
		"""
		Returns True if the node is below the source directory. Note that ``!is_src() ≠ is_bld()``

		:rtype: bool
		"""
		cur = self
		x = self.ctx.srcnode
		y = self.ctx.bldnode
		while cur.parent:
			if cur is y:
				return False
			if cur is x:
				return True
			cur = cur.parent
		return False

	def is_bld(self):
		"""
		Returns True if the node is below the build directory. Note that ``!is_bld() ≠ is_src()``

		:rtype: bool
		"""
		cur = self
		y = self.ctx.bldnode
		while cur.parent:
			if cur is y:
				return True
			cur = cur.parent
		return False

	def get_src(self):
		"""
		Returns the corresponding Node object in the source directory (or self if already
		under the source directory). Use this method only if the purpose is to create
		a Node object (this is common with folders but not with files, see ticket 1937)

		:rtype: :py:class:`waflib.Node.Node`
		"""
		cur = self
		x = self.ctx.srcnode
		y = self.ctx.bldnode
		lst = []
		while cur.parent:
			if cur is y:
				lst.reverse()
				return x.make_node(lst)
			if cur is x:
				return self
			lst.append(cur.name)
			cur = cur.parent
		return self

	def get_bld(self):
		"""
		Return the corresponding Node object in the build directory (or self if already
		under the build directory). Use this method only if the purpose is to create
		a Node object (this is common with folders but not with files, see ticket 1937)

		:rtype: :py:class:`waflib.Node.Node`
		"""
		cur = self
		x = self.ctx.srcnode
		y = self.ctx.bldnode
		lst = []
		while cur.parent:
			if cur is y:
				return self
			if cur is x:
				lst.reverse()
				return self.ctx.bldnode.make_node(lst)
			lst.append(cur.name)
			cur = cur.parent
		# the file is external to the current project, make a fake root in the current build directory
		lst.reverse()
		if lst and Utils.is_win32 and len(lst[0]) == 2 and lst[0].endswith(':'):
			# strip the ':' from a drive letter ('C:' -> 'C')
			lst[0] = lst[0][0]
		return self.ctx.bldnode.make_node(['__root__'] + lst)

	def find_resource(self, lst):
		"""
		Use this method in the build phase to find source files corresponding to the relative path given.

		First it looks up the Node data structure to find any declared Node object in the build directory.
		If None is found, it then considers the filesystem in the source directory.

		:param lst: relative path
		:type lst: string or list of string
		:returns: the corresponding Node object or None
		:rtype: :py:class:`waflib.Node.Node`
		"""
		if isinstance(lst, str):
			lst = [x for x in Utils.split_path(lst) if x and x != '.']

		node = self.get_bld().search_node(lst)
		if not node:
			node = self.get_src().find_node(lst)
		if node and node.isdir():
			return None
		return node

	def find_or_declare(self, lst):
		"""
		Use this method in the build phase to declare output files which
		are meant to be written in the build directory.

		This method creates the Node object and its parent folder
		as needed.

		:param lst: relative path
		:type lst: string or list of string
		"""
		if isinstance(lst, str) and os.path.isabs(lst):
			node = self.ctx.root.make_node(lst)
		else:
			node = self.get_bld().make_node(lst)
		node.parent.mkdir()
		return node

	def find_dir(self, lst):
		"""
		Searches for a folder on the filesystem (see :py:meth:`waflib.Node.Node.find_node`)

		:param lst: relative path
		:type lst: string or list of string
		:returns: The corresponding Node object or None if there is no such folder
		:rtype: :py:class:`waflib.Node.Node`
		"""
		if isinstance(lst, str):
			lst = [x for x in Utils.split_path(lst) if x and x != '.']

		node = self.find_node(lst)
		if node and not node.isdir():
			return None
		return node

	# helpers for building things
	def change_ext(self, ext, ext_in=None):
		"""
		Declares a build node with a distinct extension; this uses :py:meth:`waflib.Node.Node.find_or_declare`

		:return: A build node of the same path, but with a different extension
		:rtype: :py:class:`waflib.Node.Node`
		"""
		name = self.name
		if ext_in is None:
			k = name.rfind('.')
			if k >= 0:
				name = name[:k] + ext
			else:
				name = name + ext
		else:
			name = name[:- len(ext_in)] + ext

		return self.parent.find_or_declare([name])

	def bldpath(self):
		"""
		Returns the relative path seen from the build directory ``src/foo.cpp``

		:rtype: string
		"""
		return self.path_from(self.ctx.bldnode)

	def srcpath(self):
		"""
		Returns the relative path seen from the source directory ``../src/foo.cpp``

		:rtype: string
		"""
		return self.path_from(self.ctx.srcnode)

	def relpath(self):
		"""
		If a file in the build directory, returns :py:meth:`waflib.Node.Node.bldpath`,
		else returns :py:meth:`waflib.Node.Node.srcpath`

		:rtype: string
		"""
		cur = self
		x = self.ctx.bldnode
		while cur.parent:
			if cur is x:
				return self.bldpath()
			cur = cur.parent
		return self.srcpath()
- :rtype: string - """ - return self.parent.bldpath() - - def h_file(self): - """ - See :py:func:`waflib.Utils.h_file` - - :return: a hash representing the file contents - :rtype: string or bytes - """ - return Utils.h_file(self.abspath()) - - def get_bld_sig(self): - """ - Returns a signature (see :py:meth:`waflib.Node.Node.h_file`) for the purpose - of build dependency calculation. This method uses a per-context cache. - - :return: a hash representing the object contents - :rtype: string or bytes - """ - # previous behaviour can be set by returning self.ctx.node_sigs[self] when a build node - try: - cache = self.ctx.cache_sig - except AttributeError: - cache = self.ctx.cache_sig = {} - try: - ret = cache[self] - except KeyError: - p = self.abspath() - try: - ret = cache[self] = self.h_file() - except EnvironmentError: - if self.isdir(): - # allow folders as build nodes, do not use the creation time - st = os.stat(p) - ret = cache[self] = Utils.h_list([p, st.st_ino, st.st_mode]) - return ret - raise - return ret - -pickle_lock = Utils.threading.Lock() -"""Lock mandatory for thread-safe node serialization""" - -class Nod3(Node): - """Mandatory subclass for thread-safe node serialization""" - pass # do not remove - - diff --git a/waflib/Options.py b/waflib/Options.py deleted file mode 100644 index ad802d4..0000000 --- a/waflib/Options.py +++ /dev/null @@ -1,342 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Scott Newton, 2005 (scottn) -# Thomas Nagy, 2006-2018 (ita) - -""" -Support for waf command-line options - -Provides default and command-line options, as well the command -that reads the ``options`` wscript function. -""" - -import os, tempfile, optparse, sys, re -from waflib import Logs, Utils, Context, Errors - -options = optparse.Values() -""" -A global dictionary representing user-provided command-line options:: - - $ waf --foo=bar -""" - -commands = [] -""" -List of commands to execute extracted from the command-line. 
This list -is consumed during the execution by :py:func:`waflib.Scripting.run_commands`. -""" - -envvars = [] -""" -List of environment variable declarations placed after the Waf executable name. -These are detected by searching for "=" in the remaining arguments. -You probably do not want to use this. -""" - -lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform) -""" -Name of the lock file that marks a project as configured -""" - -class opt_parser(optparse.OptionParser): - """ - Command-line options parser. - """ - def __init__(self, ctx, allow_unknown=False): - optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False, - version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION)) - self.formatter.width = Logs.get_term_cols() - self.ctx = ctx - self.allow_unknown = allow_unknown - - def _process_args(self, largs, rargs, values): - """ - Custom _process_args to allow unknown options according to the allow_unknown status - """ - while rargs: - try: - optparse.OptionParser._process_args(self,largs,rargs,values) - except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e: - if self.allow_unknown: - largs.append(e.opt_str) - else: - self.error(str(e)) - - def print_usage(self, file=None): - return self.print_help(file) - - def get_usage(self): - """ - Builds the message to print on ``waf --help`` - - :rtype: string - """ - cmds_str = {} - for cls in Context.classes: - if not cls.cmd or cls.cmd == 'options' or cls.cmd.startswith( '_' ): - continue - - s = cls.__doc__ or '' - cmds_str[cls.cmd] = s - - if Context.g_module: - for (k, v) in Context.g_module.__dict__.items(): - if k in ('options', 'init', 'shutdown'): - continue - - if type(v) is type(Context.create_context): - if v.__doc__ and not k.startswith('_'): - cmds_str[k] = v.__doc__ - - just = 0 - for k in cmds_str: - just = max(just, len(k)) - - lst = [' %s: %s' % (k.ljust(just), v) for (k, v) in cmds_str.items()] - lst.sort() - ret = 
'\n'.join(lst) - - return '''waf [commands] [options] - -Main commands (example: ./waf build -j4) -%s -''' % ret - - -class OptionsContext(Context.Context): - """ - Collects custom options from wscript files and parses the command line. - Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values. - """ - cmd = 'options' - fun = 'options' - - def __init__(self, **kw): - super(OptionsContext, self).__init__(**kw) - - self.parser = opt_parser(self) - """Instance of :py:class:`waflib.Options.opt_parser`""" - - self.option_groups = {} - - jobs = self.jobs() - p = self.add_option - color = os.environ.get('NOCOLOR', '') and 'no' or 'auto' - if os.environ.get('CLICOLOR', '') == '0': - color = 'no' - elif os.environ.get('CLICOLOR_FORCE', '') == '1': - color = 'yes' - p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto')) - p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs) - p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)') - p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]') - p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)') - p('--profile', dest='profile', default=0, action='store_true', help=optparse.SUPPRESS_HELP) - p('--pdb', dest='pdb', default=0, action='store_true', help=optparse.SUPPRESS_HELP) - p('-h', '--help', dest='whelp', default=0, action='store_true', help="show this help message and exit") - - gr = self.add_option_group('Configuration options') - self.option_groups['configure options'] = gr - - gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out') - gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', 
dest='top') - - gr.add_option('--no-lock-in-run', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_run') - gr.add_option('--no-lock-in-out', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_out') - gr.add_option('--no-lock-in-top', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_top') - - default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX')) - if not default_prefix: - if Utils.unversioned_sys_platform() == 'win32': - d = tempfile.gettempdir() - default_prefix = d[0].upper() + d[1:] - # win32 preserves the case, but gettempdir does not - else: - default_prefix = '/usr/local/' - gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix) - gr.add_option('--bindir', dest='bindir', help='bindir') - gr.add_option('--libdir', dest='libdir', help='libdir') - - gr = self.add_option_group('Build and installation options') - self.option_groups['build and install options'] = gr - gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output') - gr.add_option('--targets', dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"') - - gr = self.add_option_group('Step options') - self.option_groups['step options'] = gr - gr.add_option('--files', dest='files', default='', action='store', help='files to process, by regexp, e.g. 
"*/main.c,*/test/main.o"') - - default_destdir = os.environ.get('DESTDIR', '') - - gr = self.add_option_group('Installation and uninstallation options') - self.option_groups['install/uninstall options'] = gr - gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir') - gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation') - gr.add_option('--distcheck-args', metavar='ARGS', help='arguments to pass to distcheck', default=None, action='store') - - def jobs(self): - """ - Finds the optimal amount of cpu cores to use for parallel jobs. - At runtime the options can be obtained from :py:const:`waflib.Options.options` :: - - from waflib.Options import options - njobs = options.jobs - - :return: the amount of cpu cores - :rtype: int - """ - count = int(os.environ.get('JOBS', 0)) - if count < 1: - if 'NUMBER_OF_PROCESSORS' in os.environ: - # on Windows, use the NUMBER_OF_PROCESSORS environment variable - count = int(os.environ.get('NUMBER_OF_PROCESSORS', 1)) - else: - # on everything else, first try the POSIX sysconf values - if hasattr(os, 'sysconf_names'): - if 'SC_NPROCESSORS_ONLN' in os.sysconf_names: - count = int(os.sysconf('SC_NPROCESSORS_ONLN')) - elif 'SC_NPROCESSORS_CONF' in os.sysconf_names: - count = int(os.sysconf('SC_NPROCESSORS_CONF')) - if not count and os.name not in ('nt', 'java'): - try: - tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0) - except Errors.WafError: - pass - else: - if re.match('^[0-9]+$', tmp): - count = int(tmp) - if count < 1: - count = 1 - elif count > 1024: - count = 1024 - return count - - def add_option(self, *k, **kw): - """ - Wraps ``optparse.add_option``:: - - def options(ctx): - ctx.add_option('-u', '--use', dest='use', default=False, - action='store_true', help='a boolean option') - - :rtype: optparse option object - """ - return self.parser.add_option(*k, **kw) - - def add_option_group(self, 
*k, **kw): - """ - Wraps ``optparse.add_option_group``:: - - def options(ctx): - gr = ctx.add_option_group('some options') - gr.add_option('-u', '--use', dest='use', default=False, action='store_true') - - :rtype: optparse option group object - """ - try: - gr = self.option_groups[k[0]] - except KeyError: - gr = self.parser.add_option_group(*k, **kw) - self.option_groups[k[0]] = gr - return gr - - def get_option_group(self, opt_str): - """ - Wraps ``optparse.get_option_group``:: - - def options(ctx): - gr = ctx.get_option_group('configure options') - gr.add_option('-o', '--out', action='store', default='', - help='build dir for the project', dest='out') - - :rtype: optparse option group object - """ - try: - return self.option_groups[opt_str] - except KeyError: - for group in self.parser.option_groups: - if group.title == opt_str: - return group - return None - - def sanitize_path(self, path, cwd=None): - if not cwd: - cwd = Context.launch_dir - p = os.path.expanduser(path) - p = os.path.join(cwd, p) - p = os.path.normpath(p) - p = os.path.abspath(p) - return p - - def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False): - """ - Just parse the arguments - """ - self.parser.allow_unknown = allow_unknown - (options, leftover_args) = self.parser.parse_args(args=_args) - envvars = [] - commands = [] - for arg in leftover_args: - if '=' in arg: - envvars.append(arg) - elif arg != 'options': - commands.append(arg) - - for name in 'top out destdir prefix bindir libdir'.split(): - # those paths are usually expanded from Context.launch_dir - if getattr(options, name, None): - path = self.sanitize_path(getattr(options, name), cwd) - setattr(options, name, path) - return options, commands, envvars - - def init_module_vars(self, arg_options, arg_commands, arg_envvars): - options.__dict__.clear() - del commands[:] - del envvars[:] - - options.__dict__.update(arg_options.__dict__) - commands.extend(arg_commands) - envvars.extend(arg_envvars) - - for var in envvars: - 
(name, value) = var.split('=', 1) - os.environ[name.strip()] = value - - def init_logs(self, options, commands, envvars): - Logs.verbose = options.verbose - if options.verbose >= 1: - self.load('errcheck') - - colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors] - Logs.enable_colors(colors) - - if options.zones: - Logs.zones = options.zones.split(',') - if not Logs.verbose: - Logs.verbose = 1 - elif Logs.verbose > 0: - Logs.zones = ['runner'] - if Logs.verbose > 2: - Logs.zones = ['*'] - - def parse_args(self, _args=None): - """ - Parses arguments from a list which is not necessarily the command-line. - Initializes the module variables options, commands and envvars - If help is requested, prints it and exit the application - - :param _args: arguments - :type _args: list of strings - """ - options, commands, envvars = self.parse_cmd_args() - self.init_logs(options, commands, envvars) - self.init_module_vars(options, commands, envvars) - - def execute(self): - """ - See :py:func:`waflib.Context.Context.execute` - """ - super(OptionsContext, self).execute() - self.parse_args() - Utils.alloc_process_pool(options.jobs) - diff --git a/waflib/README.md b/waflib/README.md deleted file mode 100644 index c5361b9..0000000 --- a/waflib/README.md +++ /dev/null @@ -1,24 +0,0 @@ -Autowaf -======= - -This is autowaf, a bundle of waf and a few extensions intended to be easy to -use directly as source code in a project. Using this as a submodule or subtree -named `waflib` in a project allows waf to be used without including binary -encoded data in the waf script. This gets along with revision control and -distributions better, among other advantages, without losing -self-containedness. - -To use this in a project, add this repository as a directory named `waflib` in -the top level of the project, and link or copy `waf` to the top level. - -Two waf extras are also included: `autowaf.py` and `lv2.py`. 
- -The `autowaf.py` module is a kitchen sink of Python utilities for building -consistent packages, and can be imported in a wcript as -`waflib.extras.autowaf`. - -The `lv2.py` extra defines options for LV2 plugin installation paths. It can -be used by calling `opt.load('lv2')` and `conf.load('lv2')` in the appropriate -locations in a wscript. - - -- David Robillard <d@drobilla.net> diff --git a/waflib/Runner.py b/waflib/Runner.py deleted file mode 100644 index 5d27669..0000000 --- a/waflib/Runner.py +++ /dev/null @@ -1,617 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -""" -Runner.py: Task scheduling and execution -""" - -import heapq, traceback -try: - from queue import Queue, PriorityQueue -except ImportError: - from Queue import Queue - try: - from Queue import PriorityQueue - except ImportError: - class PriorityQueue(Queue): - def _init(self, maxsize): - self.maxsize = maxsize - self.queue = [] - def _put(self, item): - heapq.heappush(self.queue, item) - def _get(self): - return heapq.heappop(self.queue) - -from waflib import Utils, Task, Errors, Logs - -GAP = 5 -""" -Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run -""" - -class PriorityTasks(object): - def __init__(self): - self.lst = [] - def __len__(self): - return len(self.lst) - def __iter__(self): - return iter(self.lst) - def __str__(self): - return 'PriorityTasks: [%s]' % '\n '.join(str(x) for x in self.lst) - def clear(self): - self.lst = [] - def append(self, task): - heapq.heappush(self.lst, task) - def appendleft(self, task): - "Deprecated, do not use" - heapq.heappush(self.lst, task) - def pop(self): - return heapq.heappop(self.lst) - def extend(self, lst): - if self.lst: - for x in lst: - self.append(x) - else: - if isinstance(lst, list): - self.lst = lst - heapq.heapify(lst) - else: - self.lst = lst.lst - -class Consumer(Utils.threading.Thread): - """ - Daemon thread object that executes a task. 
It shares a semaphore with - the coordinator :py:class:`waflib.Runner.Spawner`. There is one - instance per task to consume. - """ - def __init__(self, spawner, task): - Utils.threading.Thread.__init__(self) - self.task = task - """Task to execute""" - self.spawner = spawner - """Coordinator object""" - self.setDaemon(1) - self.start() - def run(self): - """ - Processes a single task - """ - try: - if not self.spawner.master.stop: - self.spawner.master.process_task(self.task) - finally: - self.spawner.sem.release() - self.spawner.master.out.put(self.task) - self.task = None - self.spawner = None - -class Spawner(Utils.threading.Thread): - """ - Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and - spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each - :py:class:`waflib.Task.Task` instance. - """ - def __init__(self, master): - Utils.threading.Thread.__init__(self) - self.master = master - """:py:class:`waflib.Runner.Parallel` producer instance""" - self.sem = Utils.threading.Semaphore(master.numjobs) - """Bounded semaphore that prevents spawning more than *n* concurrent consumers""" - self.setDaemon(1) - self.start() - def run(self): - """ - Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop` - """ - try: - self.loop() - except Exception: - # Python 2 prints unnecessary messages when shutting down - # we also want to stop the thread properly - pass - def loop(self): - """ - Consumes task objects from the producer; ends when the producer has no more - task to provide. - """ - master = self.master - while 1: - task = master.ready.get() - self.sem.acquire() - if not master.stop: - task.log_display(task.generator.bld) - Consumer(self, task) - -class Parallel(object): - """ - Schedule the tasks obtained from the build context for execution. 
- """ - def __init__(self, bld, j=2): - """ - The initialization requires a build context reference - for computing the total number of jobs. - """ - - self.numjobs = j - """ - Amount of parallel consumers to use - """ - - self.bld = bld - """ - Instance of :py:class:`waflib.Build.BuildContext` - """ - - self.outstanding = PriorityTasks() - """Heap of :py:class:`waflib.Task.Task` that may be ready to be executed""" - - self.postponed = PriorityTasks() - """Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons""" - - self.incomplete = set() - """List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)""" - - self.ready = PriorityQueue(0) - """List of :py:class:`waflib.Task.Task` ready to be executed by consumers""" - - self.out = Queue(0) - """List of :py:class:`waflib.Task.Task` returned by the task consumers""" - - self.count = 0 - """Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`""" - - self.processed = 0 - """Amount of tasks processed""" - - self.stop = False - """Error flag to stop the build""" - - self.error = [] - """Tasks that could not be executed""" - - self.biter = None - """Task iterator which must give groups of parallelizable tasks when calling ``next()``""" - - self.dirty = False - """ - Flag that indicates that the build cache must be saved when a task was executed - (calls :py:meth:`waflib.Build.BuildContext.store`)""" - - self.revdeps = Utils.defaultdict(set) - """ - The reverse dependency graph of dependencies obtained from Task.run_after - """ - - self.spawner = None - """ - Coordinating daemon thread that spawns thread consumers - """ - if self.numjobs > 1: - self.spawner = Spawner(self) - - def get_next_task(self): - """ - Obtains the next Task instance to run - - :rtype: :py:class:`waflib.Task.Task` - """ - if not self.outstanding: - return None - return self.outstanding.pop() - - def postpone(self, tsk): - """ - Adds the task to the list 
:py:attr:`waflib.Runner.Parallel.postponed`. - The order is scrambled so as to consume as many tasks in parallel as possible. - - :param tsk: task instance - :type tsk: :py:class:`waflib.Task.Task` - """ - self.postponed.append(tsk) - - def refill_task_list(self): - """ - Pulls a next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`. - Ensures that all tasks in the current build group are complete before processing the next one. - """ - while self.count > self.numjobs * GAP: - self.get_out() - - while not self.outstanding: - if self.count: - self.get_out() - if self.outstanding: - break - elif self.postponed: - try: - cond = self.deadlock == self.processed - except AttributeError: - pass - else: - if cond: - # The most common reason is conflicting build order declaration - # for example: "X run_after Y" and "Y run_after X" - # Another can be changing "run_after" dependencies while the build is running - # for example: updating "tsk.run_after" in the "runnable_status" method - lst = [] - for tsk in self.postponed: - deps = [id(x) for x in tsk.run_after if not x.hasrun] - lst.append('%s\t-> %r' % (repr(tsk), deps)) - if not deps: - lst.append('\n task %r dependencies are done, check its *runnable_status*?' 
% id(tsk)) - raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst)) - self.deadlock = self.processed - - if self.postponed: - self.outstanding.extend(self.postponed) - self.postponed.clear() - elif not self.count: - if self.incomplete: - for x in self.incomplete: - for k in x.run_after: - if not k.hasrun: - break - else: - # dependency added after the build started without updating revdeps - self.incomplete.remove(x) - self.outstanding.append(x) - break - else: - if self.stop or self.error: - break - raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete) - else: - tasks = next(self.biter) - ready, waiting = self.prio_and_split(tasks) - self.outstanding.extend(ready) - self.incomplete.update(waiting) - self.total = self.bld.total() - break - - def add_more_tasks(self, tsk): - """ - If a task provides :py:attr:`waflib.Task.Task.more_tasks`, then the tasks contained - in that list are added to the current build and will be processed before the next build group. 
- - The priorities for dependent tasks are not re-calculated globally - - :param tsk: task instance - :type tsk: :py:attr:`waflib.Task.Task` - """ - if getattr(tsk, 'more_tasks', None): - more = set(tsk.more_tasks) - groups_done = set() - def iteri(a, b): - for x in a: - yield x - for x in b: - yield x - - # Update the dependency tree - # this assumes that task.run_after values were updated - for x in iteri(self.outstanding, self.incomplete): - for k in x.run_after: - if isinstance(k, Task.TaskGroup): - if k not in groups_done: - groups_done.add(k) - for j in k.prev & more: - self.revdeps[j].add(k) - elif k in more: - self.revdeps[k].add(x) - - ready, waiting = self.prio_and_split(tsk.more_tasks) - self.outstanding.extend(ready) - self.incomplete.update(waiting) - self.total += len(tsk.more_tasks) - - def mark_finished(self, tsk): - def try_unfreeze(x): - # DAG ancestors are likely to be in the incomplete set - # This assumes that the run_after contents have not changed - # after the build starts, else a deadlock may occur - if x in self.incomplete: - # TODO remove dependencies to free some memory? - # x.run_after.remove(tsk) - for k in x.run_after: - if not k.hasrun: - break - else: - self.incomplete.remove(x) - self.outstanding.append(x) - - if tsk in self.revdeps: - for x in self.revdeps[tsk]: - if isinstance(x, Task.TaskGroup): - x.prev.remove(tsk) - if not x.prev: - for k in x.next: - # TODO necessary optimization? - k.run_after.remove(x) - try_unfreeze(k) - # TODO necessary optimization? - x.next = [] - else: - try_unfreeze(x) - del self.revdeps[tsk] - - if hasattr(tsk, 'semaphore'): - sem = tsk.semaphore - sem.release(tsk) - while sem.waiting and not sem.is_locked(): - # take a frozen task, make it ready to run - x = sem.waiting.pop() - self._add_task(x) - - def get_out(self): - """ - Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution. 
- Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`. - - :rtype: :py:attr:`waflib.Task.Task` - """ - tsk = self.out.get() - if not self.stop: - self.add_more_tasks(tsk) - self.mark_finished(tsk) - - self.count -= 1 - self.dirty = True - return tsk - - def add_task(self, tsk): - """ - Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them. - - :param tsk: task instance - :type tsk: :py:attr:`waflib.Task.Task` - """ - # TODO change in waf 2.1 - self.ready.put(tsk) - - def _add_task(self, tsk): - if hasattr(tsk, 'semaphore'): - sem = tsk.semaphore - try: - sem.acquire(tsk) - except IndexError: - sem.waiting.add(tsk) - return - - self.count += 1 - self.processed += 1 - if self.numjobs == 1: - tsk.log_display(tsk.generator.bld) - try: - self.process_task(tsk) - finally: - self.out.put(tsk) - else: - self.add_task(tsk) - - def process_task(self, tsk): - """ - Processes a task and attempts to stop the build in case of errors - """ - tsk.process() - if tsk.hasrun != Task.SUCCESS: - self.error_handler(tsk) - - def skip(self, tsk): - """ - Mark a task as skipped/up-to-date - """ - tsk.hasrun = Task.SKIPPED - self.mark_finished(tsk) - - def cancel(self, tsk): - """ - Mark a task as failed because of unsatisfiable dependencies - """ - tsk.hasrun = Task.CANCELED - self.mark_finished(tsk) - - def error_handler(self, tsk): - """ - Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, - unless the build is executed with:: - - $ waf build -k - - :param tsk: task instance - :type tsk: :py:attr:`waflib.Task.Task` - """ - if not self.bld.keep: - self.stop = True - self.error.append(tsk) - - def task_status(self, tsk): - """ - Obtains the task status to decide whether to run it immediately or not. 
- - :return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER` - :rtype: integer - """ - try: - return tsk.runnable_status() - except Exception: - self.processed += 1 - tsk.err_msg = traceback.format_exc() - if not self.stop and self.bld.keep: - self.skip(tsk) - if self.bld.keep == 1: - # if -k stop on the first exception, if -kk try to go as far as possible - if Logs.verbose > 1 or not self.error: - self.error.append(tsk) - self.stop = True - else: - if Logs.verbose > 1: - self.error.append(tsk) - return Task.EXCEPTION - - tsk.hasrun = Task.EXCEPTION - self.error_handler(tsk) - - return Task.EXCEPTION - - def start(self): - """ - Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to - :py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread - has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out` - and marks the build as failed by setting the ``stop`` flag. - If only one job is used, then executes the tasks one by one, without consumers. 
- """ - self.total = self.bld.total() - - while not self.stop: - - self.refill_task_list() - - # consider the next task - tsk = self.get_next_task() - if not tsk: - if self.count: - # tasks may add new ones after they are run - continue - else: - # no tasks to run, no tasks running, time to exit - break - - if tsk.hasrun: - # if the task is marked as "run", just skip it - self.processed += 1 - continue - - if self.stop: # stop immediately after a failure is detected - break - - st = self.task_status(tsk) - if st == Task.RUN_ME: - self._add_task(tsk) - elif st == Task.ASK_LATER: - self.postpone(tsk) - elif st == Task.SKIP_ME: - self.processed += 1 - self.skip(tsk) - self.add_more_tasks(tsk) - elif st == Task.CANCEL_ME: - # A dependency problem has occurred, and the - # build is most likely run with `waf -k` - if Logs.verbose > 1: - self.error.append(tsk) - self.processed += 1 - self.cancel(tsk) - - # self.count represents the tasks that have been made available to the consumer threads - # collect all the tasks after an error else the message may be incomplete - while self.error and self.count: - self.get_out() - - self.ready.put(None) - if not self.stop: - assert not self.count - assert not self.postponed - assert not self.incomplete - - def prio_and_split(self, tasks): - """ - Label input tasks with priority values, and return a pair containing - the tasks that are ready to run and the tasks that are necessarily - waiting for other tasks to complete. - - The priority system is really meant as an optional layer for optimization: - dependency cycles are found quickly, and builds should be more efficient. - A high priority number means that a task is processed first. 
- - This method can be overridden to disable the priority system:: - - def prio_and_split(self, tasks): - return tasks, [] - - :return: A pair of task lists - :rtype: tuple - """ - # to disable: - #return tasks, [] - for x in tasks: - x.visited = 0 - - reverse = self.revdeps - - groups_done = set() - for x in tasks: - for k in x.run_after: - if isinstance(k, Task.TaskGroup): - if k not in groups_done: - groups_done.add(k) - for j in k.prev: - reverse[j].add(k) - else: - reverse[k].add(x) - - # the priority number is not the tree depth - def visit(n): - if isinstance(n, Task.TaskGroup): - return sum(visit(k) for k in n.next) - - if n.visited == 0: - n.visited = 1 - - if n in reverse: - rev = reverse[n] - n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev) - else: - n.prio_order = n.tree_weight - - n.visited = 2 - elif n.visited == 1: - raise Errors.WafError('Dependency cycle found!') - return n.prio_order - - for x in tasks: - if x.visited != 0: - # must visit all to detect cycles - continue - try: - visit(x) - except Errors.WafError: - self.debug_cycles(tasks, reverse) - - ready = [] - waiting = [] - for x in tasks: - for k in x.run_after: - if not k.hasrun: - waiting.append(x) - break - else: - ready.append(x) - return (ready, waiting) - - def debug_cycles(self, tasks, reverse): - tmp = {} - for x in tasks: - tmp[x] = 0 - - def visit(n, acc): - if isinstance(n, Task.TaskGroup): - for k in n.next: - visit(k, acc) - return - if tmp[n] == 0: - tmp[n] = 1 - for k in reverse.get(n, []): - visit(k, [n] + acc) - tmp[n] = 2 - elif tmp[n] == 1: - lst = [] - for tsk in acc: - lst.append(repr(tsk)) - if tsk is n: - # exclude prior nodes, we want the minimum cycle - break - raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst)) - for x in tasks: - visit(x, []) - diff --git a/waflib/Scripting.py b/waflib/Scripting.py deleted file mode 100644 index ae17a8b..0000000 --- a/waflib/Scripting.py +++ /dev/null @@ -1,620 +0,0 @@ 
-#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -"Module called for configuring, compiling and installing targets" - -from __future__ import with_statement - -import os, shlex, shutil, traceback, errno, sys, stat -from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node - -build_dir_override = None - -no_climb_commands = ['configure'] - -default_cmd = "build" - -def waf_entry_point(current_directory, version, wafdir): - """ - This is the main entry point, all Waf execution starts here. - - :param current_directory: absolute path representing the current directory - :type current_directory: string - :param version: version number - :type version: string - :param wafdir: absolute path representing the directory of the waf library - :type wafdir: string - """ - Logs.init_log() - - if Context.WAFVERSION != version: - Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir) - sys.exit(1) - - # Store current directory before any chdir - Context.waf_dir = wafdir - Context.run_dir = Context.launch_dir = current_directory - start_dir = current_directory - no_climb = os.environ.get('NOCLIMB') - - if len(sys.argv) > 1: - # os.path.join handles absolute paths - # if sys.argv[1] is not an absolute path, then it is relative to the current working directory - potential_wscript = os.path.join(current_directory, sys.argv[1]) - if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript): - # need to explicitly normalize the path, as it may contain extra '/.' 
- path = os.path.normpath(os.path.dirname(potential_wscript)) - start_dir = os.path.abspath(path) - no_climb = True - sys.argv.pop(1) - - ctx = Context.create_context('options') - (options, commands, env) = ctx.parse_cmd_args(allow_unknown=True) - if options.top: - start_dir = Context.run_dir = Context.top_dir = options.top - no_climb = True - if options.out: - Context.out_dir = options.out - - # if 'configure' is in the commands, do not search any further - if not no_climb: - for k in no_climb_commands: - for y in commands: - if y.startswith(k): - no_climb = True - break - - # try to find a lock file (if the project was configured) - # at the same time, store the first wscript file seen - cur = start_dir - while cur: - try: - lst = os.listdir(cur) - except OSError: - lst = [] - Logs.error('Directory %r is unreadable!', cur) - if Options.lockfile in lst: - env = ConfigSet.ConfigSet() - try: - env.load(os.path.join(cur, Options.lockfile)) - ino = os.stat(cur)[stat.ST_INO] - except EnvironmentError: - pass - else: - # check if the folder was not moved - for x in (env.run_dir, env.top_dir, env.out_dir): - if not x: - continue - if Utils.is_win32: - if cur == x: - load = True - break - else: - # if the filesystem features symlinks, compare the inode numbers - try: - ino2 = os.stat(x)[stat.ST_INO] - except OSError: - pass - else: - if ino == ino2: - load = True - break - else: - Logs.warn('invalid lock file in %s', cur) - load = False - - if load: - Context.run_dir = env.run_dir - Context.top_dir = env.top_dir - Context.out_dir = env.out_dir - break - - if not Context.run_dir: - if Context.WSCRIPT_FILE in lst: - Context.run_dir = cur - - next = os.path.dirname(cur) - if next == cur: - break - cur = next - - if no_climb: - break - - wscript = os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)) - if not os.path.exists(wscript): - if options.whelp: - Logs.warn('These are the generic options (no wscript/project found)') - ctx.parser.print_help() - 
sys.exit(0) - Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE) - sys.exit(1) - - try: - os.chdir(Context.run_dir) - except OSError: - Logs.error('Waf: The folder %r is unreadable', Context.run_dir) - sys.exit(1) - - try: - set_main_module(wscript) - except Errors.WafError as e: - Logs.pprint('RED', e.verbose_msg) - Logs.error(str(e)) - sys.exit(1) - except Exception as e: - Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir) - traceback.print_exc(file=sys.stdout) - sys.exit(2) - - if options.profile: - import cProfile, pstats - cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt') - p = pstats.Stats('profi.txt') - p.sort_stats('time').print_stats(75) # or 'cumulative' - else: - try: - try: - run_commands() - except: - if options.pdb: - import pdb - type, value, tb = sys.exc_info() - traceback.print_exc() - pdb.post_mortem(tb) - else: - raise - except Errors.WafError as e: - if Logs.verbose > 1: - Logs.pprint('RED', e.verbose_msg) - Logs.error(e.msg) - sys.exit(1) - except SystemExit: - raise - except Exception as e: - traceback.print_exc(file=sys.stdout) - sys.exit(2) - except KeyboardInterrupt: - Logs.pprint('RED', 'Interrupted') - sys.exit(68) - -def set_main_module(file_path): - """ - Read the main wscript file into :py:const:`waflib.Context.Context.g_module` and - bind default functions such as ``init``, ``dist``, ``distclean`` if not defined. - Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization. 
- - :param file_path: absolute path representing the top-level wscript file - :type file_path: string - """ - Context.g_module = Context.load_module(file_path) - Context.g_module.root_path = file_path - - # note: to register the module globally, use the following: - # sys.modules['wscript_main'] = g_module - - def set_def(obj): - name = obj.__name__ - if not name in Context.g_module.__dict__: - setattr(Context.g_module, name, obj) - for k in (dist, distclean, distcheck): - set_def(k) - # add dummy init and shutdown functions if they're not defined - if not 'init' in Context.g_module.__dict__: - Context.g_module.init = Utils.nada - if not 'shutdown' in Context.g_module.__dict__: - Context.g_module.shutdown = Utils.nada - if not 'options' in Context.g_module.__dict__: - Context.g_module.options = Utils.nada - -def parse_options(): - """ - Parses the command-line options and initialize the logging system. - Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization. - """ - ctx = Context.create_context('options') - ctx.execute() - if not Options.commands: - if isinstance(default_cmd, list): - Options.commands.extend(default_cmd) - else: - Options.commands.append(default_cmd) - if Options.options.whelp: - ctx.parser.print_help() - sys.exit(0) - -def run_command(cmd_name): - """ - Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`. - - :param cmd_name: command to execute, like ``build`` - :type cmd_name: string - """ - ctx = Context.create_context(cmd_name) - ctx.log_timer = Utils.Timer() - ctx.options = Options.options # provided for convenience - ctx.cmd = cmd_name - try: - ctx.execute() - finally: - # Issue 1374 - ctx.finalize() - return ctx - -def run_commands(): - """ - Execute the Waf commands that were given on the command-line, and the other options - Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed - after :py:func:`waflib.Scripting.parse_options`. 
- """ - parse_options() - run_command('init') - while Options.commands: - cmd_name = Options.commands.pop(0) - ctx = run_command(cmd_name) - Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer) - run_command('shutdown') - -########################################################################################### - -def distclean_dir(dirname): - """ - Distclean function called in the particular case when:: - - top == out - - :param dirname: absolute path of the folder to clean - :type dirname: string - """ - for (root, dirs, files) in os.walk(dirname): - for f in files: - if f.endswith(('.o', '.moc', '.exe')): - fname = os.path.join(root, f) - try: - os.remove(fname) - except OSError: - Logs.warn('Could not remove %r', fname) - - for x in (Context.DBFILE, 'config.log'): - try: - os.remove(x) - except OSError: - pass - - try: - shutil.rmtree(Build.CACHE_DIR) - except OSError: - pass - -def distclean(ctx): - '''removes build folders and data''' - - def remove_and_log(k, fun): - try: - fun(k) - except EnvironmentError as e: - if e.errno != errno.ENOENT: - Logs.warn('Could not remove %r', k) - - # remove waf cache folders on the top-level - if not Options.commands: - for k in os.listdir('.'): - for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split(): - if k.startswith(x): - remove_and_log(k, shutil.rmtree) - - # remove a build folder, if any - cur = '.' 
- if ctx.options.no_lock_in_top: - cur = ctx.options.out - - try: - lst = os.listdir(cur) - except OSError: - Logs.warn('Could not read %r', cur) - return - - if Options.lockfile in lst: - f = os.path.join(cur, Options.lockfile) - try: - env = ConfigSet.ConfigSet(f) - except EnvironmentError: - Logs.warn('Could not read %r', f) - return - - if not env.out_dir or not env.top_dir: - Logs.warn('Invalid lock file %r', f) - return - - if env.out_dir == env.top_dir: - distclean_dir(env.out_dir) - else: - remove_and_log(env.out_dir, shutil.rmtree) - - for k in (env.out_dir, env.top_dir, env.run_dir): - p = os.path.join(k, Options.lockfile) - remove_and_log(p, os.remove) - -class Dist(Context.Context): - '''creates an archive containing the project source code''' - cmd = 'dist' - fun = 'dist' - algo = 'tar.bz2' - ext_algo = {} - - def execute(self): - """ - See :py:func:`waflib.Context.Context.execute` - """ - self.recurse([os.path.dirname(Context.g_module.root_path)]) - self.archive() - - def archive(self): - """ - Creates the source archive. 
- """ - import tarfile - - arch_name = self.get_arch_name() - - try: - self.base_path - except AttributeError: - self.base_path = self.path - - node = self.base_path.make_node(arch_name) - try: - node.delete() - except OSError: - pass - - files = self.get_files() - - if self.algo.startswith('tar.'): - tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', '')) - - for x in files: - self.add_tar_file(x, tar) - tar.close() - elif self.algo == 'zip': - import zipfile - zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED) - - for x in files: - archive_name = self.get_base_name() + '/' + x.path_from(self.base_path) - zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED) - zip.close() - else: - self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip') - - try: - from hashlib import sha256 - except ImportError: - digest = '' - else: - digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest() - - Logs.info('New archive created: %s%s', self.arch_name, digest) - - def get_tar_path(self, node): - """ - Return the path to use for a node in the tar archive, the purpose of this - is to let subclases resolve symbolic links or to change file names - - :return: absolute path - :rtype: string - """ - return node.abspath() - - def add_tar_file(self, x, tar): - """ - Adds a file to the tar archive. Symlinks are not verified. 
- - :param x: file path - :param tar: tar file object - """ - p = self.get_tar_path(x) - tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path)) - tinfo.uid = 0 - tinfo.gid = 0 - tinfo.uname = 'root' - tinfo.gname = 'root' - - if os.path.isfile(p): - with open(p, 'rb') as f: - tar.addfile(tinfo, fileobj=f) - else: - tar.addfile(tinfo) - - def get_tar_prefix(self): - """ - Returns the base path for files added into the archive tar file - - :rtype: string - """ - try: - return self.tar_prefix - except AttributeError: - return self.get_base_name() - - def get_arch_name(self): - """ - Returns the archive file name. - Set the attribute *arch_name* to change the default value:: - - def dist(ctx): - ctx.arch_name = 'ctx.tar.bz2' - - :rtype: string - """ - try: - self.arch_name - except AttributeError: - self.arch_name = self.get_base_name() + '.' + self.ext_algo.get(self.algo, self.algo) - return self.arch_name - - def get_base_name(self): - """ - Returns the default name of the main directory in the archive, which is set to *appname-version*. - Set the attribute *base_name* to change the default value:: - - def dist(ctx): - ctx.base_name = 'files' - - :rtype: string - """ - try: - self.base_name - except AttributeError: - appname = getattr(Context.g_module, Context.APPNAME, 'noname') - version = getattr(Context.g_module, Context.VERSION, '1.0') - self.base_name = appname + '-' + version - return self.base_name - - def get_excl(self): - """ - Returns the patterns to exclude for finding the files in the top-level directory. 
- Set the attribute *excl* to change the default value:: - - def dist(ctx): - ctx.excl = 'build **/*.o **/*.class' - - :rtype: string - """ - try: - return self.excl - except AttributeError: - self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' - if Context.out_dir: - nd = self.root.find_node(Context.out_dir) - if nd: - self.excl += ' ' + nd.path_from(self.base_path) - return self.excl - - def get_files(self): - """ - Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`. - Set *files* to prevent this behaviour:: - - def dist(ctx): - ctx.files = ctx.path.find_node('wscript') - - Files are also searched from the directory 'base_path', to change it, set:: - - def dist(ctx): - ctx.base_path = path - - :rtype: list of :py:class:`waflib.Node.Node` - """ - try: - files = self.files - except AttributeError: - files = self.base_path.ant_glob('**/*', excl=self.get_excl()) - return files - -def dist(ctx): - '''makes a tarball for redistributing the sources''' - pass - -class DistCheck(Dist): - """creates an archive with dist, then tries to build it""" - fun = 'distcheck' - cmd = 'distcheck' - - def execute(self): - """ - See :py:func:`waflib.Context.Context.execute` - """ - self.recurse([os.path.dirname(Context.g_module.root_path)]) - self.archive() - self.check() - - def make_distcheck_cmd(self, tmpdir): - cfg = [] - if Options.options.distcheck_args: - cfg = shlex.split(Options.options.distcheck_args) - else: - cfg = [x for x in sys.argv if x.startswith('-')] - cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg - return cmd - - def check(self): - """ - Creates the archive, uncompresses it and tries to build the project - """ - import tempfile, tarfile - - with tarfile.open(self.get_arch_name()) as t: - for x in t: - t.extract(x) - - instdir = tempfile.mkdtemp('.inst', 
self.get_base_name()) - cmd = self.make_distcheck_cmd(instdir) - ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait() - if ret: - raise Errors.WafError('distcheck failed with code %r' % ret) - - if os.path.exists(instdir): - raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir) - - shutil.rmtree(self.get_base_name()) - - -def distcheck(ctx): - '''checks if the project compiles (tarball from 'dist')''' - pass - -def autoconfigure(execute_method): - """ - Decorator that enables context commands to run *configure* as needed. - """ - def execute(self): - """ - Wraps :py:func:`waflib.Context.Context.execute` on the context class - """ - if not Configure.autoconfig: - return execute_method(self) - - env = ConfigSet.ConfigSet() - do_config = False - try: - env.load(os.path.join(Context.top_dir, Options.lockfile)) - except EnvironmentError: - Logs.warn('Configuring the project') - do_config = True - else: - if env.run_dir != Context.run_dir: - do_config = True - else: - h = 0 - for f in env.files: - try: - h = Utils.h_list((h, Utils.readf(f, 'rb'))) - except EnvironmentError: - do_config = True - break - else: - do_config = h != env.hash - - if do_config: - cmd = env.config_cmd or 'configure' - if Configure.autoconfig == 'clobber': - tmp = Options.options.__dict__ - launch_dir_tmp = Context.launch_dir - if env.options: - Options.options.__dict__ = env.options - Context.launch_dir = env.launch_dir - try: - run_command(cmd) - finally: - Options.options.__dict__ = tmp - Context.launch_dir = launch_dir_tmp - else: - run_command(cmd) - run_command(self.cmd) - else: - return execute_method(self) - return execute -Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute) - diff --git a/waflib/Task.py b/waflib/Task.py deleted file mode 100644 index cb49a73..0000000 --- a/waflib/Task.py +++ /dev/null @@ -1,1406 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -""" -Tasks represent atomic 
operations such as processes. -""" - -import os, re, sys, tempfile, traceback -from waflib import Utils, Logs, Errors - -# task states -NOT_RUN = 0 -"""The task was not executed yet""" - -MISSING = 1 -"""The task has been executed but the files have not been created""" - -CRASHED = 2 -"""The task execution returned a non-zero exit status""" - -EXCEPTION = 3 -"""An exception occurred in the task execution""" - -CANCELED = 4 -"""A dependency for the task is missing so it was cancelled""" - -SKIPPED = 8 -"""The task did not have to be executed""" - -SUCCESS = 9 -"""The task was successfully executed""" - -ASK_LATER = -1 -"""The task is not ready to be executed""" - -SKIP_ME = -2 -"""The task does not need to be executed""" - -RUN_ME = -3 -"""The task must be executed""" - -CANCEL_ME = -4 -"""The task cannot be executed because of a dependency problem""" - -COMPILE_TEMPLATE_SHELL = ''' -def f(tsk): - env = tsk.env - gen = tsk.generator - bld = gen.bld - cwdx = tsk.get_cwd() - p = env.get_flat - def to_list(xx): - if isinstance(xx, str): return [xx] - return xx - tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s - return tsk.exec_command(cmd, cwd=cwdx, env=env.env or None) -''' - -COMPILE_TEMPLATE_NOSHELL = ''' -def f(tsk): - env = tsk.env - gen = tsk.generator - bld = gen.bld - cwdx = tsk.get_cwd() - def to_list(xx): - if isinstance(xx, str): return [xx] - return xx - def merge(lst1, lst2): - if lst1 and lst2: - return lst1[:-1] + [lst1[-1] + lst2[0]] + lst2[1:] - return lst1 + lst2 - lst = [] - %s - if '' in lst: - lst = [x for x in lst if x] - tsk.last_cmd = lst - return tsk.exec_command(lst, cwd=cwdx, env=env.env or None) -''' - -COMPILE_TEMPLATE_SIG_VARS = ''' -def f(tsk): - sig = tsk.generator.bld.hash_env_vars(tsk.env, tsk.vars) - tsk.m.update(sig) - env = tsk.env - gen = tsk.generator - bld = gen.bld - cwdx = tsk.get_cwd() - p = env.get_flat - buf = [] - %s - tsk.m.update(repr(buf).encode()) -''' - -classes = {} -""" -The metaclass 
:py:class:`waflib.Task.store_task_type` stores all class tasks -created by user scripts or Waf tools to this dict. It maps class names to class objects. -""" - -class store_task_type(type): - """ - Metaclass: store the task classes into the dict pointed by the - class attribute 'register' which defaults to :py:const:`waflib.Task.classes`, - - The attribute 'run_str' is compiled into a method 'run' bound to the task class. - """ - def __init__(cls, name, bases, dict): - super(store_task_type, cls).__init__(name, bases, dict) - name = cls.__name__ - - if name != 'evil' and name != 'Task': - if getattr(cls, 'run_str', None): - # if a string is provided, convert it to a method - (f, dvars) = compile_fun(cls.run_str, cls.shell) - cls.hcode = Utils.h_cmd(cls.run_str) - cls.orig_run_str = cls.run_str - # change the name of run_str or it is impossible to subclass with a function - cls.run_str = None - cls.run = f - # process variables - cls.vars = list(set(cls.vars + dvars)) - cls.vars.sort() - if cls.vars: - fun = compile_sig_vars(cls.vars) - if fun: - cls.sig_vars = fun - elif getattr(cls, 'run', None) and not 'hcode' in cls.__dict__: - # getattr(cls, 'hcode') would look in the upper classes - cls.hcode = Utils.h_cmd(cls.run) - - # be creative - getattr(cls, 'register', classes)[name] = cls - -evil = store_task_type('evil', (object,), {}) -"Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified" - -class Task(evil): - """ - Task objects represents actions to perform such as commands to execute by calling the `run` method. - - Detecting when to execute a task occurs in the method :py:meth:`waflib.Task.Task.runnable_status`. - - Detecting which tasks to execute is performed through a hash value returned by - :py:meth:`waflib.Task.Task.signature`. The task signature is persistent from build to build. 
- """ - vars = [] - """ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)""" - - always_run = False - """Specify whether task instances must always be executed or not (class attribute)""" - - shell = False - """Execute the command with the shell (class attribute)""" - - color = 'GREEN' - """Color for the console display, see :py:const:`waflib.Logs.colors_lst`""" - - ext_in = [] - """File extensions that objects of this task class may use""" - - ext_out = [] - """File extensions that objects of this task class may create""" - - before = [] - """The instances of this class are executed before the instances of classes whose names are in this list""" - - after = [] - """The instances of this class are executed after the instances of classes whose names are in this list""" - - hcode = Utils.SIG_NIL - """String representing an additional hash for the class representation""" - - keep_last_cmd = False - """Whether to keep the last command executed on the instance after execution. - This may be useful for certain extensions but it can a lot of memory. - """ - - weight = 0 - """Optional weight to tune the priority for task instances. - The higher, the earlier. The weight only applies to single task objects.""" - - tree_weight = 0 - """Optional weight to tune the priority of task instances and whole subtrees. - The higher, the earlier.""" - - prio_order = 0 - """Priority order set by the scheduler on instances during the build phase. - You most likely do not need to set it. 
- """ - - __slots__ = ('hasrun', 'generator', 'env', 'inputs', 'outputs', 'dep_nodes', 'run_after') - - def __init__(self, *k, **kw): - self.hasrun = NOT_RUN - try: - self.generator = kw['generator'] - except KeyError: - self.generator = self - - self.env = kw['env'] - """:py:class:`waflib.ConfigSet.ConfigSet` object (make sure to provide one)""" - - self.inputs = [] - """List of input nodes, which represent the files used by the task instance""" - - self.outputs = [] - """List of output nodes, which represent the files created by the task instance""" - - self.dep_nodes = [] - """List of additional nodes to depend on""" - - self.run_after = set() - """Set of tasks that must be executed before this one""" - - def __lt__(self, other): - return self.priority() > other.priority() - def __le__(self, other): - return self.priority() >= other.priority() - def __gt__(self, other): - return self.priority() < other.priority() - def __ge__(self, other): - return self.priority() <= other.priority() - - def get_cwd(self): - """ - :return: current working directory - :rtype: :py:class:`waflib.Node.Node` - """ - bld = self.generator.bld - ret = getattr(self, 'cwd', None) or getattr(bld, 'cwd', bld.bldnode) - if isinstance(ret, str): - if os.path.isabs(ret): - ret = bld.root.make_node(ret) - else: - ret = self.generator.path.make_node(ret) - return ret - - def quote_flag(self, x): - """ - Surround a process argument by quotes so that a list of arguments can be written to a file - - :param x: flag - :type x: string - :return: quoted flag - :rtype: string - """ - old = x - if '\\' in x: - x = x.replace('\\', '\\\\') - if '"' in x: - x = x.replace('"', '\\"') - if old != x or ' ' in x or '\t' in x or "'" in x: - x = '"%s"' % x - return x - - def priority(self): - """ - Priority of execution; the higher, the earlier - - :return: the priority value - :rtype: a tuple of numeric values - """ - return (self.weight + self.prio_order, - getattr(self.generator, 'tg_idx_count', 0)) - - def 
split_argfile(self, cmd): - """ - Splits a list of process commands into the executable part and its list of arguments - - :return: a tuple containing the executable first and then the rest of arguments - :rtype: tuple - """ - return ([cmd[0]], [self.quote_flag(x) for x in cmd[1:]]) - - def exec_command(self, cmd, **kw): - """ - Wrapper for :py:meth:`waflib.Context.Context.exec_command`. - This version set the current working directory (``build.variant_dir``), - applies PATH settings (if self.env.PATH is provided), and can run long - commands through a temporary ``@argfile``. - - :param cmd: process command to execute - :type cmd: list of string (best) or string (process will use a shell) - :return: the return code - :rtype: int - - Optional parameters: - - #. cwd: current working directory (Node or string) - #. stdout: set to None to prevent waf from capturing the process standard output - #. stderr: set to None to prevent waf from capturing the process standard error - #. timeout: timeout value (Python 3) - """ - if not 'cwd' in kw: - kw['cwd'] = self.get_cwd() - - if hasattr(self, 'timeout'): - kw['timeout'] = self.timeout - - if self.env.PATH: - env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ) - env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH) - - if hasattr(self, 'stdout'): - kw['stdout'] = self.stdout - if hasattr(self, 'stderr'): - kw['stderr'] = self.stderr - - if not isinstance(cmd, str): - if Utils.is_win32: - # win32 compares the resulting length http://support.microsoft.com/kb/830473 - too_long = sum([len(arg) for arg in cmd]) + len(cmd) > 8192 - else: - # non-win32 counts the amount of arguments (200k) - too_long = len(cmd) > 200000 - - if too_long and getattr(self, 'allow_argsfile', True): - # Shunt arguments to a temporary file if the command is too long. 
- cmd, args = self.split_argfile(cmd) - try: - (fd, tmp) = tempfile.mkstemp() - os.write(fd, '\r\n'.join(args).encode()) - os.close(fd) - if Logs.verbose: - Logs.debug('argfile: @%r -> %r', tmp, args) - return self.generator.bld.exec_command(cmd + ['@' + tmp], **kw) - finally: - try: - os.remove(tmp) - except OSError: - # anti-virus and indexers can keep files open -_- - pass - return self.generator.bld.exec_command(cmd, **kw) - - def process(self): - """ - Runs the task and handles errors - - :return: 0 or None if everything is fine - :rtype: integer - """ - # remove the task signature immediately before it is executed - # so that the task will be executed again in case of failure - try: - del self.generator.bld.task_sigs[self.uid()] - except KeyError: - pass - - try: - ret = self.run() - except Exception: - self.err_msg = traceback.format_exc() - self.hasrun = EXCEPTION - else: - if ret: - self.err_code = ret - self.hasrun = CRASHED - else: - try: - self.post_run() - except Errors.WafError: - pass - except Exception: - self.err_msg = traceback.format_exc() - self.hasrun = EXCEPTION - else: - self.hasrun = SUCCESS - - if self.hasrun != SUCCESS and self.scan: - # rescan dependencies on next run - try: - del self.generator.bld.imp_sigs[self.uid()] - except KeyError: - pass - - def log_display(self, bld): - "Writes the execution status on the context logger" - if self.generator.bld.progress_bar == 3: - return - - s = self.display() - if s: - if bld.logger: - logger = bld.logger - else: - logger = Logs - - if self.generator.bld.progress_bar == 1: - c1 = Logs.colors.cursor_off - c2 = Logs.colors.cursor_on - logger.info(s, extra={'stream': sys.stderr, 'terminator':'', 'c1': c1, 'c2' : c2}) - else: - logger.info(s, extra={'terminator':'', 'c1': '', 'c2' : ''}) - - def display(self): - """ - Returns an execution status for the console, the progress bar, or the IDE output. 
- - :rtype: string - """ - col1 = Logs.colors(self.color) - col2 = Logs.colors.NORMAL - master = self.generator.bld.producer - - def cur(): - # the current task position, computed as late as possible - return master.processed - master.ready.qsize() - - if self.generator.bld.progress_bar == 1: - return self.generator.bld.progress_line(cur(), master.total, col1, col2) - - if self.generator.bld.progress_bar == 2: - ela = str(self.generator.bld.timer) - try: - ins = ','.join([n.name for n in self.inputs]) - except AttributeError: - ins = '' - try: - outs = ','.join([n.name for n in self.outputs]) - except AttributeError: - outs = '' - return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (master.total, cur(), ins, outs, ela) - - s = str(self) - if not s: - return None - - total = master.total - n = len(str(total)) - fs = '[%%%dd/%%%dd] %%s%%s%%s%%s\n' % (n, n) - kw = self.keyword() - if kw: - kw += ' ' - return fs % (cur(), total, kw, col1, s, col2) - - def hash_constraints(self): - """ - Identifies a task type for all the constraints relevant for the scheduler: precedence, file production - - :return: a hash value - :rtype: string - """ - return (tuple(self.before), tuple(self.after), tuple(self.ext_in), tuple(self.ext_out), self.__class__.__name__, self.hcode) - - def format_error(self): - """ - Returns an error message to display the build failure reasons - - :rtype: string - """ - if Logs.verbose: - msg = ': %r\n%r' % (self, getattr(self, 'last_cmd', '')) - else: - msg = ' (run with -v to display more information)' - name = getattr(self.generator, 'name', '') - if getattr(self, "err_msg", None): - return self.err_msg - elif not self.hasrun: - return 'task in %r was not executed for some reason: %r' % (name, self) - elif self.hasrun == CRASHED: - try: - return ' -> task in %r failed with exit status %r%s' % (name, self.err_code, msg) - except AttributeError: - return ' -> task in %r failed%s' % (name, msg) - elif self.hasrun == MISSING: - return ' -> 
missing files in %r%s' % (name, msg) - elif self.hasrun == CANCELED: - return ' -> %r canceled because of missing dependencies' % name - else: - return 'invalid status for task in %r: %r' % (name, self.hasrun) - - def colon(self, var1, var2): - """ - Enable scriptlet expressions of the form ${FOO_ST:FOO} - If the first variable (FOO_ST) is empty, then an empty list is returned - - The results will be slightly different if FOO_ST is a list, for example:: - - env.FOO = ['p1', 'p2'] - env.FOO_ST = '-I%s' - # ${FOO_ST:FOO} returns - ['-Ip1', '-Ip2'] - - env.FOO_ST = ['-a', '-b'] - # ${FOO_ST:FOO} returns - ['-a', '-b', 'p1', '-a', '-b', 'p2'] - """ - tmp = self.env[var1] - if not tmp: - return [] - - if isinstance(var2, str): - it = self.env[var2] - else: - it = var2 - if isinstance(tmp, str): - return [tmp % x for x in it] - else: - lst = [] - for y in it: - lst.extend(tmp) - lst.append(y) - return lst - - def __str__(self): - "string to display to the user" - name = self.__class__.__name__ - if self.outputs: - if name.endswith(('lib', 'program')) or not self.inputs: - node = self.outputs[0] - return node.path_from(node.ctx.launch_node()) - if not (self.inputs or self.outputs): - return self.__class__.__name__ - if len(self.inputs) == 1: - node = self.inputs[0] - return node.path_from(node.ctx.launch_node()) - - src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs]) - tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs]) - if self.outputs: - sep = ' -> ' - else: - sep = '' - return '%s: %s%s%s' % (self.__class__.__name__, src_str, sep, tgt_str) - - def keyword(self): - "Display keyword used to prettify the console outputs" - name = self.__class__.__name__ - if name.endswith(('lib', 'program')): - return 'Linking' - if len(self.inputs) == 1 and len(self.outputs) == 1: - return 'Compiling' - if not self.inputs: - if self.outputs: - return 'Creating' - else: - return 'Running' - return 'Processing' - - def __repr__(self): - 
"for debugging purposes" - try: - ins = ",".join([x.name for x in self.inputs]) - outs = ",".join([x.name for x in self.outputs]) - except AttributeError: - ins = ",".join([str(x) for x in self.inputs]) - outs = ",".join([str(x) for x in self.outputs]) - return "".join(['\n\t{task %r: ' % id(self), self.__class__.__name__, " ", ins, " -> ", outs, '}']) - - def uid(self): - """ - Returns an identifier used to determine if tasks are up-to-date. Since the - identifier will be stored between executions, it must be: - - - unique for a task: no two tasks return the same value (for a given build context) - - the same for a given task instance - - By default, the node paths, the class name, and the function are used - as inputs to compute a hash. - - The pointer to the object (python built-in 'id') will change between build executions, - and must be avoided in such hashes. - - :return: hash value - :rtype: string - """ - try: - return self.uid_ - except AttributeError: - m = Utils.md5(self.__class__.__name__) - up = m.update - for x in self.inputs + self.outputs: - up(x.abspath()) - self.uid_ = m.digest() - return self.uid_ - - def set_inputs(self, inp): - """ - Appends the nodes to the *inputs* list - - :param inp: input nodes - :type inp: node or list of nodes - """ - if isinstance(inp, list): - self.inputs += inp - else: - self.inputs.append(inp) - - def set_outputs(self, out): - """ - Appends the nodes to the *outputs* list - - :param out: output nodes - :type out: node or list of nodes - """ - if isinstance(out, list): - self.outputs += out - else: - self.outputs.append(out) - - def set_run_after(self, task): - """ - Run this task only after the given *task*. - - Calling this method from :py:meth:`waflib.Task.Task.runnable_status` may cause - build deadlocks; see :py:meth:`waflib.Tools.fc.fc.runnable_status` for details. 
- - :param task: task - :type task: :py:class:`waflib.Task.Task` - """ - assert isinstance(task, Task) - self.run_after.add(task) - - def signature(self): - """ - Task signatures are stored between build executions, they are use to track the changes - made to the input nodes (not to the outputs!). The signature hashes data from various sources: - - * explicit dependencies: files listed in the inputs (list of node objects) :py:meth:`waflib.Task.Task.sig_explicit_deps` - * implicit dependencies: list of nodes returned by scanner methods (when present) :py:meth:`waflib.Task.Task.sig_implicit_deps` - * hashed data: variables/values read from task.vars/task.env :py:meth:`waflib.Task.Task.sig_vars` - - If the signature is expected to give a different result, clear the cache kept in ``self.cache_sig``:: - - from waflib import Task - class cls(Task.Task): - def signature(self): - sig = super(Task.Task, self).signature() - delattr(self, 'cache_sig') - return super(Task.Task, self).signature() - - :return: the signature value - :rtype: string or bytes - """ - try: - return self.cache_sig - except AttributeError: - pass - - self.m = Utils.md5(self.hcode) - - # explicit deps - self.sig_explicit_deps() - - # env vars - self.sig_vars() - - # implicit deps / scanner results - if self.scan: - try: - self.sig_implicit_deps() - except Errors.TaskRescan: - return self.signature() - - ret = self.cache_sig = self.m.digest() - return ret - - def runnable_status(self): - """ - Returns the Task status - - :return: a task state in :py:const:`waflib.Task.RUN_ME`, - :py:const:`waflib.Task.SKIP_ME`, :py:const:`waflib.Task.CANCEL_ME` or :py:const:`waflib.Task.ASK_LATER`. 
- :rtype: int - """ - bld = self.generator.bld - if bld.is_install < 0: - return SKIP_ME - - for t in self.run_after: - if not t.hasrun: - return ASK_LATER - elif t.hasrun < SKIPPED: - # a dependency has an error - return CANCEL_ME - - # first compute the signature - try: - new_sig = self.signature() - except Errors.TaskNotReady: - return ASK_LATER - - # compare the signature to a signature computed previously - key = self.uid() - try: - prev_sig = bld.task_sigs[key] - except KeyError: - Logs.debug('task: task %r must run: it was never run before or the task code changed', self) - return RUN_ME - - if new_sig != prev_sig: - Logs.debug('task: task %r must run: the task signature changed', self) - return RUN_ME - - # compare the signatures of the outputs - for node in self.outputs: - sig = bld.node_sigs.get(node) - if not sig: - Logs.debug('task: task %r must run: an output node has no signature', self) - return RUN_ME - if sig != key: - Logs.debug('task: task %r must run: an output node was produced by another task', self) - return RUN_ME - if not node.exists(): - Logs.debug('task: task %r must run: an output node does not exist', self) - return RUN_ME - - return (self.always_run and RUN_ME) or SKIP_ME - - def post_run(self): - """ - Called after successful execution to record that the task has run by - updating the entry in :py:attr:`waflib.Build.BuildContext.task_sigs`. 
- """ - bld = self.generator.bld - for node in self.outputs: - if not node.exists(): - self.hasrun = MISSING - self.err_msg = '-> missing file: %r' % node.abspath() - raise Errors.WafError(self.err_msg) - bld.node_sigs[node] = self.uid() # make sure this task produced the files in question - bld.task_sigs[self.uid()] = self.signature() - if not self.keep_last_cmd: - try: - del self.last_cmd - except AttributeError: - pass - - def sig_explicit_deps(self): - """ - Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.inputs` - and :py:attr:`waflib.Task.Task.dep_nodes` signatures. - """ - bld = self.generator.bld - upd = self.m.update - - # the inputs - for x in self.inputs + self.dep_nodes: - upd(x.get_bld_sig()) - - # manual dependencies, they can slow down the builds - if bld.deps_man: - additional_deps = bld.deps_man - for x in self.inputs + self.outputs: - try: - d = additional_deps[x] - except KeyError: - continue - - for v in d: - try: - v = v.get_bld_sig() - except AttributeError: - if hasattr(v, '__call__'): - v = v() # dependency is a function, call it - upd(v) - - def sig_deep_inputs(self): - """ - Enable rebuilds on input files task signatures. Not used by default. - - Example: hashes of output programs can be unchanged after being re-linked, - despite the libraries being different. This method can thus prevent stale unit test - results (waf_unit_test.py). - - Hashing input file timestamps is another possibility for the implementation. - This may cause unnecessary rebuilds when input tasks are frequently executed. - Here is an implementation example:: - - lst = [] - for node in self.inputs + self.dep_nodes: - st = os.stat(node.abspath()) - lst.append(st.st_mtime) - lst.append(st.st_size) - self.m.update(Utils.h_list(lst)) - - The downside of the implementation is that it absolutely requires all build directory - files to be declared within the current build. 
- """ - bld = self.generator.bld - lst = [bld.task_sigs[bld.node_sigs[node]] for node in (self.inputs + self.dep_nodes) if node.is_bld()] - self.m.update(Utils.h_list(lst)) - - def sig_vars(self): - """ - Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.env` variables/values - When overriding this method, and if scriptlet expressions are used, make sure to follow - the code in :py:meth:`waflib.Task.Task.compile_sig_vars` to enable dependencies on scriptlet results. - - This method may be replaced on subclasses by the metaclass to force dependencies on scriptlet code. - """ - sig = self.generator.bld.hash_env_vars(self.env, self.vars) - self.m.update(sig) - - scan = None - """ - This method, when provided, returns a tuple containing: - - * a list of nodes corresponding to real files - * a list of names for files not found in path_lst - - For example:: - - from waflib.Task import Task - class mytask(Task): - def scan(self, node): - return ([], []) - - The first and second lists in the tuple are stored in :py:attr:`waflib.Build.BuildContext.node_deps` and - :py:attr:`waflib.Build.BuildContext.raw_deps` respectively. - """ - - def sig_implicit_deps(self): - """ - Used by :py:meth:`waflib.Task.Task.signature`; it hashes node signatures - obtained by scanning for dependencies (:py:meth:`waflib.Task.Task.scan`). - - The exception :py:class:`waflib.Errors.TaskRescan` is thrown - when a file has changed. In this case, the method :py:meth:`waflib.Task.Task.signature` is called - once again, and return here to call :py:meth:`waflib.Task.Task.scan` and searching for dependencies. 
- """ - bld = self.generator.bld - - # get the task signatures from previous runs - key = self.uid() - prev = bld.imp_sigs.get(key, []) - - # for issue #379 - if prev: - try: - if prev == self.compute_sig_implicit_deps(): - return prev - except Errors.TaskNotReady: - raise - except EnvironmentError: - # when a file was renamed, remove the stale nodes (headers in folders without source files) - # this will break the order calculation for headers created during the build in the source directory (should be uncommon) - # the behaviour will differ when top != out - for x in bld.node_deps.get(self.uid(), []): - if not x.is_bld() and not x.exists(): - try: - del x.parent.children[x.name] - except KeyError: - pass - del bld.imp_sigs[key] - raise Errors.TaskRescan('rescan') - - # no previous run or the signature of the dependencies has changed, rescan the dependencies - (bld.node_deps[key], bld.raw_deps[key]) = self.scan() - if Logs.verbose: - Logs.debug('deps: scanner for %s: %r; unresolved: %r', self, bld.node_deps[key], bld.raw_deps[key]) - - # recompute the signature and return it - try: - bld.imp_sigs[key] = self.compute_sig_implicit_deps() - except EnvironmentError: - for k in bld.node_deps.get(self.uid(), []): - if not k.exists(): - Logs.warn('Dependency %r for %r is missing: check the task declaration and the build order!', k, self) - raise - - def compute_sig_implicit_deps(self): - """ - Used by :py:meth:`waflib.Task.Task.sig_implicit_deps` for computing the actual hash of the - :py:class:`waflib.Node.Node` returned by the scanner. 
- - :return: a hash value for the implicit dependencies - :rtype: string or bytes - """ - upd = self.m.update - self.are_implicit_nodes_ready() - - # scanner returns a node that does not have a signature - # just *ignore* the error and let them figure out from the compiler output - # waf -k behaviour - for k in self.generator.bld.node_deps.get(self.uid(), []): - upd(k.get_bld_sig()) - return self.m.digest() - - def are_implicit_nodes_ready(self): - """ - For each node returned by the scanner, see if there is a task that creates it, - and infer the build order - - This has a low performance impact on null builds (1.86s->1.66s) thanks to caching (28s->1.86s) - """ - bld = self.generator.bld - try: - cache = bld.dct_implicit_nodes - except AttributeError: - bld.dct_implicit_nodes = cache = {} - - # one cache per build group - try: - dct = cache[bld.current_group] - except KeyError: - dct = cache[bld.current_group] = {} - for tsk in bld.cur_tasks: - for x in tsk.outputs: - dct[x] = tsk - - modified = False - for x in bld.node_deps.get(self.uid(), []): - if x in dct: - self.run_after.add(dct[x]) - modified = True - - if modified: - for tsk in self.run_after: - if not tsk.hasrun: - #print "task is not ready..." 
- raise Errors.TaskNotReady('not ready') -if sys.hexversion > 0x3000000: - def uid(self): - try: - return self.uid_ - except AttributeError: - m = Utils.md5(self.__class__.__name__.encode('latin-1', 'xmlcharrefreplace')) - up = m.update - for x in self.inputs + self.outputs: - up(x.abspath().encode('latin-1', 'xmlcharrefreplace')) - self.uid_ = m.digest() - return self.uid_ - uid.__doc__ = Task.uid.__doc__ - Task.uid = uid - -def is_before(t1, t2): - """ - Returns a non-zero value if task t1 is to be executed before task t2:: - - t1.ext_out = '.h' - t2.ext_in = '.h' - t2.after = ['t1'] - t1.before = ['t2'] - waflib.Task.is_before(t1, t2) # True - - :param t1: Task object - :type t1: :py:class:`waflib.Task.Task` - :param t2: Task object - :type t2: :py:class:`waflib.Task.Task` - """ - to_list = Utils.to_list - for k in to_list(t2.ext_in): - if k in to_list(t1.ext_out): - return 1 - - if t1.__class__.__name__ in to_list(t2.after): - return 1 - - if t2.__class__.__name__ in to_list(t1.before): - return 1 - - return 0 - -def set_file_constraints(tasks): - """ - Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs - - :param tasks: tasks - :type tasks: list of :py:class:`waflib.Task.Task` - """ - ins = Utils.defaultdict(set) - outs = Utils.defaultdict(set) - for x in tasks: - for a in x.inputs: - ins[a].add(x) - for a in x.dep_nodes: - ins[a].add(x) - for a in x.outputs: - outs[a].add(x) - - links = set(ins.keys()).intersection(outs.keys()) - for k in links: - for a in ins[k]: - a.run_after.update(outs[k]) - - -class TaskGroup(object): - """ - Wrap nxm task order constraints into a single object - to prevent the creation of large list/set objects - - This is an optimization - """ - def __init__(self, prev, next): - self.prev = prev - self.next = next - self.done = False - - def get_hasrun(self): - for k in self.prev: - if not k.hasrun: - return NOT_RUN - return SUCCESS - - hasrun = property(get_hasrun, None) - -def 
set_precedence_constraints(tasks): - """ - Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes - - :param tasks: tasks - :type tasks: list of :py:class:`waflib.Task.Task` - """ - cstr_groups = Utils.defaultdict(list) - for x in tasks: - h = x.hash_constraints() - cstr_groups[h].append(x) - - keys = list(cstr_groups.keys()) - maxi = len(keys) - - # this list should be short - for i in range(maxi): - t1 = cstr_groups[keys[i]][0] - for j in range(i + 1, maxi): - t2 = cstr_groups[keys[j]][0] - - # add the constraints based on the comparisons - if is_before(t1, t2): - a = i - b = j - elif is_before(t2, t1): - a = j - b = i - else: - continue - - a = cstr_groups[keys[a]] - b = cstr_groups[keys[b]] - - if len(a) < 2 or len(b) < 2: - for x in b: - x.run_after.update(a) - else: - group = TaskGroup(set(a), set(b)) - for x in b: - x.run_after.add(group) - -def funex(c): - """ - Compiles a scriptlet expression into a Python function - - :param c: function to compile - :type c: string - :return: the function 'f' declared in the input string - :rtype: function - """ - dc = {} - exec(c, dc) - return dc['f'] - -re_cond = re.compile(r'(?P<var>\w+)|(?P<or>\|)|(?P<and>&)') -re_novar = re.compile(r'^(SRC|TGT)\W+.*?$') -reg_act = re.compile(r'(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})', re.M) -def compile_fun_shell(line): - """ - Creates a compiled function to execute a process through a sub-shell - """ - extr = [] - def repl(match): - g = match.group - if g('dollar'): - return "$" - elif g('backslash'): - return '\\\\' - elif g('subst'): - extr.append((g('var'), g('code'))) - return "%s" - return None - line = reg_act.sub(repl, line) or line - dvars = [] - def add_dvar(x): - if x not in dvars: - dvars.append(x) - - def replc(m): - # performs substitutions and populates dvars - if m.group('and'): - return ' and ' - elif m.group('or'): - return ' or ' - else: - x = m.group('var') - add_dvar(x) - return 
'env[%r]' % x - - parm = [] - app = parm.append - for (var, meth) in extr: - if var == 'SRC': - if meth: - app('tsk.inputs%s' % meth) - else: - app('" ".join([a.path_from(cwdx) for a in tsk.inputs])') - elif var == 'TGT': - if meth: - app('tsk.outputs%s' % meth) - else: - app('" ".join([a.path_from(cwdx) for a in tsk.outputs])') - elif meth: - if meth.startswith(':'): - add_dvar(var) - m = meth[1:] - if m == 'SRC': - m = '[a.path_from(cwdx) for a in tsk.inputs]' - elif m == 'TGT': - m = '[a.path_from(cwdx) for a in tsk.outputs]' - elif re_novar.match(m): - m = '[tsk.inputs%s]' % m[3:] - elif re_novar.match(m): - m = '[tsk.outputs%s]' % m[3:] - else: - add_dvar(m) - if m[:3] not in ('tsk', 'gen', 'bld'): - m = '%r' % m - app('" ".join(tsk.colon(%r, %s))' % (var, m)) - elif meth.startswith('?'): - # In A?B|C output env.A if one of env.B or env.C is non-empty - expr = re_cond.sub(replc, meth[1:]) - app('p(%r) if (%s) else ""' % (var, expr)) - else: - call = '%s%s' % (var, meth) - add_dvar(call) - app(call) - else: - add_dvar(var) - app("p('%s')" % var) - if parm: - parm = "%% (%s) " % (',\n\t\t'.join(parm)) - else: - parm = '' - - c = COMPILE_TEMPLATE_SHELL % (line, parm) - Logs.debug('action: %s', c.strip().splitlines()) - return (funex(c), dvars) - -reg_act_noshell = re.compile(r"(?P<space>\s+)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})|(?P<text>([^$ \t\n\r\f\v]|\$\$)+)", re.M) -def compile_fun_noshell(line): - """ - Creates a compiled function to execute a process without a sub-shell - """ - buf = [] - dvars = [] - merge = False - app = buf.append - - def add_dvar(x): - if x not in dvars: - dvars.append(x) - - def replc(m): - # performs substitutions and populates dvars - if m.group('and'): - return ' and ' - elif m.group('or'): - return ' or ' - else: - x = m.group('var') - add_dvar(x) - return 'env[%r]' % x - - for m in reg_act_noshell.finditer(line): - if m.group('space'): - merge = False - continue - elif m.group('text'): - app('[%r]' % 
m.group('text').replace('$$', '$')) - elif m.group('subst'): - var = m.group('var') - code = m.group('code') - if var == 'SRC': - if code: - app('[tsk.inputs%s]' % code) - else: - app('[a.path_from(cwdx) for a in tsk.inputs]') - elif var == 'TGT': - if code: - app('[tsk.outputs%s]' % code) - else: - app('[a.path_from(cwdx) for a in tsk.outputs]') - elif code: - if code.startswith(':'): - # a composed variable ${FOO:OUT} - add_dvar(var) - m = code[1:] - if m == 'SRC': - m = '[a.path_from(cwdx) for a in tsk.inputs]' - elif m == 'TGT': - m = '[a.path_from(cwdx) for a in tsk.outputs]' - elif re_novar.match(m): - m = '[tsk.inputs%s]' % m[3:] - elif re_novar.match(m): - m = '[tsk.outputs%s]' % m[3:] - else: - add_dvar(m) - if m[:3] not in ('tsk', 'gen', 'bld'): - m = '%r' % m - app('tsk.colon(%r, %s)' % (var, m)) - elif code.startswith('?'): - # In A?B|C output env.A if one of env.B or env.C is non-empty - expr = re_cond.sub(replc, code[1:]) - app('to_list(env[%r] if (%s) else [])' % (var, expr)) - else: - # plain code such as ${tsk.inputs[0].abspath()} - call = '%s%s' % (var, code) - add_dvar(call) - app('to_list(%s)' % call) - else: - # a plain variable such as # a plain variable like ${AR} - app('to_list(env[%r])' % var) - add_dvar(var) - if merge: - tmp = 'merge(%s, %s)' % (buf[-2], buf[-1]) - del buf[-1] - buf[-1] = tmp - merge = True # next turn - - buf = ['lst.extend(%s)' % x for x in buf] - fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf) - Logs.debug('action: %s', fun.strip().splitlines()) - return (funex(fun), dvars) - -def compile_fun(line, shell=False): - """ - Parses a string expression such as '${CC} ${SRC} -o ${TGT}' and returns a pair containing: - - * The function created (compiled) for use as :py:meth:`waflib.Task.Task.run` - * The list of variables that must cause rebuilds when *env* data is modified - - for example:: - - from waflib.Task import compile_fun - compile_fun('cxx', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}') - - def 
build(bld): - bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"') - - The env variables (CXX, ..) on the task must not hold dicts so as to preserve a consistent order. - The reserved keywords ``TGT`` and ``SRC`` represent the task input and output nodes - - """ - if isinstance(line, str): - if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0: - shell = True - else: - dvars_lst = [] - funs_lst = [] - for x in line: - if isinstance(x, str): - fun, dvars = compile_fun(x, shell) - dvars_lst += dvars - funs_lst.append(fun) - else: - # assume a function to let through - funs_lst.append(x) - def composed_fun(task): - for x in funs_lst: - ret = x(task) - if ret: - return ret - return None - return composed_fun, dvars_lst - if shell: - return compile_fun_shell(line) - else: - return compile_fun_noshell(line) - -def compile_sig_vars(vars): - """ - This method produces a sig_vars method suitable for subclasses that provide - scriptlet code in their run_str code. - If no such method can be created, this method returns None. - - The purpose of the sig_vars method returned is to ensures - that rebuilds occur whenever the contents of the expression changes. 
- This is the case B below:: - - import time - # case A: regular variables - tg = bld(rule='echo ${FOO}') - tg.env.FOO = '%s' % time.time() - # case B - bld(rule='echo ${gen.foo}', foo='%s' % time.time()) - - :param vars: env variables such as CXXFLAGS or gen.foo - :type vars: list of string - :return: A sig_vars method relevant for dependencies if adequate, else None - :rtype: A function, or None in most cases - """ - buf = [] - for x in sorted(vars): - if x[:3] in ('tsk', 'gen', 'bld'): - buf.append('buf.append(%s)' % x) - if buf: - return funex(COMPILE_TEMPLATE_SIG_VARS % '\n\t'.join(buf)) - return None - -def task_factory(name, func=None, vars=None, color='GREEN', ext_in=[], ext_out=[], before=[], after=[], shell=False, scan=None): - """ - Returns a new task subclass with the function ``run`` compiled from the line given. - - :param func: method run - :type func: string or function - :param vars: list of variables to hash - :type vars: list of string - :param color: color to use - :type color: string - :param shell: when *func* is a string, enable/disable the use of the shell - :type shell: bool - :param scan: method scan - :type scan: function - :rtype: :py:class:`waflib.Task.Task` - """ - - params = { - 'vars': vars or [], # function arguments are static, and this one may be modified by the class - 'color': color, - 'name': name, - 'shell': shell, - 'scan': scan, - } - - if isinstance(func, str) or isinstance(func, tuple): - params['run_str'] = func - else: - params['run'] = func - - cls = type(Task)(name, (Task,), params) - classes[name] = cls - - if ext_in: - cls.ext_in = Utils.to_list(ext_in) - if ext_out: - cls.ext_out = Utils.to_list(ext_out) - if before: - cls.before = Utils.to_list(before) - if after: - cls.after = Utils.to_list(after) - - return cls - -def deep_inputs(cls): - """ - Task class decorator to enable rebuilds on input files task signatures - """ - def sig_explicit_deps(self): - Task.sig_explicit_deps(self) - Task.sig_deep_inputs(self) - 
cls.sig_explicit_deps = sig_explicit_deps - return cls - -TaskBase = Task -"Provided for compatibility reasons, TaskBase should not be used" - -class TaskSemaphore(object): - """ - Task semaphores provide a simple and efficient way of throttling the amount of - a particular task to run concurrently. The throttling value is capped - by the amount of maximum jobs, so for example, a `TaskSemaphore(10)` - has no effect in a `-j2` build. - - Task semaphores are typically specified on the task class level:: - - class compile(waflib.Task.Task): - semaphore = waflib.Task.TaskSemaphore(2) - run_str = 'touch ${TGT}' - - Task semaphores are meant to be used by the build scheduler in the main - thread, so there are no guarantees of thread safety. - """ - def __init__(self, num): - """ - :param num: maximum value of concurrent tasks - :type num: int - """ - self.num = num - self.locking = set() - self.waiting = set() - - def is_locked(self): - """Returns True if this semaphore cannot be acquired by more tasks""" - return len(self.locking) >= self.num - - def acquire(self, tsk): - """ - Mark the semaphore as used by the given task (not re-entrant). - - :param tsk: task object - :type tsk: :py:class:`waflib.Task.Task` - :raises: :py:class:`IndexError` in case the resource is already acquired - """ - if self.is_locked(): - raise IndexError('Cannot lock more %r' % self.locking) - self.locking.add(tsk) - - def release(self, tsk): - """ - Mark the semaphore as unused by the given task. 
- - :param tsk: task object - :type tsk: :py:class:`waflib.Task.Task` - :raises: :py:class:`KeyError` in case the resource is not acquired by the task - """ - self.locking.remove(tsk) - diff --git a/waflib/TaskGen.py b/waflib/TaskGen.py deleted file mode 100644 index 532b7d5..0000000 --- a/waflib/TaskGen.py +++ /dev/null @@ -1,917 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -""" -Task generators - -The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code) -The instances can have various parameters, but the creation of task nodes (Task.py) -is deferred. To achieve this, various methods are called from the method "apply" -""" - -import copy, re, os, functools -from waflib import Task, Utils, Logs, Errors, ConfigSet, Node - -feats = Utils.defaultdict(set) -"""remember the methods declaring features""" - -HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh'] - -class task_gen(object): - """ - Instances of this class create :py:class:`waflib.Task.Task` when - calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread. - A few notes: - - * The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..) 
- * The 'features' are used to add methods to self.meths and then execute them - * The attribute 'path' is a node representing the location of the task generator - * The tasks created are added to the attribute *tasks* - * The attribute 'idx' is a counter of task generators in the same path - """ - - mappings = Utils.ordered_iter_dict() - """Mappings are global file extension mappings that are retrieved in the order of definition""" - - prec = Utils.defaultdict(set) - """Dict that holds the precedence execution rules for task generator methods""" - - def __init__(self, *k, **kw): - """ - Task generator objects predefine various attributes (source, target) for possible - processing by process_rule (make-like rules) or process_source (extensions, misc methods) - - Tasks are stored on the attribute 'tasks'. They are created by calling methods - listed in ``self.meths`` or referenced in the attribute ``features`` - A topological sort is performed to execute the methods in correct order. - - The extra key/value elements passed in ``kw`` are set as attributes - """ - self.source = [] - self.target = '' - - self.meths = [] - """ - List of method names to execute (internal) - """ - - self.features = [] - """ - List of feature names for bringing new methods in - """ - - self.tasks = [] - """ - Tasks created are added to this list - """ - - if not 'bld' in kw: - # task generators without a build context :-/ - self.env = ConfigSet.ConfigSet() - self.idx = 0 - self.path = None - else: - self.bld = kw['bld'] - self.env = self.bld.env.derive() - self.path = kw.get('path', self.bld.path) # by default, emulate chdir when reading scripts - - # Provide a unique index per folder - # This is part of a measure to prevent output file name collisions - path = self.path.abspath() - try: - self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1 - except AttributeError: - self.bld.idx = {} - self.idx = self.bld.idx[path] = 1 - - # Record the global task generator count - try: - 
self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1 - except AttributeError: - self.tg_idx_count = self.bld.tg_idx_count = 1 - - for key, val in kw.items(): - setattr(self, key, val) - - def __str__(self): - """Debugging helper""" - return "<task_gen %r declared in %s>" % (self.name, self.path.abspath()) - - def __repr__(self): - """Debugging helper""" - lst = [] - for x in self.__dict__: - if x not in ('env', 'bld', 'compiled_tasks', 'tasks'): - lst.append("%s=%s" % (x, repr(getattr(self, x)))) - return "bld(%s) in %s" % (", ".join(lst), self.path.abspath()) - - def get_cwd(self): - """ - Current working directory for the task generator, defaults to the build directory. - This is still used in a few places but it should disappear at some point as the classes - define their own working directory. - - :rtype: :py:class:`waflib.Node.Node` - """ - return self.bld.bldnode - - def get_name(self): - """ - If the attribute ``name`` is not set on the instance, - the name is computed from the target name:: - - def build(bld): - x = bld(name='foo') - x.get_name() # foo - y = bld(target='bar') - y.get_name() # bar - - :rtype: string - :return: name of this task generator - """ - try: - return self._name - except AttributeError: - if isinstance(self.target, list): - lst = [str(x) for x in self.target] - name = self._name = ','.join(lst) - else: - name = self._name = str(self.target) - return name - def set_name(self, name): - self._name = name - - name = property(get_name, set_name) - - def to_list(self, val): - """ - Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list` - - :type val: string or list of string - :param val: input to return as a list - :rtype: list - """ - if isinstance(val, str): - return val.split() - else: - return val - - def post(self): - """ - Creates tasks for this task generators. The following operations are performed: - - #. The body of this method is called only once and sets the attribute ``posted`` - #. 
The attribute ``features`` is used to add more methods in ``self.meths`` - #. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec` - #. The methods are then executed in order - #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks` - """ - if getattr(self, 'posted', None): - return False - self.posted = True - - keys = set(self.meths) - keys.update(feats['*']) - - # add the methods listed in the features - self.features = Utils.to_list(self.features) - for x in self.features: - st = feats[x] - if st: - keys.update(st) - elif not x in Task.classes: - Logs.warn('feature %r does not exist - bind at least one method to it?', x) - - # copy the precedence table - prec = {} - prec_tbl = self.prec - for x in prec_tbl: - if x in keys: - prec[x] = prec_tbl[x] - - # elements disconnected - tmp = [] - for a in keys: - for x in prec.values(): - if a in x: - break - else: - tmp.append(a) - - tmp.sort(reverse=True) - - # topological sort - out = [] - while tmp: - e = tmp.pop() - if e in keys: - out.append(e) - try: - nlst = prec[e] - except KeyError: - pass - else: - del prec[e] - for x in nlst: - for y in prec: - if x in prec[y]: - break - else: - tmp.append(x) - tmp.sort(reverse=True) - - if prec: - buf = ['Cycle detected in the method execution:'] - for k, v in prec.items(): - buf.append('- %s after %s' % (k, [x for x in v if x in prec])) - raise Errors.WafError('\n'.join(buf)) - self.meths = out - - # then we run the methods in order - Logs.debug('task_gen: posting %s %d', self, id(self)) - for x in out: - try: - v = getattr(self, x) - except AttributeError: - raise Errors.WafError('%r is not a valid task generator method' % x) - Logs.debug('task_gen: -> %s (%d)', x, id(self)) - v() - - Logs.debug('task_gen: posted %s', self.name) - return True - - def get_hook(self, node): - """ - Returns the ``@extension`` method to call for a Node of a particular extension. 
- - :param node: Input file to process - :type node: :py:class:`waflib.Tools.Node.Node` - :return: A method able to process the input node by looking at the extension - :rtype: function - """ - name = node.name - for k in self.mappings: - try: - if name.endswith(k): - return self.mappings[k] - except TypeError: - # regexps objects - if k.match(name): - return self.mappings[k] - keys = list(self.mappings.keys()) - raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys)) - - def create_task(self, name, src=None, tgt=None, **kw): - """ - Creates task instances. - - :param name: task class name - :type name: string - :param src: input nodes - :type src: list of :py:class:`waflib.Tools.Node.Node` - :param tgt: output nodes - :type tgt: list of :py:class:`waflib.Tools.Node.Node` - :return: A task object - :rtype: :py:class:`waflib.Task.Task` - """ - task = Task.classes[name](env=self.env.derive(), generator=self) - if src: - task.set_inputs(src) - if tgt: - task.set_outputs(tgt) - task.__dict__.update(kw) - self.tasks.append(task) - return task - - def clone(self, env): - """ - Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that the - it does not create the same output files as the original, or the same files may - be compiled several times. 
- - :param env: A configuration set - :type env: :py:class:`waflib.ConfigSet.ConfigSet` - :return: A copy - :rtype: :py:class:`waflib.TaskGen.task_gen` - """ - newobj = self.bld() - for x in self.__dict__: - if x in ('env', 'bld'): - continue - elif x in ('path', 'features'): - setattr(newobj, x, getattr(self, x)) - else: - setattr(newobj, x, copy.copy(getattr(self, x))) - - newobj.posted = False - if isinstance(env, str): - newobj.env = self.bld.all_envs[env].derive() - else: - newobj.env = env.derive() - - return newobj - -def declare_chain(name='', rule=None, reentrant=None, color='BLUE', - ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False): - """ - Creates a new mapping and a task class for processing files by extension. - See Tools/flex.py for an example. - - :param name: name for the task class - :type name: string - :param rule: function to execute or string to be compiled in a function - :type rule: string or function - :param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable) - :type reentrant: int - :param color: color for the task output - :type color: string - :param ext_in: execute the task only after the files of such extensions are created - :type ext_in: list of string - :param ext_out: execute the task only before files of such extensions are processed - :type ext_out: list of string - :param before: execute instances of this task before classes of the given names - :type before: list of string - :param after: execute instances of this task after classes of the given names - :type after: list of string - :param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order) - :type decider: function - :param scan: scanner function for the task - :type scan: function - :param install_path: installation path for the output nodes - :type install_path: string - """ - ext_in = 
Utils.to_list(ext_in) - ext_out = Utils.to_list(ext_out) - if not name: - name = rule - cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell) - - def x_file(self, node): - if ext_in: - _ext_in = ext_in[0] - - tsk = self.create_task(name, node) - cnt = 0 - - ext = decider(self, node) if decider else cls.ext_out - for x in ext: - k = node.change_ext(x, ext_in=_ext_in) - tsk.outputs.append(k) - - if reentrant != None: - if cnt < int(reentrant): - self.source.append(k) - else: - # reinject downstream files into the build - for y in self.mappings: # ~ nfile * nextensions :-/ - if k.name.endswith(y): - self.source.append(k) - break - cnt += 1 - - if install_path: - self.install_task = self.add_install_files(install_to=install_path, install_from=tsk.outputs) - return tsk - - for x in cls.ext_in: - task_gen.mappings[x] = x_file - return x_file - -def taskgen_method(func): - """ - Decorator that registers method as a task generator method. 
- The function must accept a task generator as first parameter:: - - from waflib.TaskGen import taskgen_method - @taskgen_method - def mymethod(self): - pass - - :param func: task generator method to add - :type func: function - :rtype: function - """ - setattr(task_gen, func.__name__, func) - return func - -def feature(*k): - """ - Decorator that registers a task generator method that will be executed when the - object attribute ``feature`` contains the corresponding key(s):: - - from waflib.Task import feature - @feature('myfeature') - def myfunction(self): - print('that is my feature!') - def build(bld): - bld(features='myfeature') - - :param k: feature names - :type k: list of string - """ - def deco(func): - setattr(task_gen, func.__name__, func) - for name in k: - feats[name].update([func.__name__]) - return func - return deco - -def before_method(*k): - """ - Decorator that registera task generator method which will be executed - before the functions of given name(s):: - - from waflib.TaskGen import feature, before - @feature('myfeature') - @before_method('fun2') - def fun1(self): - print('feature 1!') - @feature('myfeature') - def fun2(self): - print('feature 2!') - def build(bld): - bld(features='myfeature') - - :param k: method names - :type k: list of string - """ - def deco(func): - setattr(task_gen, func.__name__, func) - for fun_name in k: - task_gen.prec[func.__name__].add(fun_name) - return func - return deco -before = before_method - -def after_method(*k): - """ - Decorator that registers a task generator method which will be executed - after the functions of given name(s):: - - from waflib.TaskGen import feature, after - @feature('myfeature') - @after_method('fun2') - def fun1(self): - print('feature 1!') - @feature('myfeature') - def fun2(self): - print('feature 2!') - def build(bld): - bld(features='myfeature') - - :param k: method names - :type k: list of string - """ - def deco(func): - setattr(task_gen, func.__name__, func) - for fun_name in 
k: - task_gen.prec[fun_name].add(func.__name__) - return func - return deco -after = after_method - -def extension(*k): - """ - Decorator that registers a task generator method which will be invoked during - the processing of source files for the extension given:: - - from waflib import Task - class mytask(Task): - run_str = 'cp ${SRC} ${TGT}' - @extension('.moo') - def create_maa_file(self, node): - self.create_task('mytask', node, node.change_ext('.maa')) - def build(bld): - bld(source='foo.moo') - """ - def deco(func): - setattr(task_gen, func.__name__, func) - for x in k: - task_gen.mappings[x] = func - return func - return deco - -@taskgen_method -def to_nodes(self, lst, path=None): - """ - Flatten the input list of string/nodes/lists into a list of nodes. - - It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`. - It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`: - - :param lst: input list - :type lst: list of string and nodes - :param path: path from which to search the nodes (by default, :py:attr:`waflib.TaskGen.task_gen.path`) - :type path: :py:class:`waflib.Tools.Node.Node` - :rtype: list of :py:class:`waflib.Tools.Node.Node` - """ - tmp = [] - path = path or self.path - find = path.find_resource - - if isinstance(lst, Node.Node): - lst = [lst] - - for x in Utils.to_list(lst): - if isinstance(x, str): - node = find(x) - elif hasattr(x, 'name'): - node = x - else: - tmp.extend(self.to_nodes(x)) - continue - if not node: - raise Errors.WafError('source not found: %r in %r' % (x, self)) - tmp.append(node) - return tmp - -@feature('*') -def process_source(self): - """ - Processes each element in the attribute ``source`` by extension. - - #. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first. - #. 
File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension` - #. The method is retrieved through :py:meth:`waflib.TaskGen.task_gen.get_hook` - #. When called, the methods may modify self.source to append more source to process - #. The mappings can map an extension or a filename (see the code below) - """ - self.source = self.to_nodes(getattr(self, 'source', [])) - for node in self.source: - self.get_hook(node)(self, node) - -@feature('*') -@before_method('process_source') -def process_rule(self): - """ - Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled:: - - def build(bld): - bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt') - - Main attributes processed: - - * rule: command to execute, it can be a tuple of strings for multiple commands - * chmod: permissions for the resulting files (integer value such as Utils.O755) - * shell: set to False to execute the command directly (default is True to use a shell) - * scan: scanner function - * vars: list of variables to trigger rebuilds, such as CFLAGS - * cls_str: string to display when executing the task - * cls_keyword: label to display when executing the task - * cache_rule: by default, try to re-use similar classes, set to False to disable - * source: list of Node or string objects representing the source files required by this task - * target: list of Node or string objects representing the files that this task creates - * cwd: current working directory (Node or string) - * stdout: standard output, set to None to prevent waf from capturing the text - * stderr: standard error, set to None to prevent waf from capturing the text - * timeout: timeout for command execution (Python 3) - * always: whether to always run the command (False by default) - * deep_inputs: whether the task must depend on the input file tasks too (False by default) - """ - if not getattr(self, 'rule', None): - return - 
- # create the task class - name = str(getattr(self, 'name', None) or self.target or getattr(self.rule, '__name__', self.rule)) - - # or we can put the class in a cache for performance reasons - try: - cache = self.bld.cache_rule_attr - except AttributeError: - cache = self.bld.cache_rule_attr = {} - - chmod = getattr(self, 'chmod', None) - shell = getattr(self, 'shell', True) - color = getattr(self, 'color', 'BLUE') - scan = getattr(self, 'scan', None) - _vars = getattr(self, 'vars', []) - cls_str = getattr(self, 'cls_str', None) - cls_keyword = getattr(self, 'cls_keyword', None) - use_cache = getattr(self, 'cache_rule', 'True') - deep_inputs = getattr(self, 'deep_inputs', False) - - scan_val = has_deps = hasattr(self, 'deps') - if scan: - scan_val = id(scan) - - key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars, deep_inputs)) - - cls = None - if use_cache: - try: - cls = cache[key] - except KeyError: - pass - if not cls: - rule = self.rule - if chmod is not None: - def chmod_fun(tsk): - for x in tsk.outputs: - os.chmod(x.abspath(), tsk.generator.chmod) - if isinstance(rule, tuple): - rule = list(rule) - rule.append(chmod_fun) - rule = tuple(rule) - else: - rule = (rule, chmod_fun) - - cls = Task.task_factory(name, rule, _vars, shell=shell, color=color) - - if cls_str: - setattr(cls, '__str__', self.cls_str) - - if cls_keyword: - setattr(cls, 'keyword', self.cls_keyword) - - if deep_inputs: - Task.deep_inputs(cls) - - if scan: - cls.scan = self.scan - elif has_deps: - def scan(self): - nodes = [] - for x in self.generator.to_list(getattr(self.generator, 'deps', None)): - node = self.generator.path.find_resource(x) - if not node: - self.generator.bld.fatal('Could not find %r (was it declared?)' % x) - nodes.append(node) - return [nodes, []] - cls.scan = scan - - if use_cache: - cache[key] = cls - - # now create one instance - tsk = self.create_task(name) - - for x in ('after', 'before', 'ext_in', 'ext_out'): - 
setattr(tsk, x, getattr(self, x, [])) - - if hasattr(self, 'stdout'): - tsk.stdout = self.stdout - - if hasattr(self, 'stderr'): - tsk.stderr = self.stderr - - if getattr(self, 'timeout', None): - tsk.timeout = self.timeout - - if getattr(self, 'always', None): - tsk.always_run = True - - if getattr(self, 'target', None): - if isinstance(self.target, str): - self.target = self.target.split() - if not isinstance(self.target, list): - self.target = [self.target] - for x in self.target: - if isinstance(x, str): - tsk.outputs.append(self.path.find_or_declare(x)) - else: - x.parent.mkdir() # if a node was given, create the required folders - tsk.outputs.append(x) - if getattr(self, 'install_path', None): - self.install_task = self.add_install_files(install_to=self.install_path, - install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644)) - - if getattr(self, 'source', None): - tsk.inputs = self.to_nodes(self.source) - # bypass the execution of process_source by setting the source to an empty list - self.source = [] - - if getattr(self, 'cwd', None): - tsk.cwd = self.cwd - - if isinstance(tsk.run, functools.partial): - # Python documentation says: "partial objects defined in classes - # behave like static methods and do not transform into bound - # methods during instance attribute look-up." - tsk.run = functools.partial(tsk.run, tsk) - -@feature('seq') -def sequence_order(self): - """ - Adds a strict sequential constraint between the tasks generated by task generators. - It works because task generators are posted in order. - It will not post objects which belong to other folders. - - Example:: - - bld(features='javac seq') - bld(features='jar seq') - - To start a new sequence, set the attribute seq_start, for example:: - - obj = bld(features='seq') - obj.seq_start = True - - Note that the method is executed in last position. This is more an - example than a widely-used solution. 
- """ - if self.meths and self.meths[-1] != 'sequence_order': - self.meths.append('sequence_order') - return - - if getattr(self, 'seq_start', None): - return - - # all the tasks previously declared must be run before these - if getattr(self.bld, 'prev', None): - self.bld.prev.post() - for x in self.bld.prev.tasks: - for y in self.tasks: - y.set_run_after(x) - - self.bld.prev = self - - -re_m4 = re.compile(r'@(\w+)@', re.M) - -class subst_pc(Task.Task): - """ - Creates *.pc* files from *.pc.in*. The task is executed whenever an input variable used - in the substitution changes. - """ - - def force_permissions(self): - "Private for the time being, we will probably refactor this into run_str=[run1,chmod]" - if getattr(self.generator, 'chmod', None): - for x in self.outputs: - os.chmod(x.abspath(), self.generator.chmod) - - def run(self): - "Substitutes variables in a .in file" - - if getattr(self.generator, 'is_copy', None): - for i, x in enumerate(self.outputs): - x.write(self.inputs[i].read('rb'), 'wb') - stat = os.stat(self.inputs[i].abspath()) # Preserve mtime of the copy - os.utime(self.outputs[i].abspath(), (stat.st_atime, stat.st_mtime)) - self.force_permissions() - return None - - if getattr(self.generator, 'fun', None): - ret = self.generator.fun(self) - if not ret: - self.force_permissions() - return ret - - code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'latin-1')) - if getattr(self.generator, 'subst_fun', None): - code = self.generator.subst_fun(self, code) - if code is not None: - self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1')) - self.force_permissions() - return None - - # replace all % by %% to prevent errors by % signs - code = code.replace('%', '%%') - - # extract the vars foo into lst and replace @foo@ by %(foo)s - lst = [] - def repl(match): - g = match.group - if g(1): - lst.append(g(1)) - return "%%(%s)s" % g(1) - return '' - code = getattr(self.generator, 're_m4', re_m4).sub(repl, 
code) - - try: - d = self.generator.dct - except AttributeError: - d = {} - for x in lst: - tmp = getattr(self.generator, x, '') or self.env[x] or self.env[x.upper()] - try: - tmp = ''.join(tmp) - except TypeError: - tmp = str(tmp) - d[x] = tmp - - code = code % d - self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1')) - self.generator.bld.raw_deps[self.uid()] = lst - - # make sure the signature is updated - try: - delattr(self, 'cache_sig') - except AttributeError: - pass - - self.force_permissions() - - def sig_vars(self): - """ - Compute a hash (signature) of the variables used in the substitution - """ - bld = self.generator.bld - env = self.env - upd = self.m.update - - if getattr(self.generator, 'fun', None): - upd(Utils.h_fun(self.generator.fun).encode()) - if getattr(self.generator, 'subst_fun', None): - upd(Utils.h_fun(self.generator.subst_fun).encode()) - - # raw_deps: persistent custom values returned by the scanner - vars = self.generator.bld.raw_deps.get(self.uid(), []) - - # hash both env vars and task generator attributes - act_sig = bld.hash_env_vars(env, vars) - upd(act_sig) - - lst = [getattr(self.generator, x, '') for x in vars] - upd(Utils.h_list(lst)) - - return self.m.digest() - -@extension('.pc.in') -def add_pcfile(self, node): - """ - Processes *.pc.in* files to *.pc*. 
Installs the results to ``${PREFIX}/lib/pkgconfig/`` by default - - def build(bld): - bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/') - """ - tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in')) - self.install_task = self.add_install_files( - install_to=getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), install_from=tsk.outputs) - -class subst(subst_pc): - pass - -@feature('subst') -@before_method('process_source', 'process_rule') -def process_subst(self): - """ - Defines a transformation that substitutes the contents of *source* files to *target* files:: - - def build(bld): - bld( - features='subst', - source='foo.c.in', - target='foo.c', - install_path='${LIBDIR}/pkgconfig', - VAR = 'val' - ) - - The input files are supposed to contain macros of the form *@VAR@*, where *VAR* is an argument - of the task generator object. - - This method overrides the processing by :py:meth:`waflib.TaskGen.process_source`. - """ - - src = Utils.to_list(getattr(self, 'source', [])) - if isinstance(src, Node.Node): - src = [src] - tgt = Utils.to_list(getattr(self, 'target', [])) - if isinstance(tgt, Node.Node): - tgt = [tgt] - if len(src) != len(tgt): - raise Errors.WafError('invalid number of source/target for %r' % self) - - for x, y in zip(src, tgt): - if not x or not y: - raise Errors.WafError('null source or target for %r' % self) - a, b = None, None - - if isinstance(x, str) and isinstance(y, str) and x == y: - a = self.path.find_node(x) - b = self.path.get_bld().make_node(y) - if not os.path.isfile(b.abspath()): - b.parent.mkdir() - else: - if isinstance(x, str): - a = self.path.find_resource(x) - elif isinstance(x, Node.Node): - a = x - if isinstance(y, str): - b = self.path.find_or_declare(y) - elif isinstance(y, Node.Node): - b = y - - if not a: - raise Errors.WafError('could not find %r for %r' % (x, self)) - - tsk = self.create_task('subst', a, b) - for k in ('after', 'before', 'ext_in', 'ext_out'): - val = getattr(self, k, None) - 
if val: - setattr(tsk, k, val) - - # paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies - for xt in HEADER_EXTS: - if b.name.endswith(xt): - tsk.ext_in = tsk.ext_in + ['.h'] - break - - inst_to = getattr(self, 'install_path', None) - if inst_to: - self.install_task = self.add_install_files(install_to=inst_to, - install_from=b, chmod=getattr(self, 'chmod', Utils.O644)) - - self.source = [] - diff --git a/waflib/Tools/__init__.py b/waflib/Tools/__init__.py deleted file mode 100644 index 079df35..0000000 --- a/waflib/Tools/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) diff --git a/waflib/Tools/ar.py b/waflib/Tools/ar.py deleted file mode 100644 index b39b645..0000000 --- a/waflib/Tools/ar.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2018 (ita) -# Ralf Habacker, 2006 (rh) - -""" -The **ar** program creates static libraries. This tool is almost always loaded -from others (C, C++, D, etc) for static library support. 
-""" - -from waflib.Configure import conf - -@conf -def find_ar(conf): - """Configuration helper used by C/C++ tools to enable the support for static libraries""" - conf.load('ar') - -def configure(conf): - """Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``""" - conf.find_program('ar', var='AR') - conf.add_os_flags('ARFLAGS') - if not conf.env.ARFLAGS: - conf.env.ARFLAGS = ['rcs'] - diff --git a/waflib/Tools/asm.py b/waflib/Tools/asm.py deleted file mode 100644 index b6f26fb..0000000 --- a/waflib/Tools/asm.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2008-2018 (ita) - -""" -Assembly support, used by tools such as gas and nasm - -To declare targets using assembly:: - - def configure(conf): - conf.load('gcc gas') - - def build(bld): - bld( - features='c cstlib asm', - source = 'test.S', - target = 'asmtest') - - bld( - features='asm asmprogram', - source = 'test.S', - target = 'asmtest') - -Support for pure asm programs and libraries should also work:: - - def configure(conf): - conf.load('nasm') - conf.find_program('ld', 'ASLINK') - - def build(bld): - bld( - features='asm asmprogram', - source = 'test.S', - target = 'asmtest') -""" - -from waflib import Task -from waflib.Tools.ccroot import link_task, stlink_task -from waflib.TaskGen import extension - -class asm(Task.Task): - """ - Compiles asm files by gas/nasm/yasm/... 
- """ - color = 'BLUE' - run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}' - -@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP') -def asm_hook(self, node): - """ - Binds the asm extension to the asm task - - :param node: input file - :type node: :py:class:`waflib.Node.Node` - """ - return self.create_compiled_task('asm', node) - -class asmprogram(link_task): - "Links object files into a c program" - run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}' - ext_out = ['.bin'] - inst_to = '${BINDIR}' - -class asmshlib(asmprogram): - "Links object files into a c shared library" - inst_to = '${LIBDIR}' - -class asmstlib(stlink_task): - "Links object files into a c static library" - pass # do not remove - -def configure(conf): - conf.env.ASMPATH_ST = '-I%s' diff --git a/waflib/Tools/bison.py b/waflib/Tools/bison.py deleted file mode 100644 index eef56dc..0000000 --- a/waflib/Tools/bison.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# John O'Meara, 2006 -# Thomas Nagy 2009-2018 (ita) - -""" -The **bison** program is a code generator which creates C or C++ files. -The generated files are compiled into object files. -""" - -from waflib import Task -from waflib.TaskGen import extension - -class bison(Task.Task): - """Compiles bison files""" - color = 'BLUE' - run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}' - ext_out = ['.h'] # just to make sure - -@extension('.y', '.yc', '.yy') -def big_bison(self, node): - """ - Creates a bison task, which must be executed from the directory of the output file. 
- """ - has_h = '-d' in self.env.BISONFLAGS - - outs = [] - if node.name.endswith('.yc'): - outs.append(node.change_ext('.tab.cc')) - if has_h: - outs.append(node.change_ext('.tab.hh')) - else: - outs.append(node.change_ext('.tab.c')) - if has_h: - outs.append(node.change_ext('.tab.h')) - - tsk = self.create_task('bison', node, outs) - tsk.cwd = node.parent.get_bld() - - # and the c/cxx file must be compiled too - self.source.append(outs[0]) - -def configure(conf): - """ - Detects the *bison* program - """ - conf.find_program('bison', var='BISON') - conf.env.BISONFLAGS = ['-d'] - diff --git a/waflib/Tools/c.py b/waflib/Tools/c.py deleted file mode 100644 index effd6b6..0000000 --- a/waflib/Tools/c.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2018 (ita) - -"Base for c programs/libraries" - -from waflib import TaskGen, Task -from waflib.Tools import c_preproc -from waflib.Tools.ccroot import link_task, stlink_task - -@TaskGen.extension('.c') -def c_hook(self, node): - "Binds the c file extensions create :py:class:`waflib.Tools.c.c` instances" - if not self.env.CC and self.env.CXX: - return self.create_compiled_task('cxx', node) - return self.create_compiled_task('c', node) - -class c(Task.Task): - "Compiles C files into object files" - run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}' - vars = ['CCDEPS'] # unused variable to depend on, just in case - ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] - scan = c_preproc.scan - -class cprogram(link_task): - "Links object files into c programs" - run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} 
${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' - ext_out = ['.bin'] - vars = ['LINKDEPS'] - inst_to = '${BINDIR}' - -class cshlib(cprogram): - "Links object files into c shared libraries" - inst_to = '${LIBDIR}' - -class cstlib(stlink_task): - "Links object files into a c static libraries" - pass # do not remove - diff --git a/waflib/Tools/c_aliases.py b/waflib/Tools/c_aliases.py deleted file mode 100644 index c9d5369..0000000 --- a/waflib/Tools/c_aliases.py +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2015 (ita) - -"base for all c/c++ programs and libraries" - -from waflib import Utils, Errors -from waflib.Configure import conf - -def get_extensions(lst): - """ - Returns the file extensions for the list of files given as input - - :param lst: files to process - :list lst: list of string or :py:class:`waflib.Node.Node` - :return: list of file extensions - :rtype: list of string - """ - ret = [] - for x in Utils.to_list(lst): - if not isinstance(x, str): - x = x.name - ret.append(x[x.rfind('.') + 1:]) - return ret - -def sniff_features(**kw): - """ - Computes and returns the features required for a task generator by - looking at the file extensions. 
This aimed for C/C++ mainly:: - - snif_features(source=['foo.c', 'foo.cxx'], type='shlib') - # returns ['cxx', 'c', 'cxxshlib', 'cshlib'] - - :param source: source files to process - :type source: list of string or :py:class:`waflib.Node.Node` - :param type: object type in *program*, *shlib* or *stlib* - :type type: string - :return: the list of features for a task generator processing the source files - :rtype: list of string - """ - exts = get_extensions(kw['source']) - typ = kw['typ'] - feats = [] - - # watch the order, cxx will have the precedence - for x in 'cxx cpp c++ cc C'.split(): - if x in exts: - feats.append('cxx') - break - - if 'c' in exts or 'vala' in exts or 'gs' in exts: - feats.append('c') - - for x in 'f f90 F F90 for FOR'.split(): - if x in exts: - feats.append('fc') - break - - if 'd' in exts: - feats.append('d') - - if 'java' in exts: - feats.append('java') - return 'java' - - if typ in ('program', 'shlib', 'stlib'): - will_link = False - for x in feats: - if x in ('cxx', 'd', 'fc', 'c'): - feats.append(x + typ) - will_link = True - if not will_link and not kw.get('features', []): - raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw) - return feats - -def set_features(kw, typ): - """ - Inserts data in the input dict *kw* based on existing data and on the type of target - required (typ). 
- - :param kw: task generator parameters - :type kw: dict - :param typ: type of target - :type typ: string - """ - kw['typ'] = typ - kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw)) - -@conf -def program(bld, *k, **kw): - """ - Alias for creating programs by looking at the file extensions:: - - def build(bld): - bld.program(source='foo.c', target='app') - # equivalent to: - # bld(features='c cprogram', source='foo.c', target='app') - - """ - set_features(kw, 'program') - return bld(*k, **kw) - -@conf -def shlib(bld, *k, **kw): - """ - Alias for creating shared libraries by looking at the file extensions:: - - def build(bld): - bld.shlib(source='foo.c', target='app') - # equivalent to: - # bld(features='c cshlib', source='foo.c', target='app') - - """ - set_features(kw, 'shlib') - return bld(*k, **kw) - -@conf -def stlib(bld, *k, **kw): - """ - Alias for creating static libraries by looking at the file extensions:: - - def build(bld): - bld.stlib(source='foo.cpp', target='app') - # equivalent to: - # bld(features='cxx cxxstlib', source='foo.cpp', target='app') - - """ - set_features(kw, 'stlib') - return bld(*k, **kw) - -@conf -def objects(bld, *k, **kw): - """ - Alias for creating object files by looking at the file extensions:: - - def build(bld): - bld.objects(source='foo.c', target='app') - # equivalent to: - # bld(features='c', source='foo.c', target='app') - - """ - set_features(kw, 'objects') - return bld(*k, **kw) - diff --git a/waflib/Tools/c_config.py b/waflib/Tools/c_config.py deleted file mode 100644 index d546be9..0000000 --- a/waflib/Tools/c_config.py +++ /dev/null @@ -1,1351 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -""" -C/C++/D configuration helpers -""" - -from __future__ import with_statement - -import os, re, shlex -from waflib import Build, Utils, Task, Options, Logs, Errors, Runner -from waflib.TaskGen import after_method, feature -from waflib.Configure import 
conf - -WAF_CONFIG_H = 'config.h' -"""default name for the config.h file""" - -DEFKEYS = 'define_key' -INCKEYS = 'include_key' - -SNIP_EMPTY_PROGRAM = ''' -int main(int argc, char **argv) { - (void)argc; (void)argv; - return 0; -} -''' - -MACRO_TO_DESTOS = { -'__linux__' : 'linux', -'__GNU__' : 'gnu', # hurd -'__FreeBSD__' : 'freebsd', -'__NetBSD__' : 'netbsd', -'__OpenBSD__' : 'openbsd', -'__sun' : 'sunos', -'__hpux' : 'hpux', -'__sgi' : 'irix', -'_AIX' : 'aix', -'__CYGWIN__' : 'cygwin', -'__MSYS__' : 'cygwin', -'_UWIN' : 'uwin', -'_WIN64' : 'win32', -'_WIN32' : 'win32', -# Note about darwin: this is also tested with 'defined __APPLE__ && defined __MACH__' somewhere below in this file. -'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__' : 'darwin', -'__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__' : 'darwin', # iphone -'__QNX__' : 'qnx', -'__native_client__' : 'nacl' # google native client platform -} - -MACRO_TO_DEST_CPU = { -'__x86_64__' : 'x86_64', -'__amd64__' : 'x86_64', -'__i386__' : 'x86', -'__ia64__' : 'ia', -'__mips__' : 'mips', -'__sparc__' : 'sparc', -'__alpha__' : 'alpha', -'__aarch64__' : 'aarch64', -'__thumb__' : 'thumb', -'__arm__' : 'arm', -'__hppa__' : 'hppa', -'__powerpc__' : 'powerpc', -'__ppc__' : 'powerpc', -'__convex__' : 'convex', -'__m68k__' : 'm68k', -'__s390x__' : 's390x', -'__s390__' : 's390', -'__sh__' : 'sh', -'__xtensa__' : 'xtensa', -} - -@conf -def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=None): - """ - Parses flags from the input lines, and adds them to the relevant use variables:: - - def configure(conf): - conf.parse_flags('-O3', 'FOO') - # conf.env.CXXFLAGS_FOO = ['-O3'] - # conf.env.CFLAGS_FOO = ['-O3'] - - :param line: flags - :type line: string - :param uselib_store: where to add the flags - :type uselib_store: string - :param env: config set or conf.env by default - :type env: :py:class:`waflib.ConfigSet.ConfigSet` - """ - - assert(isinstance(line, str)) - - env = env or self.env - - # Issue 811 
and 1371 - if posix is None: - posix = True - if '\\' in line: - posix = ('\\ ' in line) or ('\\\\' in line) - - lex = shlex.shlex(line, posix=posix) - lex.whitespace_split = True - lex.commenters = '' - lst = list(lex) - - # append_unique is not always possible - # for example, apple flags may require both -arch i386 and -arch ppc - uselib = uselib_store - def app(var, val): - env.append_value('%s_%s' % (var, uselib), val) - def appu(var, val): - env.append_unique('%s_%s' % (var, uselib), val) - static = False - while lst: - x = lst.pop(0) - st = x[:2] - ot = x[2:] - - if st == '-I' or st == '/I': - if not ot: - ot = lst.pop(0) - appu('INCLUDES', ot) - elif st == '-i': - tmp = [x, lst.pop(0)] - app('CFLAGS', tmp) - app('CXXFLAGS', tmp) - elif st == '-D' or (env.CXX_NAME == 'msvc' and st == '/D'): # not perfect but.. - if not ot: - ot = lst.pop(0) - app('DEFINES', ot) - elif st == '-l': - if not ot: - ot = lst.pop(0) - prefix = 'STLIB' if (force_static or static) else 'LIB' - app(prefix, ot) - elif st == '-L': - if not ot: - ot = lst.pop(0) - prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH' - appu(prefix, ot) - elif x.startswith('/LIBPATH:'): - prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH' - appu(prefix, x.replace('/LIBPATH:', '')) - elif x.startswith('-std='): - prefix = 'CXXFLAGS' if '++' in x else 'CFLAGS' - app(prefix, x) - elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie'): - app('CFLAGS', x) - app('CXXFLAGS', x) - app('LINKFLAGS', x) - elif x == '-framework': - appu('FRAMEWORK', lst.pop(0)) - elif x.startswith('-F'): - appu('FRAMEWORKPATH', x[2:]) - elif x == '-Wl,-rpath' or x == '-Wl,-R': - app('RPATH', lst.pop(0).lstrip('-Wl,')) - elif x.startswith('-Wl,-R,'): - app('RPATH', x[7:]) - elif x.startswith('-Wl,-R'): - app('RPATH', x[6:]) - elif x.startswith('-Wl,-rpath,'): - app('RPATH', x[11:]) - elif x == '-Wl,-Bstatic' or x == '-Bstatic': - static = True - elif x == '-Wl,-Bdynamic' or x == 
'-Bdynamic': - static = False - elif x.startswith('-Wl') or x in ('-rdynamic', '-pie'): - app('LINKFLAGS', x) - elif x.startswith(('-m', '-f', '-dynamic', '-O', '-g')): - # Adding the -W option breaks python builds on Openindiana - app('CFLAGS', x) - app('CXXFLAGS', x) - elif x.startswith('-bundle'): - app('LINKFLAGS', x) - elif x.startswith(('-undefined', '-Xlinker')): - arg = lst.pop(0) - app('LINKFLAGS', [x, arg]) - elif x.startswith(('-arch', '-isysroot')): - tmp = [x, lst.pop(0)] - app('CFLAGS', tmp) - app('CXXFLAGS', tmp) - app('LINKFLAGS', tmp) - elif x.endswith(('.a', '.so', '.dylib', '.lib')): - appu('LINKFLAGS', x) # not cool, #762 - else: - self.to_log('Unhandled flag %r' % x) - -@conf -def validate_cfg(self, kw): - """ - Searches for the program *pkg-config* if missing, and validates the - parameters to pass to :py:func:`waflib.Tools.c_config.exec_cfg`. - - :param path: the **-config program to use** (default is *pkg-config*) - :type path: list of string - :param msg: message to display to describe the test executed - :type msg: string - :param okmsg: message to display when the test is successful - :type okmsg: string - :param errmsg: message to display in case of error - :type errmsg: string - """ - if not 'path' in kw: - if not self.env.PKGCONFIG: - self.find_program('pkg-config', var='PKGCONFIG') - kw['path'] = self.env.PKGCONFIG - - # verify that exactly one action is requested - s = ('atleast_pkgconfig_version' in kw) + ('modversion' in kw) + ('package' in kw) - if s != 1: - raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set') - if not 'msg' in kw: - if 'atleast_pkgconfig_version' in kw: - kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version'] - elif 'modversion' in kw: - kw['msg'] = 'Checking for %r version' % kw['modversion'] - else: - kw['msg'] = 'Checking for %r' %(kw['package']) - - # let the modversion check set the okmsg to the detected version - if not 'okmsg' in kw 
and not 'modversion' in kw: - kw['okmsg'] = 'yes' - if not 'errmsg' in kw: - kw['errmsg'] = 'not found' - - # pkg-config version - if 'atleast_pkgconfig_version' in kw: - pass - elif 'modversion' in kw: - if not 'uselib_store' in kw: - kw['uselib_store'] = kw['modversion'] - if not 'define_name' in kw: - kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store']) - else: - if not 'uselib_store' in kw: - kw['uselib_store'] = Utils.to_list(kw['package'])[0].upper() - if not 'define_name' in kw: - kw['define_name'] = self.have_define(kw['uselib_store']) - -@conf -def exec_cfg(self, kw): - """ - Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags: - - * if atleast_pkgconfig_version is given, check that pkg-config has the version n and return - * if modversion is given, then return the module version - * else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable - - :param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests) - :type atleast_pkgconfig_version: string - :param package: package name, for example *gtk+-2.0* - :type package: string - :param uselib_store: if the test is successful, define HAVE\\_*name*. It is also used to define *conf.env.FLAGS_name* variables. 
- :type uselib_store: string - :param modversion: if provided, return the version of the given module and define *name*\\_VERSION - :type modversion: string - :param args: arguments to give to *package* when retrieving flags - :type args: list of string - :param variables: return the values of particular variables - :type variables: list of string - :param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES) - :type define_variable: dict(string: string) - """ - - path = Utils.to_list(kw['path']) - env = self.env.env or None - if kw.get('pkg_config_path'): - if not env: - env = dict(self.environ) - env['PKG_CONFIG_PATH'] = kw['pkg_config_path'] - - def define_it(): - define_name = kw['define_name'] - # by default, add HAVE_X to the config.h, else provide DEFINES_X for use=X - if kw.get('global_define', 1): - self.define(define_name, 1, False) - else: - self.env.append_unique('DEFINES_%s' % kw['uselib_store'], "%s=1" % define_name) - - if kw.get('add_have_to_env', 1): - self.env[define_name] = 1 - - # pkg-config version - if 'atleast_pkgconfig_version' in kw: - cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']] - self.cmd_and_log(cmd, env=env) - return - - # single version for a module - if 'modversion' in kw: - version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip() - if not 'okmsg' in kw: - kw['okmsg'] = version - self.define(kw['define_name'], version) - return version - - lst = [] + path - - defi = kw.get('define_variable') - if not defi: - defi = self.env.PKG_CONFIG_DEFINES or {} - for key, val in defi.items(): - lst.append('--define-variable=%s=%s' % (key, val)) - - static = kw.get('force_static', False) - if 'args' in kw: - args = Utils.to_list(kw['args']) - if '--static' in args or '--static-libs' in args: - static = True - lst += args - - # tools like pkgconf expect the package argument after the -- ones -_- - lst.extend(Utils.to_list(kw['package'])) - - # 
retrieving variables of a module - if 'variables' in kw: - v_env = kw.get('env', self.env) - vars = Utils.to_list(kw['variables']) - for v in vars: - val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip() - var = '%s_%s' % (kw['uselib_store'], v) - v_env[var] = val - return - - # so we assume the command-line will output flags to be parsed afterwards - ret = self.cmd_and_log(lst, env=env) - - define_it() - self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix')) - return ret - -@conf -def check_cfg(self, *k, **kw): - """ - Checks for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc). - This wraps internal calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg` - - A few examples:: - - def configure(conf): - conf.load('compiler_c') - conf.check_cfg(package='glib-2.0', args='--libs --cflags') - conf.check_cfg(package='pango') - conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs']) - conf.check_cfg(package='pango', - args=['pango >= 0.1.0', 'pango < 9.9.9', '--cflags', '--libs'], - msg="Checking for 'pango 0.1.0'") - conf.check_cfg(path='sdl-config', args='--cflags --libs', package='', uselib_store='SDL') - conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', - package='', uselib_store='OPEN_MPI', mandatory=False) - # variables - conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO') - print(conf.env.FOO_includedir) - """ - self.validate_cfg(kw) - if 'msg' in kw: - self.start_msg(kw['msg'], **kw) - ret = None - try: - ret = self.exec_cfg(kw) - except self.errors.WafError as e: - if 'errmsg' in kw: - self.end_msg(kw['errmsg'], 'YELLOW', **kw) - if Logs.verbose > 1: - self.to_log('Command failure: %s' % e) - self.fatal('The configuration failed') - else: - if not ret: - ret = True - kw['success'] = ret - if 'okmsg' in kw: - 
self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw) - - return ret - -def build_fun(bld): - """ - Build function that is used for running configuration tests with ``conf.check()`` - """ - if bld.kw['compile_filename']: - node = bld.srcnode.make_node(bld.kw['compile_filename']) - node.write(bld.kw['code']) - - o = bld(features=bld.kw['features'], source=bld.kw['compile_filename'], target='testprog') - - for k, v in bld.kw.items(): - setattr(o, k, v) - - if not bld.kw.get('quiet'): - bld.conf.to_log("==>\n%s\n<==" % bld.kw['code']) - -@conf -def validate_c(self, kw): - """ - Pre-checks the parameters that will be given to :py:func:`waflib.Configure.run_build` - - :param compiler: c or cxx (tries to guess what is best) - :type compiler: string - :param type: cprogram, cshlib, cstlib - not required if *features are given directly* - :type type: binary to create - :param feature: desired features for the task generator that will execute the test, for example ``cxx cxxstlib`` - :type feature: list of string - :param fragment: provide a piece of code for the test (default is to let the system create one) - :type fragment: string - :param uselib_store: define variables after the test is executed (IMPORTANT!) 
- :type uselib_store: string - :param use: parameters to use for building (just like the normal *use* keyword) - :type use: list of string - :param define_name: define to set when the check is over - :type define_name: string - :param execute: execute the resulting binary - :type execute: bool - :param define_ret: if execute is set to True, use the execution output in both the define and the return value - :type define_ret: bool - :param header_name: check for a particular header - :type header_name: string - :param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers - :type auto_add_header_name: bool - """ - for x in ('type_name', 'field_name', 'function_name'): - if x in kw: - Logs.warn('Invalid argument %r in test' % x) - - if not 'build_fun' in kw: - kw['build_fun'] = build_fun - - if not 'env' in kw: - kw['env'] = self.env.derive() - env = kw['env'] - - if not 'compiler' in kw and not 'features' in kw: - kw['compiler'] = 'c' - if env.CXX_NAME and Task.classes.get('cxx'): - kw['compiler'] = 'cxx' - if not self.env.CXX: - self.fatal('a c++ compiler is required') - else: - if not self.env.CC: - self.fatal('a c compiler is required') - - if not 'compile_mode' in kw: - kw['compile_mode'] = 'c' - if 'cxx' in Utils.to_list(kw.get('features', [])) or kw.get('compiler') == 'cxx': - kw['compile_mode'] = 'cxx' - - if not 'type' in kw: - kw['type'] = 'cprogram' - - if not 'features' in kw: - if not 'header_name' in kw or kw.get('link_header_test', True): - kw['features'] = [kw['compile_mode'], kw['type']] # "c ccprogram" - else: - kw['features'] = [kw['compile_mode']] - else: - kw['features'] = Utils.to_list(kw['features']) - - if not 'compile_filename' in kw: - kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '') - - def to_header(dct): - if 'header_name' in dct: - dct = Utils.to_list(dct['header_name']) - return ''.join(['#include <%s>\n' % x for x in dct]) - return '' - 
- if 'framework_name' in kw: - # OSX, not sure this is used anywhere - fwkname = kw['framework_name'] - if not 'uselib_store' in kw: - kw['uselib_store'] = fwkname.upper() - if not kw.get('no_header'): - fwk = '%s/%s.h' % (fwkname, fwkname) - if kw.get('remove_dot_h'): - fwk = fwk[:-2] - val = kw.get('header_name', []) - kw['header_name'] = Utils.to_list(val) + [fwk] - kw['msg'] = 'Checking for framework %s' % fwkname - kw['framework'] = fwkname - - elif 'header_name' in kw: - if not 'msg' in kw: - kw['msg'] = 'Checking for header %s' % kw['header_name'] - - l = Utils.to_list(kw['header_name']) - assert len(l), 'list of headers in header_name is empty' - - kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM - if not 'uselib_store' in kw: - kw['uselib_store'] = l[0].upper() - if not 'define_name' in kw: - kw['define_name'] = self.have_define(l[0]) - - if 'lib' in kw: - if not 'msg' in kw: - kw['msg'] = 'Checking for library %s' % kw['lib'] - if not 'uselib_store' in kw: - kw['uselib_store'] = kw['lib'].upper() - - if 'stlib' in kw: - if not 'msg' in kw: - kw['msg'] = 'Checking for static library %s' % kw['stlib'] - if not 'uselib_store' in kw: - kw['uselib_store'] = kw['stlib'].upper() - - if 'fragment' in kw: - # an additional code fragment may be provided to replace the predefined code - # in custom headers - kw['code'] = kw['fragment'] - if not 'msg' in kw: - kw['msg'] = 'Checking for code snippet' - if not 'errmsg' in kw: - kw['errmsg'] = 'no' - - for (flagsname,flagstype) in (('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')): - if flagsname in kw: - if not 'msg' in kw: - kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname]) - if not 'errmsg' in kw: - kw['errmsg'] = 'no' - - if not 'execute' in kw: - kw['execute'] = False - if kw['execute']: - kw['features'].append('test_exec') - kw['chmod'] = Utils.O755 - - if not 'errmsg' in kw: - kw['errmsg'] = 'not found' - - if not 'okmsg' in kw: - kw['okmsg'] = 'yes' - - if not 'code' in kw: 
- kw['code'] = SNIP_EMPTY_PROGRAM - - # if there are headers to append automatically to the next tests - if self.env[INCKEYS]: - kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code'] - - # in case defines lead to very long command-lines - if kw.get('merge_config_header') or env.merge_config_header: - kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code']) - env.DEFINES = [] # modify the copy - - if not kw.get('success'): - kw['success'] = None - - if 'define_name' in kw: - self.undefine(kw['define_name']) - if not 'msg' in kw: - self.fatal('missing "msg" in conf.check(...)') - -@conf -def post_check(self, *k, **kw): - """ - Sets the variables after a test executed in - :py:func:`waflib.Tools.c_config.check` was run successfully - """ - is_success = 0 - if kw['execute']: - if kw['success'] is not None: - if kw.get('define_ret'): - is_success = kw['success'] - else: - is_success = (kw['success'] == 0) - else: - is_success = (kw['success'] == 0) - - if kw.get('define_name'): - comment = kw.get('comment', '') - define_name = kw['define_name'] - if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str): - if kw.get('global_define', 1): - self.define(define_name, is_success, quote=kw.get('quote', 1), comment=comment) - else: - if kw.get('quote', 1): - succ = '"%s"' % is_success - else: - succ = int(is_success) - val = '%s=%s' % (define_name, succ) - var = 'DEFINES_%s' % kw['uselib_store'] - self.env.append_value(var, val) - else: - if kw.get('global_define', 1): - self.define_cond(define_name, is_success, comment=comment) - else: - var = 'DEFINES_%s' % kw['uselib_store'] - self.env.append_value(var, '%s=%s' % (define_name, int(is_success))) - - # define conf.env.HAVE_X to 1 - if kw.get('add_have_to_env', 1): - if kw.get('uselib_store'): - self.env[self.have_define(kw['uselib_store'])] = 1 - elif kw['execute'] and kw.get('define_ret'): - self.env[define_name] = is_success - else: - self.env[define_name] = 
int(is_success) - - if 'header_name' in kw: - if kw.get('auto_add_header_name'): - self.env.append_value(INCKEYS, Utils.to_list(kw['header_name'])) - - if is_success and 'uselib_store' in kw: - from waflib.Tools import ccroot - # See get_uselib_vars in ccroot.py - _vars = set() - for x in kw['features']: - if x in ccroot.USELIB_VARS: - _vars |= ccroot.USELIB_VARS[x] - - for k in _vars: - x = k.lower() - if x in kw: - self.env.append_value(k + '_' + kw['uselib_store'], kw[x]) - return is_success - -@conf -def check(self, *k, **kw): - """ - Performs a configuration test by calling :py:func:`waflib.Configure.run_build`. - For the complete list of parameters, see :py:func:`waflib.Tools.c_config.validate_c`. - To force a specific compiler, pass ``compiler='c'`` or ``compiler='cxx'`` to the list of arguments - - Besides build targets, complete builds can be given through a build function. All files will - be written to a temporary directory:: - - def build(bld): - lib_node = bld.srcnode.make_node('libdir/liblc1.c') - lib_node.parent.mkdir() - lib_node.write('#include <stdio.h>\\nint lib_func(void) { FILE *f = fopen("foo", "r");}\\n', 'w') - bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc') - conf.check(build_fun=build, msg=msg) - """ - self.validate_c(kw) - self.start_msg(kw['msg'], **kw) - ret = None - try: - ret = self.run_build(*k, **kw) - except self.errors.ConfigurationError: - self.end_msg(kw['errmsg'], 'YELLOW', **kw) - if Logs.verbose > 1: - raise - else: - self.fatal('The configuration failed') - else: - kw['success'] = ret - - ret = self.post_check(*k, **kw) - if not ret: - self.end_msg(kw['errmsg'], 'YELLOW', **kw) - self.fatal('The configuration failed %r' % ret) - else: - self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw) - return ret - -class test_exec(Task.Task): - """ - A task that runs programs after they are built. See :py:func:`waflib.Tools.c_config.test_exec_fun`. 
- """ - color = 'PINK' - def run(self): - if getattr(self.generator, 'rpath', None): - if getattr(self.generator, 'define_ret', False): - self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()]) - else: - self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()]) - else: - env = self.env.env or {} - env.update(dict(os.environ)) - for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'PATH'): - env[var] = self.inputs[0].parent.abspath() + os.path.pathsep + env.get(var, '') - if getattr(self.generator, 'define_ret', False): - self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()], env=env) - else: - self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()], env=env) - -@feature('test_exec') -@after_method('apply_link') -def test_exec_fun(self): - """ - The feature **test_exec** is used to create a task that will to execute the binary - created (link task output) during the build. The exit status will be set - on the build context, so only one program may have the feature *test_exec*. - This is used by configuration tests:: - - def configure(conf): - conf.check(execute=True) - """ - self.create_task('test_exec', self.link_task.outputs[0]) - -@conf -def check_cxx(self, *k, **kw): - """ - Runs a test with a task generator of the form:: - - conf.check(features='cxx cxxprogram', ...) - """ - kw['compiler'] = 'cxx' - return self.check(*k, **kw) - -@conf -def check_cc(self, *k, **kw): - """ - Runs a test with a task generator of the form:: - - conf.check(features='c cprogram', ...) 
- """ - kw['compiler'] = 'c' - return self.check(*k, **kw) - -@conf -def set_define_comment(self, key, comment): - """ - Sets a comment that will appear in the configuration header - - :type key: string - :type comment: string - """ - coms = self.env.DEFINE_COMMENTS - if not coms: - coms = self.env.DEFINE_COMMENTS = {} - coms[key] = comment or '' - -@conf -def get_define_comment(self, key): - """ - Returns the comment associated to a define - - :type key: string - """ - coms = self.env.DEFINE_COMMENTS or {} - return coms.get(key, '') - -@conf -def define(self, key, val, quote=True, comment=''): - """ - Stores a single define and its state into ``conf.env.DEFINES``. The value is cast to an integer (0/1). - - :param key: define name - :type key: string - :param val: value - :type val: int or string - :param quote: enclose strings in quotes (yes by default) - :type quote: bool - """ - assert isinstance(key, str) - if not key: - return - if val is True: - val = 1 - elif val in (False, None): - val = 0 - - if isinstance(val, int) or isinstance(val, float): - s = '%s=%s' - else: - s = quote and '%s="%s"' or '%s=%s' - app = s % (key, str(val)) - - ban = key + '=' - lst = self.env.DEFINES - for x in lst: - if x.startswith(ban): - lst[lst.index(x)] = app - break - else: - self.env.append_value('DEFINES', app) - - self.env.append_unique(DEFKEYS, key) - self.set_define_comment(key, comment) - -@conf -def undefine(self, key, comment=''): - """ - Removes a global define from ``conf.env.DEFINES`` - - :param key: define name - :type key: string - """ - assert isinstance(key, str) - if not key: - return - ban = key + '=' - lst = [x for x in self.env.DEFINES if not x.startswith(ban)] - self.env.DEFINES = lst - self.env.append_unique(DEFKEYS, key) - self.set_define_comment(key, comment) - -@conf -def define_cond(self, key, val, comment=''): - """ - Conditionally defines a name:: - - def configure(conf): - conf.define_cond('A', True) - # equivalent to: - # if val: conf.define('A', 1) 
- # else: conf.undefine('A') - - :param key: define name - :type key: string - :param val: value - :type val: int or string - """ - assert isinstance(key, str) - if not key: - return - if val: - self.define(key, 1, comment=comment) - else: - self.undefine(key, comment=comment) - -@conf -def is_defined(self, key): - """ - Indicates whether a particular define is globally set in ``conf.env.DEFINES``. - - :param key: define name - :type key: string - :return: True if the define is set - :rtype: bool - """ - assert key and isinstance(key, str) - - ban = key + '=' - for x in self.env.DEFINES: - if x.startswith(ban): - return True - return False - -@conf -def get_define(self, key): - """ - Returns the value of an existing define, or None if not found - - :param key: define name - :type key: string - :rtype: string - """ - assert key and isinstance(key, str) - - ban = key + '=' - for x in self.env.DEFINES: - if x.startswith(ban): - return x[len(ban):] - return None - -@conf -def have_define(self, key): - """ - Returns a variable suitable for command-line or header use by removing invalid characters - and prefixing it with ``HAVE_`` - - :param key: define name - :type key: string - :return: the input key prefixed by *HAVE_* and substitute any invalid characters. - :rtype: string - """ - return (self.env.HAVE_PAT or 'HAVE_%s') % Utils.quote_define_name(key) - -@conf -def write_config_header(self, configfile='', guard='', top=False, defines=True, headers=False, remove=True, define_prefix=''): - """ - Writes a configuration header containing defines and includes:: - - def configure(cnf): - cnf.define('A', 1) - cnf.write_config_header('config.h') - - This function only adds include guards (if necessary), consult - :py:func:`waflib.Tools.c_config.get_config_header` for details on the body. 
- - :param configfile: path to the file to create (relative or absolute) - :type configfile: string - :param guard: include guard name to add, by default it is computed from the file name - :type guard: string - :param top: write the configuration header from the build directory (default is from the current path) - :type top: bool - :param defines: add the defines (yes by default) - :type defines: bool - :param headers: add #include in the file - :type headers: bool - :param remove: remove the defines after they are added (yes by default, works like in autoconf) - :type remove: bool - :type define_prefix: string - :param define_prefix: prefix all the defines in the file with a particular prefix - """ - if not configfile: - configfile = WAF_CONFIG_H - waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile) - - node = top and self.bldnode or self.path.get_bld() - node = node.make_node(configfile) - node.parent.mkdir() - - lst = ['/* WARNING! All changes made to this file will be lost! */\n'] - lst.append('#ifndef %s\n#define %s\n' % (waf_guard, waf_guard)) - lst.append(self.get_config_header(defines, headers, define_prefix=define_prefix)) - lst.append('\n#endif /* %s */\n' % waf_guard) - - node.write('\n'.join(lst)) - - # config files must not be removed on "waf clean" - self.env.append_unique(Build.CFG_FILES, [node.abspath()]) - - if remove: - for key in self.env[DEFKEYS]: - self.undefine(key) - self.env[DEFKEYS] = [] - -@conf -def get_config_header(self, defines=True, headers=False, define_prefix=''): - """ - Creates the contents of a ``config.h`` file from the defines and includes - set in conf.env.define_key / conf.env.include_key. No include guards are added. - - A prelude will be added from the variable env.WAF_CONFIG_H_PRELUDE if provided. 
This - can be used to insert complex macros or include guards:: - - def configure(conf): - conf.env.WAF_CONFIG_H_PRELUDE = '#include <unistd.h>\\n' - conf.write_config_header('config.h') - - :param defines: write the defines values - :type defines: bool - :param headers: write include entries for each element in self.env.INCKEYS - :type headers: bool - :type define_prefix: string - :param define_prefix: prefix all the defines with a particular prefix - :return: the contents of a ``config.h`` file - :rtype: string - """ - lst = [] - - if self.env.WAF_CONFIG_H_PRELUDE: - lst.append(self.env.WAF_CONFIG_H_PRELUDE) - - if headers: - for x in self.env[INCKEYS]: - lst.append('#include <%s>' % x) - - if defines: - tbl = {} - for k in self.env.DEFINES: - a, _, b = k.partition('=') - tbl[a] = b - - for k in self.env[DEFKEYS]: - caption = self.get_define_comment(k) - if caption: - caption = ' /* %s */' % caption - try: - txt = '#define %s%s %s%s' % (define_prefix, k, tbl[k], caption) - except KeyError: - txt = '/* #undef %s%s */%s' % (define_prefix, k, caption) - lst.append(txt) - return "\n".join(lst) - -@conf -def cc_add_flags(conf): - """ - Adds CFLAGS / CPPFLAGS from os.environ to conf.env - """ - conf.add_os_flags('CPPFLAGS', dup=False) - conf.add_os_flags('CFLAGS', dup=False) - -@conf -def cxx_add_flags(conf): - """ - Adds CXXFLAGS / CPPFLAGS from os.environ to conf.env - """ - conf.add_os_flags('CPPFLAGS', dup=False) - conf.add_os_flags('CXXFLAGS', dup=False) - -@conf -def link_add_flags(conf): - """ - Adds LINKFLAGS / LDFLAGS from os.environ to conf.env - """ - conf.add_os_flags('LINKFLAGS', dup=False) - conf.add_os_flags('LDFLAGS', dup=False) - -@conf -def cc_load_tools(conf): - """ - Loads the Waf c extensions - """ - if not conf.env.DEST_OS: - conf.env.DEST_OS = Utils.unversioned_sys_platform() - conf.load('c') - -@conf -def cxx_load_tools(conf): - """ - Loads the Waf c++ extensions - """ - if not conf.env.DEST_OS: - conf.env.DEST_OS = 
Utils.unversioned_sys_platform() - conf.load('cxx') - -@conf -def get_cc_version(conf, cc, gcc=False, icc=False, clang=False): - """ - Runs the preprocessor to determine the gcc/icc/clang version - - The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env* - - :raise: :py:class:`waflib.Errors.ConfigurationError` - """ - cmd = cc + ['-dM', '-E', '-'] - env = conf.env.env or None - try: - out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env) - except Errors.WafError: - conf.fatal('Could not determine the compiler version %r' % cmd) - - if gcc: - if out.find('__INTEL_COMPILER') >= 0: - conf.fatal('The intel compiler pretends to be gcc') - if out.find('__GNUC__') < 0 and out.find('__clang__') < 0: - conf.fatal('Could not determine the compiler type') - - if icc and out.find('__INTEL_COMPILER') < 0: - conf.fatal('Not icc/icpc') - - if clang and out.find('__clang__') < 0: - conf.fatal('Not clang/clang++') - if not clang and out.find('__clang__') >= 0: - conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure') - - k = {} - if icc or gcc or clang: - out = out.splitlines() - for line in out: - lst = shlex.split(line) - if len(lst)>2: - key = lst[1] - val = lst[2] - k[key] = val - - def isD(var): - return var in k - - # Some documentation is available at http://predef.sourceforge.net - # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns. 
- if not conf.env.DEST_OS: - conf.env.DEST_OS = '' - for i in MACRO_TO_DESTOS: - if isD(i): - conf.env.DEST_OS = MACRO_TO_DESTOS[i] - break - else: - if isD('__APPLE__') and isD('__MACH__'): - conf.env.DEST_OS = 'darwin' - elif isD('__unix__'): # unix must be tested last as it's a generic fallback - conf.env.DEST_OS = 'generic' - - if isD('__ELF__'): - conf.env.DEST_BINFMT = 'elf' - elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'): - conf.env.DEST_BINFMT = 'pe' - if not conf.env.IMPLIBDIR: - conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files - conf.env.LIBDIR = conf.env.BINDIR - elif isD('__APPLE__'): - conf.env.DEST_BINFMT = 'mac-o' - - if not conf.env.DEST_BINFMT: - # Infer the binary format from the os name. - conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS) - - for i in MACRO_TO_DEST_CPU: - if isD(i): - conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i] - break - - Logs.debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')])) - if icc: - ver = k['__INTEL_COMPILER'] - conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1]) - else: - if isD('__clang__') and isD('__clang_major__'): - conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__']) - else: - # older clang versions and gcc - conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0')) - return k - -@conf -def get_xlc_version(conf, cc): - """ - Returns the Aix compiler version - - :raise: :py:class:`waflib.Errors.ConfigurationError` - """ - cmd = cc + ['-qversion'] - try: - out, err = conf.cmd_and_log(cmd, output=0) - except Errors.WafError: - conf.fatal('Could not find xlc %r' % cmd) - - # the intention is to catch the 8.0 in "IBM XL C/C++ Enterprise Edition V8.0 for AIX..." 
- for v in (r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",): - version_re = re.compile(v, re.I).search - match = version_re(out or err) - if match: - k = match.groupdict() - conf.env.CC_VERSION = (k['major'], k['minor']) - break - else: - conf.fatal('Could not determine the XLC version.') - -@conf -def get_suncc_version(conf, cc): - """ - Returns the Sun compiler version - - :raise: :py:class:`waflib.Errors.ConfigurationError` - """ - cmd = cc + ['-V'] - try: - out, err = conf.cmd_and_log(cmd, output=0) - except Errors.WafError as e: - # Older versions of the compiler exit with non-zero status when reporting their version - if not (hasattr(e, 'returncode') and hasattr(e, 'stdout') and hasattr(e, 'stderr')): - conf.fatal('Could not find suncc %r' % cmd) - out = e.stdout - err = e.stderr - - version = (out or err) - version = version.splitlines()[0] - - # cc: Sun C 5.10 SunOS_i386 2009/06/03 - # cc: Studio 12.5 Sun C++ 5.14 SunOS_sparc Beta 2015/11/17 - # cc: WorkShop Compilers 5.0 98/12/15 C 5.0 - version_re = re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)', re.I).search - match = version_re(version) - if match: - k = match.groupdict() - conf.env.CC_VERSION = (k['major'], k['minor']) - else: - conf.fatal('Could not determine the suncc version.') - -# ============ the --as-needed flag should added during the configuration, not at runtime ========= - -@conf -def add_as_needed(self): - """ - Adds ``--as-needed`` to the *LINKFLAGS* - On some platforms, it is a default flag. In some cases (e.g., in NS-3) it is necessary to explicitly disable this feature with `-Wl,--no-as-needed` flag. 
- """ - if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME): - self.env.append_unique('LINKFLAGS', '-Wl,--as-needed') - -# ============ parallel configuration - -class cfgtask(Task.Task): - """ - A task that executes build configuration tests (calls conf.check) - - Make sure to use locks if concurrent access to the same conf.env data is necessary. - """ - def __init__(self, *k, **kw): - Task.Task.__init__(self, *k, **kw) - self.run_after = set() - - def display(self): - return '' - - def runnable_status(self): - for x in self.run_after: - if not x.hasrun: - return Task.ASK_LATER - return Task.RUN_ME - - def uid(self): - return Utils.SIG_NIL - - def signature(self): - return Utils.SIG_NIL - - def run(self): - conf = self.conf - bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath()) - bld.env = conf.env - bld.init_dirs() - bld.in_msg = 1 # suppress top-level start_msg - bld.logger = self.logger - bld.multicheck_task = self - args = self.args - try: - if 'func' in args: - bld.test(build_fun=args['func'], - msg=args.get('msg', ''), - okmsg=args.get('okmsg', ''), - errmsg=args.get('errmsg', ''), - ) - else: - args['multicheck_mandatory'] = args.get('mandatory', True) - args['mandatory'] = True - try: - bld.check(**args) - finally: - args['mandatory'] = args['multicheck_mandatory'] - except Exception: - return 1 - - def process(self): - Task.Task.process(self) - if 'msg' in self.args: - with self.generator.bld.multicheck_lock: - self.conf.start_msg(self.args['msg']) - if self.hasrun == Task.NOT_RUN: - self.conf.end_msg('test cancelled', 'YELLOW') - elif self.hasrun != Task.SUCCESS: - self.conf.end_msg(self.args.get('errmsg', 'no'), 'YELLOW') - else: - self.conf.end_msg(self.args.get('okmsg', 'yes'), 'GREEN') - -@conf -def multicheck(self, *k, **kw): - """ - Runs configuration tests in parallel; results are printed sequentially at the end of the build - but each test must provide its own msg value to 
display a line:: - - def test_build(ctx): - ctx.in_msg = True # suppress console outputs - ctx.check_large_file(mandatory=False) - - conf.multicheck( - {'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False}, - {'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False}, - {'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'}, - {'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'}, - msg = 'Checking for headers in parallel', - mandatory = True, # mandatory tests raise an error at the end - run_all_tests = True, # try running all tests - ) - - The configuration tests may modify the values in conf.env in any order, and the define - values can affect configuration tests being executed. It is hence recommended - to provide `uselib_store` values with `global_define=False` to prevent such issues. - """ - self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)), **kw) - - # Force a copy so that threads append to the same list at least - # no order is guaranteed, but the values should not disappear at least - for var in ('DEFINES', DEFKEYS): - self.env.append_value(var, []) - self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {} - - # define a task object that will execute our tests - class par(object): - def __init__(self): - self.keep = False - self.task_sigs = {} - self.progress_bar = 0 - def total(self): - return len(tasks) - def to_log(self, *k, **kw): - return - - bld = par() - bld.keep = kw.get('run_all_tests', True) - bld.imp_sigs = {} - tasks = [] - - id_to_task = {} - for dct in k: - x = Task.classes['cfgtask'](bld=bld, env=None) - tasks.append(x) - x.args = dct - x.bld = bld - x.conf = self - x.args = dct - - # bind a logger that will keep the info in memory - x.logger = Logs.make_mem_logger(str(id(x)), self.logger) - - if 'id' in dct: - id_to_task[dct['id']] = x - - # second pass to set dependencies with after_test/before_test 
- for x in tasks: - for key in Utils.to_list(x.args.get('before_tests', [])): - tsk = id_to_task[key] - if not tsk: - raise ValueError('No test named %r' % key) - tsk.run_after.add(x) - for key in Utils.to_list(x.args.get('after_tests', [])): - tsk = id_to_task[key] - if not tsk: - raise ValueError('No test named %r' % key) - x.run_after.add(tsk) - - def it(): - yield tasks - while 1: - yield [] - bld.producer = p = Runner.Parallel(bld, Options.options.jobs) - bld.multicheck_lock = Utils.threading.Lock() - p.biter = it() - - self.end_msg('started') - p.start() - - # flush the logs in order into the config.log - for x in tasks: - x.logger.memhandler.flush() - - self.start_msg('-> processing test results') - if p.error: - for x in p.error: - if getattr(x, 'err_msg', None): - self.to_log(x.err_msg) - self.end_msg('fail', color='RED') - raise Errors.WafError('There is an error in the library, read config.log for more information') - - failure_count = 0 - for x in tasks: - if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN): - failure_count += 1 - - if failure_count: - self.end_msg(kw.get('errmsg', '%s test failed' % failure_count), color='YELLOW', **kw) - else: - self.end_msg('all ok', **kw) - - for x in tasks: - if x.hasrun != Task.SUCCESS: - if x.args.get('mandatory', True): - self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information') - -@conf -def check_gcc_o_space(self, mode='c'): - if int(self.env.CC_VERSION[0]) > 4: - # this is for old compilers - return - self.env.stash() - if mode == 'c': - self.env.CCLNK_TGT_F = ['-o', ''] - elif mode == 'cxx': - self.env.CXXLNK_TGT_F = ['-o', ''] - features = '%s %sshlib' % (mode, mode) - try: - self.check(msg='Checking if the -o link must be split from arguments', fragment=SNIP_EMPTY_PROGRAM, features=features) - except self.errors.ConfigurationError: - self.env.revert() - else: - self.env.commit() - diff --git a/waflib/Tools/c_osx.py b/waflib/Tools/c_osx.py deleted file mode 100644 
index f70b128..0000000 --- a/waflib/Tools/c_osx.py +++ /dev/null @@ -1,193 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy 2008-2018 (ita) - -""" -MacOSX related tools -""" - -import os, shutil, platform -from waflib import Task, Utils -from waflib.TaskGen import taskgen_method, feature, after_method, before_method - -app_info = ''' -<?xml version="1.0" encoding="UTF-8"?> -<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd"> -<plist version="0.9"> -<dict> - <key>CFBundlePackageType</key> - <string>APPL</string> - <key>CFBundleGetInfoString</key> - <string>Created by Waf</string> - <key>CFBundleSignature</key> - <string>????</string> - <key>NOTE</key> - <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string> - <key>CFBundleExecutable</key> - <string>{app_name}</string> -</dict> -</plist> -''' -""" -plist template -""" - -@feature('c', 'cxx') -def set_macosx_deployment_target(self): - """ - see WAF issue 285 and also and also http://trac.macports.org/ticket/17059 - """ - if self.env.MACOSX_DEPLOYMENT_TARGET: - os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET - elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ: - if Utils.unversioned_sys_platform() == 'darwin': - os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2]) - -@taskgen_method -def create_bundle_dirs(self, name, out): - """ - Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp` - """ - dir = out.parent.find_or_declare(name) - dir.mkdir() - macos = dir.find_or_declare(['Contents', 'MacOS']) - macos.mkdir() - return dir - -def bundle_name_for_output(out): - name = out.name - k = name.rfind('.') - if k >= 0: - name = name[:k] + '.app' - else: - name = name + '.app' - return name - -@feature('cprogram', 'cxxprogram') -@after_method('apply_link') -def create_task_macapp(self): - """ - To compile an executable into a Mac application (a .app), set its *mac_app* 
attribute:: - - def build(bld): - bld.shlib(source='a.c', target='foo', mac_app=True) - - To force *all* executables to be transformed into Mac applications:: - - def build(bld): - bld.env.MACAPP = True - bld.shlib(source='a.c', target='foo') - """ - if self.env.MACAPP or getattr(self, 'mac_app', False): - out = self.link_task.outputs[0] - - name = bundle_name_for_output(out) - dir = self.create_bundle_dirs(name, out) - - n1 = dir.find_or_declare(['Contents', 'MacOS', out.name]) - - self.apptask = self.create_task('macapp', self.link_task.outputs, n1) - inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name - self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755) - - if getattr(self, 'mac_files', None): - # this only accepts files; they will be installed as seen from mac_files_root - mac_files_root = getattr(self, 'mac_files_root', None) - if isinstance(mac_files_root, str): - mac_files_root = self.path.find_node(mac_files_root) - if not mac_files_root: - self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root) - res_dir = n1.parent.parent.make_node('Resources') - inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name - for node in self.to_nodes(self.mac_files): - relpath = node.path_from(mac_files_root or node.parent) - self.create_task('macapp', node, res_dir.make_node(relpath)) - self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node) - - if getattr(self.bld, 'is_install', None): - # disable regular binary installation - self.install_task.hasrun = Task.SKIP_ME - -@feature('cprogram', 'cxxprogram') -@after_method('apply_link') -def create_task_macplist(self): - """ - Creates a :py:class:`waflib.Tools.c_osx.macplist` instance. 
- """ - if self.env.MACAPP or getattr(self, 'mac_app', False): - out = self.link_task.outputs[0] - - name = bundle_name_for_output(out) - - dir = self.create_bundle_dirs(name, out) - n1 = dir.find_or_declare(['Contents', 'Info.plist']) - self.plisttask = plisttask = self.create_task('macplist', [], n1) - plisttask.context = { - 'app_name': self.link_task.outputs[0].name, - 'env': self.env - } - - plist_ctx = getattr(self, 'plist_context', None) - if (plist_ctx): - plisttask.context.update(plist_ctx) - - if getattr(self, 'mac_plist', False): - node = self.path.find_resource(self.mac_plist) - if node: - plisttask.inputs.append(node) - else: - plisttask.code = self.mac_plist - else: - plisttask.code = app_info - - inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name - self.add_install_files(install_to=inst_to, install_from=n1) - -@feature('cshlib', 'cxxshlib') -@before_method('apply_link', 'propagate_uselib_vars') -def apply_bundle(self): - """ - To make a bundled shared library (a ``.bundle``), set the *mac_bundle* attribute:: - - def build(bld): - bld.shlib(source='a.c', target='foo', mac_bundle = True) - - To force *all* executables to be transformed into bundles:: - - def build(bld): - bld.env.MACBUNDLE = True - bld.shlib(source='a.c', target='foo') - """ - if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False): - self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag - self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN - use = self.use = self.to_list(getattr(self, 'use', [])) - if not 'MACBUNDLE' in use: - use.append('MACBUNDLE') - -app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources'] - -class macapp(Task.Task): - """ - Creates mac applications - """ - color = 'PINK' - def run(self): - self.outputs[0].parent.mkdir() - shutil.copy2(self.inputs[0].srcpath(), self.outputs[0].abspath()) - -class macplist(Task.Task): - """ - Creates plist files - """ - 
color = 'PINK' - ext_in = ['.bin'] - def run(self): - if getattr(self, 'code', None): - txt = self.code - else: - txt = self.inputs[0].read() - context = getattr(self, 'context', {}) - txt = txt.format(**context) - self.outputs[0].write(txt) - diff --git a/waflib/Tools/c_preproc.py b/waflib/Tools/c_preproc.py deleted file mode 100644 index 68e5f5a..0000000 --- a/waflib/Tools/c_preproc.py +++ /dev/null @@ -1,1091 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2018 (ita) - -""" -C/C++ preprocessor for finding dependencies - -Reasons for using the Waf preprocessor by default - -#. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files) -#. Not all compilers provide .d files for obtaining the dependencies (portability) -#. A naive file scanner will not catch the constructs such as "#include foo()" -#. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything) - -Regarding the speed concerns: - -* the preprocessing is performed only when files must be compiled -* the macros are evaluated only for #if/#elif/#include -* system headers are not scanned by default - -Now if you do not want the Waf preprocessor, the tool +gccdeps* uses the .d files produced -during the compilation to track the dependencies (useful when used with the boost libraries). -It only works with gcc >= 4.4 though. 
- -A dumb preprocessor is also available in the tool *c_dumbpreproc* -""" -# TODO: more varargs, pragma once - -import re, string, traceback -from waflib import Logs, Utils, Errors - -class PreprocError(Errors.WafError): - pass - -FILE_CACHE_SIZE = 100000 -LINE_CACHE_SIZE = 100000 - -POPFILE = '-' -"Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously" - -recursion_limit = 150 -"Limit on the amount of files to read in the dependency scanner" - -go_absolute = False -"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)" - -standard_includes = ['/usr/local/include', '/usr/include'] -if Utils.is_win32: - standard_includes = [] - -use_trigraphs = 0 -"""Apply trigraph rules (False by default)""" - -# obsolete, do not use -strict_quotes = 0 - -g_optrans = { -'not':'!', -'not_eq':'!', -'and':'&&', -'and_eq':'&=', -'or':'||', -'or_eq':'|=', -'xor':'^', -'xor_eq':'^=', -'bitand':'&', -'bitor':'|', -'compl':'~', -} -"""Operators such as and/or/xor for c++. 
Set an empty dict to disable.""" - -# ignore #warning and #error -re_lines = re.compile( - '^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$', - re.IGNORECASE | re.MULTILINE) -"""Match #include lines""" - -re_mac = re.compile(r"^[a-zA-Z_]\w*") -"""Match macro definitions""" - -re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') -"""Match macro functions""" - -re_pragma_once = re.compile(r'^\s*once\s*', re.IGNORECASE) -"""Match #pragma once statements""" - -re_nl = re.compile('\\\\\r*\n', re.MULTILINE) -"""Match newlines""" - -re_cpp = re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"', re.DOTALL | re.MULTILINE ) -"""Filter C/C++ comments""" - -trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')] -"""Trigraph definitions""" - -chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39} -"""Escape characters""" - -NUM = 'i' -"""Number token""" - -OP = 'O' -"""Operator token""" - -IDENT = 'T' -"""Identifier token""" - -STR = 's' -"""String token""" - -CHAR = 'c' -"""Character token""" - -tok_types = [NUM, STR, IDENT, OP] -"""Token types""" - -exp_types = [ - r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""", - r'L?"([^"\\]|\\.)*"', - r'[a-zA-Z_]\w*', - r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]', -] -"""Expression types""" - -re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M) -"""Match expressions into tokens""" - -accepted = 'a' -"""Parser state is *accepted*""" - -ignored = 'i' -"""Parser state is *ignored*, for example preprocessor lines in 
an #if 0 block""" - -undefined = 'u' -"""Parser state is *undefined* at the moment""" - -skipped = 's' -"""Parser state is *skipped*, for example preprocessor lines in a #elif 0 block""" - -def repl(m): - """Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`""" - s = m.group() - if s[0] == '/': - return ' ' - return s - -prec = {} -""" -Operator precedence rules required for parsing expressions of the form:: - - #if 1 && 2 != 0 -""" -ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ','] -for x, syms in enumerate(ops): - for u in syms.split(): - prec[u] = x - -def reduce_nums(val_1, val_2, val_op): - """ - Apply arithmetic rules to compute a result - - :param val1: input parameter - :type val1: int or string - :param val2: input parameter - :type val2: int or string - :param val_op: C operator in *+*, */*, *-*, etc - :type val_op: string - :rtype: int - """ - #print val_1, val_2, val_op - - # now perform the operation, make certain a and b are numeric - try: - a = 0 + val_1 - except TypeError: - a = int(val_1) - try: - b = 0 + val_2 - except TypeError: - b = int(val_2) - - d = val_op - if d == '%': - c = a % b - elif d=='+': - c = a + b - elif d=='-': - c = a - b - elif d=='*': - c = a * b - elif d=='/': - c = a / b - elif d=='^': - c = a ^ b - elif d=='==': - c = int(a == b) - elif d=='|' or d == 'bitor': - c = a | b - elif d=='||' or d == 'or' : - c = int(a or b) - elif d=='&' or d == 'bitand': - c = a & b - elif d=='&&' or d == 'and': - c = int(a and b) - elif d=='!=' or d == 'not_eq': - c = int(a != b) - elif d=='^' or d == 'xor': - c = int(a^b) - elif d=='<=': - c = int(a <= b) - elif d=='<': - c = int(a < b) - elif d=='>': - c = int(a > b) - elif d=='>=': - c = int(a >= b) - elif d=='<<': - c = a << b - elif d=='>>': - c = a >> b - else: - c = 0 - return c - -def get_num(lst): - """ - Try to obtain a number from a list of tokens. The token types are defined in :py:attr:`waflib.Tools.ccroot.tok_types`. 
- - :param lst: list of preprocessor tokens - :type lst: list of tuple (tokentype, value) - :return: a pair containing the number and the rest of the list - :rtype: tuple(value, list) - """ - if not lst: - raise PreprocError('empty list for get_num') - (p, v) = lst[0] - if p == OP: - if v == '(': - count_par = 1 - i = 1 - while i < len(lst): - (p, v) = lst[i] - - if p == OP: - if v == ')': - count_par -= 1 - if count_par == 0: - break - elif v == '(': - count_par += 1 - i += 1 - else: - raise PreprocError('rparen expected %r' % lst) - - (num, _) = get_term(lst[1:i]) - return (num, lst[i+1:]) - - elif v == '+': - return get_num(lst[1:]) - elif v == '-': - num, lst = get_num(lst[1:]) - return (reduce_nums('-1', num, '*'), lst) - elif v == '!': - num, lst = get_num(lst[1:]) - return (int(not int(num)), lst) - elif v == '~': - num, lst = get_num(lst[1:]) - return (~ int(num), lst) - else: - raise PreprocError('Invalid op token %r for get_num' % lst) - elif p == NUM: - return v, lst[1:] - elif p == IDENT: - # all macros should have been replaced, remaining identifiers eval to 0 - return 0, lst[1:] - else: - raise PreprocError('Invalid token %r for get_num' % lst) - -def get_term(lst): - """ - Evaluate an expression recursively, for example:: - - 1+1+1 -> 2+1 -> 3 - - :param lst: list of tokens - :type lst: list of tuple(token, value) - :return: the value and the remaining tokens - :rtype: value, list - """ - - if not lst: - raise PreprocError('empty list for get_term') - num, lst = get_num(lst) - if not lst: - return (num, []) - (p, v) = lst[0] - if p == OP: - if v == ',': - # skip - return get_term(lst[1:]) - elif v == '?': - count_par = 0 - i = 1 - while i < len(lst): - (p, v) = lst[i] - - if p == OP: - if v == ')': - count_par -= 1 - elif v == '(': - count_par += 1 - elif v == ':': - if count_par == 0: - break - i += 1 - else: - raise PreprocError('rparen expected %r' % lst) - - if int(num): - return get_term(lst[1:i]) - else: - return get_term(lst[i+1:]) - - else: - 
num2, lst = get_num(lst[1:]) - - if not lst: - # no more tokens to process - num2 = reduce_nums(num, num2, v) - return get_term([(NUM, num2)] + lst) - - # operator precedence - p2, v2 = lst[0] - if p2 != OP: - raise PreprocError('op expected %r' % lst) - - if prec[v2] >= prec[v]: - num2 = reduce_nums(num, num2, v) - return get_term([(NUM, num2)] + lst) - else: - num3, lst = get_num(lst[1:]) - num3 = reduce_nums(num2, num3, v2) - return get_term([(NUM, num), (p, v), (NUM, num3)] + lst) - - - raise PreprocError('cannot reduce %r' % lst) - -def reduce_eval(lst): - """ - Take a list of tokens and output true or false for #if/#elif conditions. - - :param lst: a list of tokens - :type lst: list of tuple(token, value) - :return: a token - :rtype: tuple(NUM, int) - """ - num, lst = get_term(lst) - return (NUM, num) - -def stringize(lst): - """ - Merge a list of tokens into a string - - :param lst: a list of tokens - :type lst: list of tuple(token, value) - :rtype: string - """ - lst = [str(v2) for (p2, v2) in lst] - return "".join(lst) - -def paste_tokens(t1, t2): - """ - Token pasting works between identifiers, particular operators, and identifiers and numbers:: - - a ## b -> ab - > ## = -> >= - a ## 2 -> a2 - - :param t1: token - :type t1: tuple(type, value) - :param t2: token - :type t2: tuple(type, value) - """ - p1 = None - if t1[0] == OP and t2[0] == OP: - p1 = OP - elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM): - p1 = IDENT - elif t1[0] == NUM and t2[0] == NUM: - p1 = NUM - if not p1: - raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2)) - return (p1, t1[1] + t2[1]) - -def reduce_tokens(lst, defs, ban=[]): - """ - Replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied - - :param lst: list of tokens - :type lst: list of tuple(token, value) - :param defs: macro definitions - :type defs: dict - :param ban: macros that cannot be substituted (recursion is not allowed) - :type ban: 
list of string - :return: the new list of tokens - :rtype: value, list - """ - - i = 0 - while i < len(lst): - (p, v) = lst[i] - - if p == IDENT and v == "defined": - del lst[i] - if i < len(lst): - (p2, v2) = lst[i] - if p2 == IDENT: - if v2 in defs: - lst[i] = (NUM, 1) - else: - lst[i] = (NUM, 0) - elif p2 == OP and v2 == '(': - del lst[i] - (p2, v2) = lst[i] - del lst[i] # remove the ident, and change the ) for the value - if v2 in defs: - lst[i] = (NUM, 1) - else: - lst[i] = (NUM, 0) - else: - raise PreprocError('Invalid define expression %r' % lst) - - elif p == IDENT and v in defs: - - if isinstance(defs[v], str): - a, b = extract_macro(defs[v]) - defs[v] = b - macro_def = defs[v] - to_add = macro_def[1] - - if isinstance(macro_def[0], list): - # macro without arguments - del lst[i] - accu = to_add[:] - reduce_tokens(accu, defs, ban+[v]) - for tmp in accu: - lst.insert(i, tmp) - i += 1 - else: - # collect the arguments for the funcall - - args = [] - del lst[i] - - if i >= len(lst): - raise PreprocError('expected ( after %r (got nothing)' % v) - - (p2, v2) = lst[i] - if p2 != OP or v2 != '(': - raise PreprocError('expected ( after %r' % v) - - del lst[i] - - one_param = [] - count_paren = 0 - while i < len(lst): - p2, v2 = lst[i] - - del lst[i] - if p2 == OP and count_paren == 0: - if v2 == '(': - one_param.append((p2, v2)) - count_paren += 1 - elif v2 == ')': - if one_param: - args.append(one_param) - break - elif v2 == ',': - if not one_param: - raise PreprocError('empty param in funcall %r' % v) - args.append(one_param) - one_param = [] - else: - one_param.append((p2, v2)) - else: - one_param.append((p2, v2)) - if v2 == '(': - count_paren += 1 - elif v2 == ')': - count_paren -= 1 - else: - raise PreprocError('malformed macro') - - # substitute the arguments within the define expression - accu = [] - arg_table = macro_def[0] - j = 0 - while j < len(to_add): - (p2, v2) = to_add[j] - - if p2 == OP and v2 == '#': - # stringize is for arguments only - if j+1 < 
len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table: - toks = args[arg_table[to_add[j+1][1]]] - accu.append((STR, stringize(toks))) - j += 1 - else: - accu.append((p2, v2)) - elif p2 == OP and v2 == '##': - # token pasting, how can man invent such a complicated system? - if accu and j+1 < len(to_add): - # we have at least two tokens - - t1 = accu[-1] - - if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table: - toks = args[arg_table[to_add[j+1][1]]] - - if toks: - accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1]) - accu.extend(toks[1:]) - else: - # error, case "a##" - accu.append((p2, v2)) - accu.extend(toks) - elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__': - # first collect the tokens - va_toks = [] - st = len(macro_def[0]) - pt = len(args) - for x in args[pt-st+1:]: - va_toks.extend(x) - va_toks.append((OP, ',')) - if va_toks: - va_toks.pop() # extra comma - if len(accu)>1: - (p3, v3) = accu[-1] - (p4, v4) = accu[-2] - if v3 == '##': - # remove the token paste - accu.pop() - if v4 == ',' and pt < st: - # remove the comma - accu.pop() - accu += va_toks - else: - accu[-1] = paste_tokens(t1, to_add[j+1]) - - j += 1 - else: - # Invalid paste, case "##a" or "b##" - accu.append((p2, v2)) - - elif p2 == IDENT and v2 in arg_table: - toks = args[arg_table[v2]] - reduce_tokens(toks, defs, ban+[v]) - accu.extend(toks) - else: - accu.append((p2, v2)) - - j += 1 - - - reduce_tokens(accu, defs, ban+[v]) - - for x in range(len(accu)-1, -1, -1): - lst.insert(i, accu[x]) - - i += 1 - - -def eval_macro(lst, defs): - """ - Reduce the tokens by :py:func:`waflib.Tools.c_preproc.reduce_tokens` and try to return a 0/1 result by :py:func:`waflib.Tools.c_preproc.reduce_eval`. 
- - :param lst: list of tokens - :type lst: list of tuple(token, value) - :param defs: macro definitions - :type defs: dict - :rtype: int - """ - reduce_tokens(lst, defs, []) - if not lst: - raise PreprocError('missing tokens to evaluate') - - if lst: - p, v = lst[0] - if p == IDENT and v not in defs: - raise PreprocError('missing macro %r' % lst) - - p, v = reduce_eval(lst) - return int(v) != 0 - -def extract_macro(txt): - """ - Process a macro definition of the form:: - #define f(x, y) x * y - - into a function or a simple macro without arguments - - :param txt: expression to exact a macro definition from - :type txt: string - :return: a tuple containing the name, the list of arguments and the replacement - :rtype: tuple(string, [list, list]) - """ - t = tokenize(txt) - if re_fun.search(txt): - p, name = t[0] - - p, v = t[1] - if p != OP: - raise PreprocError('expected (') - - i = 1 - pindex = 0 - params = {} - prev = '(' - - while 1: - i += 1 - p, v = t[i] - - if prev == '(': - if p == IDENT: - params[v] = pindex - pindex += 1 - prev = p - elif p == OP and v == ')': - break - else: - raise PreprocError('unexpected token (3)') - elif prev == IDENT: - if p == OP and v == ',': - prev = v - elif p == OP and v == ')': - break - else: - raise PreprocError('comma or ... expected') - elif prev == ',': - if p == IDENT: - params[v] = pindex - pindex += 1 - prev = p - elif p == OP and v == '...': - raise PreprocError('not implemented (1)') - else: - raise PreprocError('comma or ... 
expected (2)') - elif prev == '...': - raise PreprocError('not implemented (2)') - else: - raise PreprocError('unexpected else') - - #~ print (name, [params, t[i+1:]]) - return (name, [params, t[i+1:]]) - else: - (p, v) = t[0] - if len(t) > 1: - return (v, [[], t[1:]]) - else: - # empty define, assign an empty token - return (v, [[], [('T','')]]) - -re_include = re.compile(r'^\s*(<(?:.*)>|"(?:.*)")') -def extract_include(txt, defs): - """ - Process a line in the form:: - - #include foo - - :param txt: include line to process - :type txt: string - :param defs: macro definitions - :type defs: dict - :return: the file name - :rtype: string - """ - m = re_include.search(txt) - if m: - txt = m.group(1) - return txt[0], txt[1:-1] - - # perform preprocessing and look at the result, it must match an include - toks = tokenize(txt) - reduce_tokens(toks, defs, ['waf_include']) - - if not toks: - raise PreprocError('could not parse include %r' % txt) - - if len(toks) == 1: - if toks[0][0] == STR: - return '"', toks[0][1] - else: - if toks[0][1] == '<' and toks[-1][1] == '>': - ret = '<', stringize(toks).lstrip('<').rstrip('>') - return ret - - raise PreprocError('could not parse include %r' % txt) - -def parse_char(txt): - """ - Parse a c character - - :param txt: character to parse - :type txt: string - :return: a character literal - :rtype: string - """ - - if not txt: - raise PreprocError('attempted to parse a null char') - if txt[0] != '\\': - return ord(txt) - c = txt[1] - if c == 'x': - if len(txt) == 4 and txt[3] in string.hexdigits: - return int(txt[2:], 16) - return int(txt[2:], 16) - elif c.isdigit(): - if c == '0' and len(txt)==2: - return 0 - for i in 3, 2, 1: - if len(txt) > i and txt[1:1+i].isdigit(): - return (1+i, int(txt[1:1+i], 8)) - else: - try: - return chr_esc[c] - except KeyError: - raise PreprocError('could not parse char literal %r' % txt) - -def tokenize(s): - """ - Convert a string into a list of tokens (shlex.split does not apply to c/c++/d) - - 
:param s: input to tokenize - :type s: string - :return: a list of tokens - :rtype: list of tuple(token, value) - """ - return tokenize_private(s)[:] # force a copy of the results - -def tokenize_private(s): - ret = [] - for match in re_clexer.finditer(s): - m = match.group - for name in tok_types: - v = m(name) - if v: - if name == IDENT: - if v in g_optrans: - name = OP - elif v.lower() == "true": - v = 1 - name = NUM - elif v.lower() == "false": - v = 0 - name = NUM - elif name == NUM: - if m('oct'): - v = int(v, 8) - elif m('hex'): - v = int(m('hex'), 16) - elif m('n0'): - v = m('n0') - else: - v = m('char') - if v: - v = parse_char(v) - else: - v = m('n2') or m('n4') - elif name == OP: - if v == '%:': - v = '#' - elif v == '%:%:': - v = '##' - elif name == STR: - # remove the quotes around the string - v = v[1:-1] - ret.append((name, v)) - break - return ret - -def format_defines(lst): - ret = [] - for y in lst: - if y: - pos = y.find('=') - if pos == -1: - # "-DFOO" should give "#define FOO 1" - ret.append(y) - elif pos > 0: - # all others are assumed to be -DX=Y - ret.append('%s %s' % (y[:pos], y[pos+1:])) - else: - raise ValueError('Invalid define expression %r' % y) - return ret - -class c_parser(object): - """ - Used by :py:func:`waflib.Tools.c_preproc.scan` to parse c/h files. Note that by default, - only project headers are parsed. 
- """ - def __init__(self, nodepaths=None, defines=None): - self.lines = [] - """list of lines read""" - - if defines is None: - self.defs = {} - else: - self.defs = dict(defines) # make a copy - self.state = [] - - self.count_files = 0 - self.currentnode_stack = [] - - self.nodepaths = nodepaths or [] - """Include paths""" - - self.nodes = [] - """List of :py:class:`waflib.Node.Node` found so far""" - - self.names = [] - """List of file names that could not be matched by any file""" - - self.curfile = '' - """Current file""" - - self.ban_includes = set() - """Includes that must not be read (#pragma once)""" - - self.listed = set() - """Include nodes/names already listed to avoid duplicates in self.nodes/self.names""" - - def cached_find_resource(self, node, filename): - """ - Find a file from the input directory - - :param node: directory - :type node: :py:class:`waflib.Node.Node` - :param filename: header to find - :type filename: string - :return: the node if found, or None - :rtype: :py:class:`waflib.Node.Node` - """ - try: - cache = node.ctx.preproc_cache_node - except AttributeError: - cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE) - - key = (node, filename) - try: - return cache[key] - except KeyError: - ret = node.find_resource(filename) - if ret: - if getattr(ret, 'children', None): - ret = None - elif ret.is_child_of(node.ctx.bldnode): - tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) - if tmp and getattr(tmp, 'children', None): - ret = None - cache[key] = ret - return ret - - def tryfind(self, filename, kind='"', env=None): - """ - Try to obtain a node from the filename based from the include paths. Will add - the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to - :py:attr:`waflib.Tools.c_preproc.c_parser.names` if no corresponding file is found. Called by - :py:attr:`waflib.Tools.c_preproc.c_parser.start`. 
- - :param filename: header to find - :type filename: string - :return: the node if found - :rtype: :py:class:`waflib.Node.Node` - """ - if filename.endswith('.moc'): - # we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated - # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. - self.names.append(filename) - return None - - self.curfile = filename - - found = None - if kind == '"': - if env.MSVC_VERSION: - for n in reversed(self.currentnode_stack): - found = self.cached_find_resource(n, filename) - if found: - break - else: - found = self.cached_find_resource(self.currentnode_stack[-1], filename) - - if not found: - for n in self.nodepaths: - found = self.cached_find_resource(n, filename) - if found: - break - - listed = self.listed - if found and not found in self.ban_includes: - if found not in listed: - listed.add(found) - self.nodes.append(found) - self.addlines(found) - else: - if filename not in listed: - listed.add(filename) - self.names.append(filename) - return found - - def filter_comments(self, node): - """ - Filter the comments from a c/h file, and return the preprocessor lines. - The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally. 
- - :return: the preprocessor directives as a list of (keyword, line) - :rtype: a list of string pairs - """ - # return a list of tuples : keyword, line - code = node.read() - if use_trigraphs: - for (a, b) in trig_def: - code = code.split(a).join(b) - code = re_nl.sub('', code) - code = re_cpp.sub(repl, code) - return re_lines.findall(code) - - def parse_lines(self, node): - try: - cache = node.ctx.preproc_cache_lines - except AttributeError: - cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE) - try: - return cache[node] - except KeyError: - cache[node] = lines = self.filter_comments(node) - lines.append((POPFILE, '')) - lines.reverse() - return lines - - def addlines(self, node): - """ - Add the lines from a header in the list of preprocessor lines to parse - - :param node: header - :type node: :py:class:`waflib.Node.Node` - """ - - self.currentnode_stack.append(node.parent) - - self.count_files += 1 - if self.count_files > recursion_limit: - # issue #812 - raise PreprocError('recursion limit exceeded') - - if Logs.verbose: - Logs.debug('preproc: reading file %r', node) - try: - lines = self.parse_lines(node) - except EnvironmentError: - raise PreprocError('could not read the file %r' % node) - except Exception: - if Logs.verbose > 0: - Logs.error('parsing %r failed %s', node, traceback.format_exc()) - else: - self.lines.extend(lines) - - def start(self, node, env): - """ - Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` - and :py:attr:`waflib.Tools.c_preproc.c_parser.names`. 
- - :param node: source file - :type node: :py:class:`waflib.Node.Node` - :param env: config set containing additional defines to take into account - :type env: :py:class:`waflib.ConfigSet.ConfigSet` - """ - Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name) - - self.current_file = node - self.addlines(node) - - # macros may be defined on the command-line, so they must be parsed as if they were part of the file - if env.DEFINES: - lst = format_defines(env.DEFINES) - lst.reverse() - self.lines.extend([('define', x) for x in lst]) - - while self.lines: - (token, line) = self.lines.pop() - if token == POPFILE: - self.count_files -= 1 - self.currentnode_stack.pop() - continue - - try: - state = self.state - - # make certain we define the state if we are about to enter in an if block - if token[:2] == 'if': - state.append(undefined) - elif token == 'endif': - state.pop() - - # skip lines when in a dead 'if' branch, wait for the endif - if token[0] != 'e': - if skipped in self.state or ignored in self.state: - continue - - if token == 'if': - ret = eval_macro(tokenize(line), self.defs) - if ret: - state[-1] = accepted - else: - state[-1] = ignored - elif token == 'ifdef': - m = re_mac.match(line) - if m and m.group() in self.defs: - state[-1] = accepted - else: - state[-1] = ignored - elif token == 'ifndef': - m = re_mac.match(line) - if m and m.group() in self.defs: - state[-1] = ignored - else: - state[-1] = accepted - elif token == 'include' or token == 'import': - (kind, inc) = extract_include(line, self.defs) - self.current_file = self.tryfind(inc, kind, env) - if token == 'import': - self.ban_includes.add(self.current_file) - elif token == 'elif': - if state[-1] == accepted: - state[-1] = skipped - elif state[-1] == ignored: - if eval_macro(tokenize(line), self.defs): - state[-1] = accepted - elif token == 'else': - if state[-1] == accepted: - state[-1] = skipped - elif state[-1] == ignored: - state[-1] = accepted - elif token == 'define': - 
try: - self.defs[self.define_name(line)] = line - except AttributeError: - raise PreprocError('Invalid define line %r' % line) - elif token == 'undef': - m = re_mac.match(line) - if m and m.group() in self.defs: - self.defs.__delitem__(m.group()) - #print "undef %s" % name - elif token == 'pragma': - if re_pragma_once.match(line.lower()): - self.ban_includes.add(self.current_file) - except Exception as e: - if Logs.verbose: - Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc()) - - def define_name(self, line): - """ - :param line: define line - :type line: string - :rtype: string - :return: the define name - """ - return re_mac.match(line).group() - -def scan(task): - """ - Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind:: - - #include some_macro() - - This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example - """ - try: - incn = task.generator.includes_nodes - except AttributeError: - raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": ' % task.generator) - - if go_absolute: - nodepaths = incn + [task.generator.bld.root.find_dir(x) for x in standard_includes] - else: - nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)] - - tmp = c_parser(nodepaths) - tmp.start(task.inputs[0], task.env) - return (tmp.nodes, tmp.names) diff --git a/waflib/Tools/c_tests.py b/waflib/Tools/c_tests.py deleted file mode 100644 index f858df5..0000000 --- a/waflib/Tools/c_tests.py +++ /dev/null @@ -1,229 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2016-2018 (ita) - -""" -Various configuration tests. 
-""" - -from waflib import Task -from waflib.Configure import conf -from waflib.TaskGen import feature, before_method, after_method - -LIB_CODE = ''' -#ifdef _MSC_VER -#define testEXPORT __declspec(dllexport) -#else -#define testEXPORT -#endif -testEXPORT int lib_func(void) { return 9; } -''' - -MAIN_CODE = ''' -#ifdef _MSC_VER -#define testEXPORT __declspec(dllimport) -#else -#define testEXPORT -#endif -testEXPORT int lib_func(void); -int main(int argc, char **argv) { - (void)argc; (void)argv; - return !(lib_func() == 9); -} -''' - -@feature('link_lib_test') -@before_method('process_source') -def link_lib_test_fun(self): - """ - The configuration test :py:func:`waflib.Configure.run_build` declares a unique task generator, - so we need to create other task generators from here to check if the linker is able to link libraries. - """ - def write_test_file(task): - task.outputs[0].write(task.generator.code) - - rpath = [] - if getattr(self, 'add_rpath', False): - rpath = [self.bld.path.get_bld().abspath()] - - mode = self.mode - m = '%s %s' % (mode, mode) - ex = self.test_exec and 'test_exec' or '' - bld = self.bld - bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE) - bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE) - bld(features='%sshlib' % m, source='test.' + mode, target='test') - bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath) - -@conf -def check_library(self, mode=None, test_exec=True): - """ - Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`. 
- - :param mode: c or cxx or d - :type mode: string - """ - if not mode: - mode = 'c' - if self.env.CXX: - mode = 'cxx' - self.check( - compile_filename = [], - features = 'link_lib_test', - msg = 'Checking for libraries', - mode = mode, - test_exec = test_exec) - -######################################################################################## - -INLINE_CODE = ''' -typedef int foo_t; -static %s foo_t static_foo () {return 0; } -%s foo_t foo () { - return 0; -} -''' -INLINE_VALUES = ['inline', '__inline__', '__inline'] - -@conf -def check_inline(self, **kw): - """ - Checks for the right value for inline macro. - Define INLINE_MACRO to 1 if the define is found. - If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__) - - :param define_name: define INLINE_MACRO by default to 1 if the macro is defined - :type define_name: string - :param features: by default *c* or *cxx* depending on the compiler present - :type features: list of string - """ - self.start_msg('Checking for inline') - - if not 'define_name' in kw: - kw['define_name'] = 'INLINE_MACRO' - if not 'features' in kw: - if self.env.CXX: - kw['features'] = ['cxx'] - else: - kw['features'] = ['c'] - - for x in INLINE_VALUES: - kw['fragment'] = INLINE_CODE % (x, x) - - try: - self.check(**kw) - except self.errors.ConfigurationError: - continue - else: - self.end_msg(x) - if x != 'inline': - self.define('inline', x, quote=False) - return x - self.fatal('could not use inline functions') - -######################################################################################## - -LARGE_FRAGMENT = '''#include <unistd.h> -int main(int argc, char **argv) { - (void)argc; (void)argv; - return !(sizeof(off_t) >= 8); -} -''' - -@conf -def check_large_file(self, **kw): - """ - Checks for large file support and define the macro HAVE_LARGEFILE - The test is skipped on win32 systems (DEST_BINFMT == pe). 
- - :param define_name: define to set, by default *HAVE_LARGEFILE* - :type define_name: string - :param execute: execute the test (yes by default) - :type execute: bool - """ - if not 'define_name' in kw: - kw['define_name'] = 'HAVE_LARGEFILE' - if not 'execute' in kw: - kw['execute'] = True - - if not 'features' in kw: - if self.env.CXX: - kw['features'] = ['cxx', 'cxxprogram'] - else: - kw['features'] = ['c', 'cprogram'] - - kw['fragment'] = LARGE_FRAGMENT - - kw['msg'] = 'Checking for large file support' - ret = True - try: - if self.env.DEST_BINFMT != 'pe': - ret = self.check(**kw) - except self.errors.ConfigurationError: - pass - else: - if ret: - return True - - kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64' - kw['defines'] = ['_FILE_OFFSET_BITS=64'] - try: - ret = self.check(**kw) - except self.errors.ConfigurationError: - pass - else: - self.define('_FILE_OFFSET_BITS', 64) - return ret - - self.fatal('There is no support for large files') - -######################################################################################## - -ENDIAN_FRAGMENT = ''' -short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; -short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; -int use_ascii (int i) { - return ascii_mm[i] + ascii_ii[i]; -} -short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; -short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; -int use_ebcdic (int i) { - return ebcdic_mm[i] + ebcdic_ii[i]; -} -extern int foo; -''' - -class grep_for_endianness(Task.Task): - """ - Task that reads a binary and tries to determine the endianness - """ - color = 'PINK' - def run(self): - txt = self.inputs[0].read(flags='rb').decode('latin-1') - if txt.find('LiTTleEnDian') > -1: - self.generator.tmp.append('little') - elif txt.find('BIGenDianSyS') > -1: - self.generator.tmp.append('big') - else: - return -1 - -@feature('grep_for_endianness') -@after_method('process_source') 
-def grep_for_endianness_fun(self): - """ - Used by the endianness configuration test - """ - self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0]) - -@conf -def check_endianness(self): - """ - Executes a configuration test to determine the endianness - """ - tmp = [] - def check_msg(self): - return tmp[0] - self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', - msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg) - return tmp[0] - diff --git a/waflib/Tools/ccroot.py b/waflib/Tools/ccroot.py deleted file mode 100644 index 579d5b2..0000000 --- a/waflib/Tools/ccroot.py +++ /dev/null @@ -1,791 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -""" -Classes and methods shared by tools providing support for C-like language such -as C/C++/D/Assembly/Go (this support module is almost never used alone). -""" - -import os, re -from waflib import Task, Utils, Node, Errors, Logs -from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension -from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests -from waflib.Configure import conf - -SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib'] - -USELIB_VARS = Utils.defaultdict(set) -""" -Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`. 
-""" - -USELIB_VARS['c'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH']) -USELIB_VARS['cxx'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH']) -USELIB_VARS['d'] = set(['INCLUDES', 'DFLAGS']) -USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH']) - -USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS']) -USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS']) -USELIB_VARS['cstlib'] = USELIB_VARS['cxxstlib'] = set(['ARFLAGS', 'LINKDEPS']) - -USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) -USELIB_VARS['dshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS']) -USELIB_VARS['dstlib'] = set(['ARFLAGS', 'LINKDEPS']) - -USELIB_VARS['asm'] = set(['ASFLAGS']) - -# ================================================================================================= - -@taskgen_method -def create_compiled_task(self, name, node): - """ - Create the compilation task: c, cxx, asm, etc. The output node is created automatically (object file with a typical **.o** extension). 
- The task is appended to the list *compiled_tasks* which is then used by :py:func:`waflib.Tools.ccroot.apply_link` - - :param name: name of the task class - :type name: string - :param node: the file to compile - :type node: :py:class:`waflib.Node.Node` - :return: The task created - :rtype: :py:class:`waflib.Task.Task` - """ - out = '%s.%d.o' % (node.name, self.idx) - task = self.create_task(name, node, node.parent.find_or_declare(out)) - try: - self.compiled_tasks.append(task) - except AttributeError: - self.compiled_tasks = [task] - return task - -@taskgen_method -def to_incnodes(self, inlst): - """ - Task generator method provided to convert a list of string/nodes into a list of includes folders. - - The paths are assumed to be relative to the task generator path, except if they begin by **#** - in which case they are searched from the top-level directory (``bld.srcnode``). - The folders are simply assumed to be existing. - - The node objects in the list are returned in the output list. The strings are converted - into node objects if possible. The node is searched from the source directory, and if a match is found, - the equivalent build directory is created and added to the returned list too. When a folder cannot be found, it is ignored. 
- - :param inlst: list of folders - :type inlst: space-delimited string or a list of string/nodes - :rtype: list of :py:class:`waflib.Node.Node` - :return: list of include folders as nodes - """ - lst = [] - seen = set() - for x in self.to_list(inlst): - if x in seen or not x: - continue - seen.add(x) - - # with a real lot of targets, it is sometimes interesting to cache the results below - if isinstance(x, Node.Node): - lst.append(x) - else: - if os.path.isabs(x): - lst.append(self.bld.root.make_node(x) or x) - else: - if x[0] == '#': - p = self.bld.bldnode.make_node(x[1:]) - v = self.bld.srcnode.make_node(x[1:]) - else: - p = self.path.get_bld().make_node(x) - v = self.path.make_node(x) - if p.is_child_of(self.bld.bldnode): - p.mkdir() - lst.append(p) - lst.append(v) - return lst - -@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes') -@after_method('propagate_uselib_vars', 'process_source') -def apply_incpaths(self): - """ - Task generator method that processes the attribute *includes*:: - - tg = bld(features='includes', includes='.') - - The folders only need to be relative to the current directory, the equivalent build directory is - added automatically (for headers created in the build directory). This enables using a build directory - or not (``top == out``). - - This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``, - and the list of include paths in ``tg.env.INCLUDES``. - """ - - lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES) - self.includes_nodes = lst - cwd = self.get_cwd() - self.env.INCPATHS = [x.path_from(cwd) for x in lst] - -class link_task(Task.Task): - """ - Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`. - - .. 
inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib - """ - color = 'YELLOW' - - weight = 3 - """Try to process link tasks as early as possible""" - - inst_to = None - """Default installation path for the link task outputs, or None to disable""" - - chmod = Utils.O755 - """Default installation mode for the link task outputs""" - - def add_target(self, target): - """ - Process the *target* attribute to add the platform-specific prefix/suffix such as *.so* or *.exe*. - The settings are retrieved from ``env.clsname_PATTERN`` - """ - if isinstance(target, str): - base = self.generator.path - if target.startswith('#'): - # for those who like flat structures - target = target[1:] - base = self.generator.bld.bldnode - - pattern = self.env[self.__class__.__name__ + '_PATTERN'] - if not pattern: - pattern = '%s' - folder, name = os.path.split(target) - - if self.__class__.__name__.find('shlib') > 0 and getattr(self.generator, 'vnum', None): - nums = self.generator.vnum.split('.') - if self.env.DEST_BINFMT == 'pe': - # include the version in the dll file name, - # the import lib file name stays unversioned. 
- name = name + '-' + nums[0] - elif self.env.DEST_OS == 'openbsd': - pattern = '%s.%s' % (pattern, nums[0]) - if len(nums) >= 2: - pattern += '.%s' % nums[1] - - if folder: - tmp = folder + os.sep + pattern % name - else: - tmp = pattern % name - target = base.find_or_declare(tmp) - self.set_outputs(target) - - def exec_command(self, *k, **kw): - ret = super(link_task, self).exec_command(*k, **kw) - if not ret and self.env.DO_MANIFEST: - ret = self.exec_mf() - return ret - - def exec_mf(self): - """ - Create manifest files for VS-like compilers (msvc, ifort, ...) - """ - if not self.env.MT: - return 0 - - manifest = None - for out_node in self.outputs: - if out_node.name.endswith('.manifest'): - manifest = out_node.abspath() - break - else: - # Should never get here. If we do, it means the manifest file was - # never added to the outputs list, thus we don't have a manifest file - # to embed, so we just return. - return 0 - - # embedding mode. Different for EXE's and DLL's. - # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx - mode = '' - for x in Utils.to_list(self.generator.features): - if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'): - mode = 1 - elif x in ('cshlib', 'cxxshlib', 'fcshlib'): - mode = 2 - - Logs.debug('msvc: embedding manifest in mode %r', mode) - - lst = [] + self.env.MT - lst.extend(Utils.to_list(self.env.MTFLAGS)) - lst.extend(['-manifest', manifest]) - lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode)) - - return super(link_task, self).exec_command(lst) - -class stlink_task(link_task): - """ - Base for static link tasks, which use *ar* most of the time. - The target is always removed before being written. 
- """ - run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}' - - chmod = Utils.O644 - """Default installation mode for the static libraries""" - -def rm_tgt(cls): - old = cls.run - def wrap(self): - try: - os.remove(self.outputs[0].abspath()) - except OSError: - pass - return old(self) - setattr(cls, 'run', wrap) -rm_tgt(stlink_task) - -@feature('skip_stlib_link_deps') -@before_method('process_use') -def apply_skip_stlib_link_deps(self): - """ - This enables an optimization in the :py:func:wafilb.Tools.ccroot.processes_use: method that skips dependency and - link flag optimizations for targets that generate static libraries (via the :py:class:Tools.ccroot.stlink_task task). - The actual behavior is implemented in :py:func:wafilb.Tools.ccroot.processes_use: method so this feature only tells waf - to enable the new behavior. - """ - self.env.SKIP_STLIB_LINK_DEPS = True - -@feature('c', 'cxx', 'd', 'fc', 'asm') -@after_method('process_source') -def apply_link(self): - """ - Collect the tasks stored in ``compiled_tasks`` (created by :py:func:`waflib.Tools.ccroot.create_compiled_task`), and - use the outputs for a new instance of :py:class:`waflib.Tools.ccroot.link_task`. 
The class to use is the first link task - matching a name from the attribute *features*, for example:: - - def build(bld): - tg = bld(features='cxx cxxprogram cprogram', source='main.c', target='app') - - will create the task ``tg.link_task`` as a new instance of :py:class:`waflib.Tools.cxx.cxxprogram` - """ - - for x in self.features: - if x == 'cprogram' and 'cxx' in self.features: # limited compat - x = 'cxxprogram' - elif x == 'cshlib' and 'cxx' in self.features: - x = 'cxxshlib' - - if x in Task.classes: - if issubclass(Task.classes[x], link_task): - link = x - break - else: - return - - objs = [t.outputs[0] for t in getattr(self, 'compiled_tasks', [])] - self.link_task = self.create_task(link, objs) - self.link_task.add_target(self.target) - - # remember that the install paths are given by the task generators - try: - inst_to = self.install_path - except AttributeError: - inst_to = self.link_task.inst_to - if inst_to: - # install a copy of the node list we have at this moment (implib not added) - self.install_task = self.add_install_files( - install_to=inst_to, install_from=self.link_task.outputs[:], - chmod=self.link_task.chmod, task=self.link_task) - -@taskgen_method -def use_rec(self, name, **kw): - """ - Processes the ``use`` keyword recursively. 
This method is kind of private and only meant to be used from ``process_use`` - """ - - if name in self.tmp_use_not or name in self.tmp_use_seen: - return - - try: - y = self.bld.get_tgen_by_name(name) - except Errors.WafError: - self.uselib.append(name) - self.tmp_use_not.add(name) - return - - self.tmp_use_seen.append(name) - y.post() - - # bind temporary attributes on the task generator - y.tmp_use_objects = objects = kw.get('objects', True) - y.tmp_use_stlib = stlib = kw.get('stlib', True) - try: - link_task = y.link_task - except AttributeError: - y.tmp_use_var = '' - else: - objects = False - if not isinstance(link_task, stlink_task): - stlib = False - y.tmp_use_var = 'LIB' - else: - y.tmp_use_var = 'STLIB' - - p = self.tmp_use_prec - for x in self.to_list(getattr(y, 'use', [])): - if self.env["STLIB_" + x]: - continue - try: - p[x].append(name) - except KeyError: - p[x] = [name] - self.use_rec(x, objects=objects, stlib=stlib) - -@feature('c', 'cxx', 'd', 'use', 'fc') -@before_method('apply_incpaths', 'propagate_uselib_vars') -@after_method('apply_link', 'process_source') -def process_use(self): - """ - Process the ``use`` attribute which contains a list of task generator names:: - - def build(bld): - bld.shlib(source='a.c', target='lib1') - bld.program(source='main.c', target='app', use='lib1') - - See :py:func:`waflib.Tools.ccroot.use_rec`. 
- """ - - use_not = self.tmp_use_not = set() - self.tmp_use_seen = [] # we would like an ordered set - use_prec = self.tmp_use_prec = {} - self.uselib = self.to_list(getattr(self, 'uselib', [])) - self.includes = self.to_list(getattr(self, 'includes', [])) - names = self.to_list(getattr(self, 'use', [])) - - for x in names: - self.use_rec(x) - - for x in use_not: - if x in use_prec: - del use_prec[x] - - # topological sort - out = self.tmp_use_sorted = [] - tmp = [] - for x in self.tmp_use_seen: - for k in use_prec.values(): - if x in k: - break - else: - tmp.append(x) - - while tmp: - e = tmp.pop() - out.append(e) - try: - nlst = use_prec[e] - except KeyError: - pass - else: - del use_prec[e] - for x in nlst: - for y in use_prec: - if x in use_prec[y]: - break - else: - tmp.append(x) - if use_prec: - raise Errors.WafError('Cycle detected in the use processing %r' % use_prec) - out.reverse() - - link_task = getattr(self, 'link_task', None) - for x in out: - y = self.bld.get_tgen_by_name(x) - var = y.tmp_use_var - if var and link_task: - if self.env.SKIP_STLIB_LINK_DEPS and isinstance(link_task, stlink_task): - # If the skip_stlib_link_deps feature is enabled then we should - # avoid adding lib deps to the stlink_task instance. 
- pass - elif var == 'LIB' or y.tmp_use_stlib or x in names: - self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]]) - self.link_task.dep_nodes.extend(y.link_task.outputs) - tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd()) - self.env.append_unique(var + 'PATH', [tmp_path]) - else: - if y.tmp_use_objects: - self.add_objects_from_tgen(y) - - if getattr(y, 'export_includes', None): - # self.includes may come from a global variable #2035 - self.includes = self.includes + y.to_incnodes(y.export_includes) - - if getattr(y, 'export_defines', None): - self.env.append_value('DEFINES', self.to_list(y.export_defines)) - - - # and finally, add the use variables (no recursion needed) - for x in names: - try: - y = self.bld.get_tgen_by_name(x) - except Errors.WafError: - if not self.env['STLIB_' + x] and not x in self.uselib: - self.uselib.append(x) - else: - for k in self.to_list(getattr(y, 'use', [])): - if not self.env['STLIB_' + k] and not k in self.uselib: - self.uselib.append(k) - -@taskgen_method -def accept_node_to_link(self, node): - """ - PRIVATE INTERNAL USE ONLY - """ - return not node.name.endswith('.pdb') - -@taskgen_method -def add_objects_from_tgen(self, tg): - """ - Add the objects from the depending compiled tasks as link task inputs. 
- - Some objects are filtered: for instance, .pdb files are added - to the compiled tasks but not to the link tasks (to avoid errors) - PRIVATE INTERNAL USE ONLY - """ - try: - link_task = self.link_task - except AttributeError: - pass - else: - for tsk in getattr(tg, 'compiled_tasks', []): - for x in tsk.outputs: - if self.accept_node_to_link(x): - link_task.inputs.append(x) - -@taskgen_method -def get_uselib_vars(self): - """ - :return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`) - :rtype: list of string - """ - _vars = set() - for x in self.features: - if x in USELIB_VARS: - _vars |= USELIB_VARS[x] - return _vars - -@feature('c', 'cxx', 'd', 'fc', 'javac', 'cs', 'uselib', 'asm') -@after_method('process_use') -def propagate_uselib_vars(self): - """ - Process uselib variables for adding flags. For example, the following target:: - - def build(bld): - bld.env.AFLAGS_aaa = ['bar'] - from waflib.Tools.ccroot import USELIB_VARS - USELIB_VARS['aaa'] = ['AFLAGS'] - - tg = bld(features='aaa', aflags='test') - - The *aflags* attribute will be processed and this method will set:: - - tg.env.AFLAGS = ['bar', 'test'] - """ - _vars = self.get_uselib_vars() - env = self.env - app = env.append_value - feature_uselib = self.features + self.to_list(getattr(self, 'uselib', [])) - for var in _vars: - y = var.lower() - val = getattr(self, y, []) - if val: - app(var, self.to_list(val)) - - for x in feature_uselib: - val = env['%s_%s' % (var, x)] - if val: - app(var, val) - -# ============ the code above must not know anything about import libs ========== - -@feature('cshlib', 'cxxshlib', 'fcshlib') -@after_method('apply_link') -def apply_implib(self): - """ - Handle dlls and their import libs on Windows-like systems. - - A ``.dll.a`` file called *import library* is generated. - It must be installed as it is required for linking the library. 
- """ - if not self.env.DEST_BINFMT == 'pe': - return - - dll = self.link_task.outputs[0] - if isinstance(self.target, Node.Node): - name = self.target.name - else: - name = os.path.split(self.target)[1] - implib = self.env.implib_PATTERN % name - implib = dll.parent.find_or_declare(implib) - self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath()) - self.link_task.outputs.append(implib) - - if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe': - node = self.path.find_resource(self.defs) - if not node: - raise Errors.WafError('invalid def file %r' % self.defs) - if self.env.def_PATTERN: - self.env.append_value('LINKFLAGS', self.env.def_PATTERN % node.path_from(self.get_cwd())) - self.link_task.dep_nodes.append(node) - else: - # gcc for windows takes *.def file as input without any special flag - self.link_task.inputs.append(node) - - # where to put the import library - if getattr(self, 'install_task', None): - try: - # user has given a specific installation path for the import library - inst_to = self.install_path_implib - except AttributeError: - try: - # user has given an installation path for the main library, put the import library in it - inst_to = self.install_path - except AttributeError: - # else, put the library in BINDIR and the import library in LIBDIR - inst_to = '${IMPLIBDIR}' - self.install_task.install_to = '${BINDIR}' - if not self.env.IMPLIBDIR: - self.env.IMPLIBDIR = self.env.LIBDIR - self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib, - chmod=self.link_task.chmod, task=self.link_task) - -# ============ the code above must not know anything about vnum processing on unix platforms ========= - -re_vnum = re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$') -@feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum') -@after_method('apply_link', 'propagate_uselib_vars') -def apply_vnum(self): - """ - Enforce version numbering on shared libraries. 
The valid version numbers must have either zero or two dots:: - - def build(bld): - bld.shlib(source='a.c', target='foo', vnum='14.15.16') - - In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created: - - * ``libfoo.so → libfoo.so.14.15.16`` - * ``libfoo.so.14 → libfoo.so.14.15.16`` - - By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library. When necessary, the compatibility can be explicitly defined using `cnum` parameter: - - def build(bld): - bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15') - - In this case, the assigned SONAME will be ``libfoo.so.14.15`` with ABI compatibility only between path releases for a specific major and minor version of the library. - - On OS X platform, install-name parameter will follow the above logic for SONAME with exception that it also specifies an absolute path (based on install_path) of the library. - """ - if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'): - return - - link = self.link_task - if not re_vnum.match(self.vnum): - raise Errors.WafError('Invalid vnum %r for target %r' % (self.vnum, getattr(self, 'name', self))) - nums = self.vnum.split('.') - node = link.outputs[0] - - cnum = getattr(self, 'cnum', str(nums[0])) - cnums = cnum.split('.') - if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums: - raise Errors.WafError('invalid compatibility version %s' % cnum) - - libname = node.name - if libname.endswith('.dylib'): - name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum) - name2 = libname.replace('.dylib', '.%s.dylib' % cnum) - else: - name3 = libname + '.' + self.vnum - name2 = libname + '.' 
+ cnum - - # add the so name for the ld linker - to disable, just unset env.SONAME_ST - if self.env.SONAME_ST: - v = self.env.SONAME_ST % name2 - self.env.append_value('LINKFLAGS', v.split()) - - # the following task is just to enable execution from the build dir :-/ - if self.env.DEST_OS != 'openbsd': - outs = [node.parent.make_node(name3)] - if name2 != name3: - outs.append(node.parent.make_node(name2)) - self.create_task('vnum', node, outs) - - if getattr(self, 'install_task', None): - self.install_task.hasrun = Task.SKIPPED - self.install_task.no_errcheck_out = True - path = self.install_task.install_to - if self.env.DEST_OS == 'openbsd': - libname = self.link_task.outputs[0].name - t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod) - self.vnum_install_task = (t1,) - else: - t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod) - t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3) - if name2 != name3: - t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3) - self.vnum_install_task = (t1, t2, t3) - else: - self.vnum_install_task = (t1, t3) - - if '-dynamiclib' in self.env.LINKFLAGS: - # this requires after(propagate_uselib_vars) - try: - inst_to = self.install_path - except AttributeError: - inst_to = self.link_task.inst_to - if inst_to: - p = Utils.subst_vars(inst_to, self.env) - path = os.path.join(p, name2) - self.env.append_value('LINKFLAGS', ['-install_name', path]) - self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum) - self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum) - -class vnum(Task.Task): - """ - Create the symbolic links for a versioned shared library. 
Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum` - """ - color = 'CYAN' - ext_in = ['.bin'] - def keyword(self): - return 'Symlinking' - def run(self): - for x in self.outputs: - path = x.abspath() - try: - os.remove(path) - except OSError: - pass - - try: - os.symlink(self.inputs[0].name, path) - except OSError: - return 1 - -class fake_shlib(link_task): - """ - Task used for reading a system library and adding the dependency on it - """ - def runnable_status(self): - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - return Task.SKIP_ME - -class fake_stlib(stlink_task): - """ - Task used for reading a system library and adding the dependency on it - """ - def runnable_status(self): - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - return Task.SKIP_ME - -@conf -def read_shlib(self, name, paths=[], export_includes=[], export_defines=[]): - """ - Read a system shared library, enabling its use as a local library. Will trigger a rebuild if the file changes:: - - def build(bld): - bld.read_shlib('m') - bld.program(source='main.c', use='m') - """ - return self(name=name, features='fake_lib', lib_paths=paths, lib_type='shlib', export_includes=export_includes, export_defines=export_defines) - -@conf -def read_stlib(self, name, paths=[], export_includes=[], export_defines=[]): - """ - Read a system static library, enabling a use as a local library. Will trigger a rebuild if the file changes. - """ - return self(name=name, features='fake_lib', lib_paths=paths, lib_type='stlib', export_includes=export_includes, export_defines=export_defines) - -lib_patterns = { - 'shlib' : ['lib%s.so', '%s.so', 'lib%s.dylib', 'lib%s.dll', '%s.dll'], - 'stlib' : ['lib%s.a', '%s.a', 'lib%s.dll', '%s.dll', 'lib%s.lib', '%s.lib'], -} - -@feature('fake_lib') -def process_lib(self): - """ - Find the location of a foreign library. Used by :py:class:`waflib.Tools.ccroot.read_shlib` and :py:class:`waflib.Tools.ccroot.read_stlib`. 
- """ - node = None - - names = [x % self.name for x in lib_patterns[self.lib_type]] - for x in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS: - if not isinstance(x, Node.Node): - x = self.bld.root.find_node(x) or self.path.find_node(x) - if not x: - continue - - for y in names: - node = x.find_node(y) - if node: - try: - Utils.h_file(node.abspath()) - except EnvironmentError: - raise ValueError('Could not read %r' % y) - break - else: - continue - break - else: - raise Errors.WafError('could not find library %r' % self.name) - self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node]) - self.target = self.name - - -class fake_o(Task.Task): - def runnable_status(self): - return Task.SKIP_ME - -@extension('.o', '.obj') -def add_those_o_files(self, node): - tsk = self.create_task('fake_o', [], node) - try: - self.compiled_tasks.append(tsk) - except AttributeError: - self.compiled_tasks = [tsk] - -@feature('fake_obj') -@before_method('process_source') -def process_objs(self): - """ - Puts object files in the task generator outputs - """ - for node in self.to_nodes(self.source): - self.add_those_o_files(node) - self.source = [] - -@conf -def read_object(self, obj): - """ - Read an object file, enabling injection in libs/programs. Will trigger a rebuild if the file changes. 
- - :param obj: object file path, as string or Node - """ - if not isinstance(obj, self.path.__class__): - obj = self.path.find_resource(obj) - return self(features='fake_obj', source=obj, name=obj.name) - -@feature('cxxprogram', 'cprogram') -@after_method('apply_link', 'process_use') -def set_full_paths_hpux(self): - """ - On hp-ux, extend the libpaths and static library paths to absolute paths - """ - if self.env.DEST_OS != 'hp-ux': - return - base = self.bld.bldnode.abspath() - for var in ['LIBPATH', 'STLIBPATH']: - lst = [] - for x in self.env[var]: - if x.startswith('/'): - lst.append(x) - else: - lst.append(os.path.normpath(os.path.join(base, x))) - self.env[var] = lst - diff --git a/waflib/Tools/clang.py b/waflib/Tools/clang.py deleted file mode 100644 index 3828e39..0000000 --- a/waflib/Tools/clang.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Krzysztof KosiÅ„ski 2014 - -""" -Detect the Clang C compiler -""" - -from waflib.Tools import ccroot, ar, gcc -from waflib.Configure import conf - -@conf -def find_clang(conf): - """ - Finds the program clang and executes it to ensure it really is clang - """ - cc = conf.find_program('clang', var='CC') - conf.get_cc_version(cc, clang=True) - conf.env.CC_NAME = 'clang' - -def configure(conf): - conf.find_clang() - conf.find_program(['llvm-ar', 'ar'], var='AR') - conf.find_ar() - conf.gcc_common_flags() - conf.gcc_modifier_platform() - conf.cc_load_tools() - conf.cc_add_flags() - conf.link_add_flags() diff --git a/waflib/Tools/clangxx.py b/waflib/Tools/clangxx.py deleted file mode 100644 index 152013c..0000000 --- a/waflib/Tools/clangxx.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy 2009-2018 (ita) - -""" -Detect the Clang++ C++ compiler -""" - -from waflib.Tools import ccroot, ar, gxx -from waflib.Configure import conf - -@conf -def find_clangxx(conf): - """ - Finds the program clang++, and executes it to ensure it really is clang++ - """ - 
cxx = conf.find_program('clang++', var='CXX') - conf.get_cc_version(cxx, clang=True) - conf.env.CXX_NAME = 'clang' - -def configure(conf): - conf.find_clangxx() - conf.find_program(['llvm-ar', 'ar'], var='AR') - conf.find_ar() - conf.gxx_common_flags() - conf.gxx_modifier_platform() - conf.cxx_load_tools() - conf.cxx_add_flags() - conf.link_add_flags() - diff --git a/waflib/Tools/compiler_c.py b/waflib/Tools/compiler_c.py deleted file mode 100644 index 2dba3f8..0000000 --- a/waflib/Tools/compiler_c.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Matthias Jahn jahn dĂ´t matthias Ă¢t freenet dĂ´t de, 2007 (pmarat) - -""" -Try to detect a C compiler from the list of supported compilers (gcc, msvc, etc):: - - def options(opt): - opt.load('compiler_c') - def configure(cnf): - cnf.load('compiler_c') - def build(bld): - bld.program(source='main.c', target='app') - -The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_c.c_compiler`. To register -a new C compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use:: - - from waflib.Tools.compiler_c import c_compiler - c_compiler['win32'] = ['cfoo', 'msvc', 'gcc'] - - def options(opt): - opt.load('compiler_c') - def configure(cnf): - cnf.load('compiler_c') - def build(bld): - bld.program(source='main.c', target='app') - -Not all compilers need to have a specific tool. 
For example, the clang compilers can be detected by the gcc tools when using:: - - $ CC=clang waf configure -""" - -import re -from waflib.Tools import ccroot -from waflib import Utils -from waflib.Logs import debug - -c_compiler = { -'win32': ['msvc', 'gcc', 'clang'], -'cygwin': ['gcc'], -'darwin': ['clang', 'gcc'], -'aix': ['xlc', 'gcc', 'clang'], -'linux': ['gcc', 'clang', 'icc'], -'sunos': ['suncc', 'gcc'], -'irix': ['gcc', 'irixcc'], -'hpux': ['gcc'], -'osf1V': ['gcc'], -'gnu': ['gcc', 'clang'], -'java': ['gcc', 'msvc', 'clang', 'icc'], -'default':['clang', 'gcc'], -} -""" -Dict mapping platform names to Waf tools finding specific C compilers:: - - from waflib.Tools.compiler_c import c_compiler - c_compiler['linux'] = ['gcc', 'icc', 'suncc'] -""" - -def default_compilers(): - build_platform = Utils.unversioned_sys_platform() - possible_compiler_list = c_compiler.get(build_platform, c_compiler['default']) - return ' '.join(possible_compiler_list) - -def configure(conf): - """ - Detects a suitable C compiler - - :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found - """ - try: - test_for_compiler = conf.options.check_c_compiler or default_compilers() - except AttributeError: - conf.fatal("Add options(opt): opt.load('compiler_c')") - - for compiler in re.split('[ ,]+', test_for_compiler): - conf.env.stash() - conf.start_msg('Checking for %r (C compiler)' % compiler) - try: - conf.load(compiler) - except conf.errors.ConfigurationError as e: - conf.env.revert() - conf.end_msg(False) - debug('compiler_c: %r', e) - else: - if conf.env.CC: - conf.end_msg(conf.env.get_flat('CC')) - conf.env.COMPILER_CC = compiler - conf.env.commit() - break - conf.env.revert() - conf.end_msg(False) - else: - conf.fatal('could not configure a C compiler!') - -def options(opt): - """ - This is how to provide compiler preferences on the command-line:: - - $ waf configure --check-c-compiler=gcc - """ - test_for_compiler = default_compilers() - 
opt.load_special_tools('c_*.py', ban=['c_dumbpreproc.py']) - cc_compiler_opts = opt.add_option_group('Configuration options') - cc_compiler_opts.add_option('--check-c-compiler', default=None, - help='list of C compilers to try [%s]' % test_for_compiler, - dest="check_c_compiler") - - for x in test_for_compiler.split(): - opt.load('%s' % x) - diff --git a/waflib/Tools/compiler_cxx.py b/waflib/Tools/compiler_cxx.py deleted file mode 100644 index 1af65a2..0000000 --- a/waflib/Tools/compiler_cxx.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Matthias Jahn jahn dĂ´t matthias Ă¢t freenet dĂ´t de 2007 (pmarat) - -""" -Try to detect a C++ compiler from the list of supported compilers (g++, msvc, etc):: - - def options(opt): - opt.load('compiler_cxx') - def configure(cnf): - cnf.load('compiler_cxx') - def build(bld): - bld.program(source='main.cpp', target='app') - -The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_cxx.cxx_compiler`. To register -a new C++ compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use:: - - from waflib.Tools.compiler_cxx import cxx_compiler - cxx_compiler['win32'] = ['cfoo', 'msvc', 'gcc'] - - def options(opt): - opt.load('compiler_cxx') - def configure(cnf): - cnf.load('compiler_cxx') - def build(bld): - bld.program(source='main.c', target='app') - -Not all compilers need to have a specific tool. 
For example, the clang compilers can be detected by the gcc tools when using:: - - $ CXX=clang waf configure -""" - - -import re -from waflib.Tools import ccroot -from waflib import Utils -from waflib.Logs import debug - -cxx_compiler = { -'win32': ['msvc', 'g++', 'clang++'], -'cygwin': ['g++'], -'darwin': ['clang++', 'g++'], -'aix': ['xlc++', 'g++', 'clang++'], -'linux': ['g++', 'clang++', 'icpc'], -'sunos': ['sunc++', 'g++'], -'irix': ['g++'], -'hpux': ['g++'], -'osf1V': ['g++'], -'gnu': ['g++', 'clang++'], -'java': ['g++', 'msvc', 'clang++', 'icpc'], -'default': ['clang++', 'g++'] -} -""" -Dict mapping the platform names to Waf tools finding specific C++ compilers:: - - from waflib.Tools.compiler_cxx import cxx_compiler - cxx_compiler['linux'] = ['gxx', 'icpc', 'suncxx'] -""" - -def default_compilers(): - build_platform = Utils.unversioned_sys_platform() - possible_compiler_list = cxx_compiler.get(build_platform, cxx_compiler['default']) - return ' '.join(possible_compiler_list) - -def configure(conf): - """ - Detects a suitable C++ compiler - - :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found - """ - try: - test_for_compiler = conf.options.check_cxx_compiler or default_compilers() - except AttributeError: - conf.fatal("Add options(opt): opt.load('compiler_cxx')") - - for compiler in re.split('[ ,]+', test_for_compiler): - conf.env.stash() - conf.start_msg('Checking for %r (C++ compiler)' % compiler) - try: - conf.load(compiler) - except conf.errors.ConfigurationError as e: - conf.env.revert() - conf.end_msg(False) - debug('compiler_cxx: %r', e) - else: - if conf.env.CXX: - conf.end_msg(conf.env.get_flat('CXX')) - conf.env.COMPILER_CXX = compiler - conf.env.commit() - break - conf.env.revert() - conf.end_msg(False) - else: - conf.fatal('could not configure a C++ compiler!') - -def options(opt): - """ - This is how to provide compiler preferences on the command-line:: - - $ waf configure --check-cxx-compiler=gxx - """ - 
test_for_compiler = default_compilers() - opt.load_special_tools('cxx_*.py') - cxx_compiler_opts = opt.add_option_group('Configuration options') - cxx_compiler_opts.add_option('--check-cxx-compiler', default=None, - help='list of C++ compilers to try [%s]' % test_for_compiler, - dest="check_cxx_compiler") - - for x in test_for_compiler.split(): - opt.load('%s' % x) - diff --git a/waflib/Tools/compiler_d.py b/waflib/Tools/compiler_d.py deleted file mode 100644 index 43bb1f6..0000000 --- a/waflib/Tools/compiler_d.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Carlos Rafael Giani, 2007 (dv) -# Thomas Nagy, 2016-2018 (ita) - -""" -Try to detect a D compiler from the list of supported compilers:: - - def options(opt): - opt.load('compiler_d') - def configure(cnf): - cnf.load('compiler_d') - def build(bld): - bld.program(source='main.d', target='app') - -Only three D compilers are really present at the moment: - -* gdc -* dmd, the ldc compiler having a very similar command-line interface -* ldc2 -""" - -import re -from waflib import Utils, Logs - -d_compiler = { -'default' : ['gdc', 'dmd', 'ldc2'] -} -""" -Dict mapping the platform names to lists of names of D compilers to try, in order of preference:: - - from waflib.Tools.compiler_d import d_compiler - d_compiler['default'] = ['gdc', 'dmd', 'ldc2'] -""" - -def default_compilers(): - build_platform = Utils.unversioned_sys_platform() - possible_compiler_list = d_compiler.get(build_platform, d_compiler['default']) - return ' '.join(possible_compiler_list) - -def configure(conf): - """ - Detects a suitable D compiler - - :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found - """ - try: - test_for_compiler = conf.options.check_d_compiler or default_compilers() - except AttributeError: - conf.fatal("Add options(opt): opt.load('compiler_d')") - - for compiler in re.split('[ ,]+', test_for_compiler): - conf.env.stash() - conf.start_msg('Checking for %r (D 
compiler)' % compiler) - try: - conf.load(compiler) - except conf.errors.ConfigurationError as e: - conf.env.revert() - conf.end_msg(False) - Logs.debug('compiler_d: %r', e) - else: - if conf.env.D: - conf.end_msg(conf.env.get_flat('D')) - conf.env.COMPILER_D = compiler - conf.env.commit() - break - conf.env.revert() - conf.end_msg(False) - else: - conf.fatal('could not configure a D compiler!') - -def options(opt): - """ - This is how to provide compiler preferences on the command-line:: - - $ waf configure --check-d-compiler=dmd - """ - test_for_compiler = default_compilers() - d_compiler_opts = opt.add_option_group('Configuration options') - d_compiler_opts.add_option('--check-d-compiler', default=None, - help='list of D compilers to try [%s]' % test_for_compiler, dest='check_d_compiler') - - for x in test_for_compiler.split(): - opt.load('%s' % x) - diff --git a/waflib/Tools/compiler_fc.py b/waflib/Tools/compiler_fc.py deleted file mode 100644 index 96b58e7..0000000 --- a/waflib/Tools/compiler_fc.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 - -import re -from waflib import Utils, Logs -from waflib.Tools import fc - -fc_compiler = { - 'win32' : ['gfortran','ifort'], - 'darwin' : ['gfortran', 'g95', 'ifort'], - 'linux' : ['gfortran', 'g95', 'ifort'], - 'java' : ['gfortran', 'g95', 'ifort'], - 'default': ['gfortran'], - 'aix' : ['gfortran'] -} -""" -Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference:: - - from waflib.Tools.compiler_c import c_compiler - c_compiler['linux'] = ['gfortran', 'g95', 'ifort'] -""" - -def default_compilers(): - build_platform = Utils.unversioned_sys_platform() - possible_compiler_list = fc_compiler.get(build_platform, fc_compiler['default']) - return ' '.join(possible_compiler_list) - -def configure(conf): - """ - Detects a suitable Fortran compiler - - :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found - """ - try: - 
test_for_compiler = conf.options.check_fortran_compiler or default_compilers() - except AttributeError: - conf.fatal("Add options(opt): opt.load('compiler_fc')") - for compiler in re.split('[ ,]+', test_for_compiler): - conf.env.stash() - conf.start_msg('Checking for %r (Fortran compiler)' % compiler) - try: - conf.load(compiler) - except conf.errors.ConfigurationError as e: - conf.env.revert() - conf.end_msg(False) - Logs.debug('compiler_fortran: %r', e) - else: - if conf.env.FC: - conf.end_msg(conf.env.get_flat('FC')) - conf.env.COMPILER_FORTRAN = compiler - conf.env.commit() - break - conf.env.revert() - conf.end_msg(False) - else: - conf.fatal('could not configure a Fortran compiler!') - -def options(opt): - """ - This is how to provide compiler preferences on the command-line:: - - $ waf configure --check-fortran-compiler=ifort - """ - test_for_compiler = default_compilers() - opt.load_special_tools('fc_*.py') - fortran_compiler_opts = opt.add_option_group('Configuration options') - fortran_compiler_opts.add_option('--check-fortran-compiler', default=None, - help='list of Fortran compiler to try [%s]' % test_for_compiler, - dest="check_fortran_compiler") - - for x in test_for_compiler.split(): - opt.load('%s' % x) - diff --git a/waflib/Tools/cs.py b/waflib/Tools/cs.py deleted file mode 100644 index aecca6d..0000000 --- a/waflib/Tools/cs.py +++ /dev/null @@ -1,211 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2018 (ita) - -""" -C# support. 
A simple example:: - - def configure(conf): - conf.load('cs') - def build(bld): - bld(features='cs', source='main.cs', gen='foo') - -Note that the configuration may compile C# snippets:: - - FRAG = ''' - namespace Moo { - public class Test { public static int Main(string[] args) { return 0; } } - }''' - def configure(conf): - conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe', - bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support') -""" - -from waflib import Utils, Task, Options, Errors -from waflib.TaskGen import before_method, after_method, feature -from waflib.Tools import ccroot -from waflib.Configure import conf - -ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES']) -ccroot.lib_patterns['csshlib'] = ['%s'] - -@feature('cs') -@before_method('process_source') -def apply_cs(self): - """ - Create a C# task bound to the attribute *cs_task*. There can be only one C# task by task generator. - """ - cs_nodes = [] - no_nodes = [] - for x in self.to_nodes(self.source): - if x.name.endswith('.cs'): - cs_nodes.append(x) - else: - no_nodes.append(x) - self.source = no_nodes - - bintype = getattr(self, 'bintype', self.gen.endswith('.dll') and 'library' or 'exe') - self.cs_task = tsk = self.create_task('mcs', cs_nodes, self.path.find_or_declare(self.gen)) - tsk.env.CSTYPE = '/target:%s' % bintype - tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath() - self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu')) - - inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}') - if inst_to: - # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically - mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644) - self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod) - -@feature('cs') -@after_method('apply_cs') -def use_cs(self): - """ 
- C# applications honor the **use** keyword:: - - def build(bld): - bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib') - bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi') - """ - names = self.to_list(getattr(self, 'use', [])) - get = self.bld.get_tgen_by_name - for x in names: - try: - y = get(x) - except Errors.WafError: - self.env.append_value('CSFLAGS', '/reference:%s' % x) - continue - y.post() - - tsk = getattr(y, 'cs_task', None) or getattr(y, 'link_task', None) - if not tsk: - self.bld.fatal('cs task has no link task for use %r' % self) - self.cs_task.dep_nodes.extend(tsk.outputs) # dependency - self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs) - self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath()) - -@feature('cs') -@after_method('apply_cs', 'use_cs') -def debug_cs(self): - """ - The C# targets may create .mdb or .pdb files:: - - def build(bld): - bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full') - # csdebug is a value in (True, 'full', 'pdbonly') - """ - csdebug = getattr(self, 'csdebug', self.env.CSDEBUG) - if not csdebug: - return - - node = self.cs_task.outputs[0] - if self.env.CS_NAME == 'mono': - out = node.parent.find_or_declare(node.name + '.mdb') - else: - out = node.change_ext('.pdb') - self.cs_task.outputs.append(out) - - if getattr(self, 'install_task', None): - self.pdb_install_task = self.add_install_files( - install_to=self.install_task.install_to, install_from=out) - - if csdebug == 'pdbonly': - val = ['/debug+', '/debug:pdbonly'] - elif csdebug == 'full': - val = ['/debug+', '/debug:full'] - else: - val = ['/debug-'] - self.env.append_value('CSFLAGS', val) - -@feature('cs') -@after_method('debug_cs') -def doc_cs(self): - """ - The C# targets may create .xml documentation files:: - - def build(bld): - bld(features='cs', source='My.cs', 
bintype='library', gen='my.dll', csdoc=True) - # csdoc is a boolean value - """ - csdoc = getattr(self, 'csdoc', self.env.CSDOC) - if not csdoc: - return - - node = self.cs_task.outputs[0] - out = node.change_ext('.xml') - self.cs_task.outputs.append(out) - - if getattr(self, 'install_task', None): - self.doc_install_task = self.add_install_files( - install_to=self.install_task.install_to, install_from=out) - - self.env.append_value('CSFLAGS', '/doc:%s' % out.abspath()) - -class mcs(Task.Task): - """ - Compile C# files - """ - color = 'YELLOW' - run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' - - def split_argfile(self, cmd): - inline = [cmd[0]] - infile = [] - for x in cmd[1:]: - # csc doesn't want /noconfig in @file - if x.lower() == '/noconfig': - inline.append(x) - else: - infile.append(self.quote_flag(x)) - return (inline, infile) - -def configure(conf): - """ - Find a C# compiler, set the variable MCS for the compiler and CS_NAME (mono or csc) - """ - csc = getattr(Options.options, 'cscbinary', None) - if csc: - conf.env.MCS = csc - conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS') - conf.env.ASS_ST = '/r:%s' - conf.env.RES_ST = '/resource:%s' - - conf.env.CS_NAME = 'csc' - if str(conf.env.MCS).lower().find('mcs') > -1: - conf.env.CS_NAME = 'mono' - -def options(opt): - """ - Add a command-line option for the configuration:: - - $ waf configure --with-csc-binary=/foo/bar/mcs - """ - opt.add_option('--with-csc-binary', type='string', dest='cscbinary') - -class fake_csshlib(Task.Task): - """ - Task used for reading a foreign .net assembly and adding the dependency on it - """ - color = 'YELLOW' - inst_to = None - - def runnable_status(self): - return Task.SKIP_ME - -@conf -def read_csshlib(self, name, paths=[]): - """ - Read a foreign .net assembly for the *use* system:: - - def build(bld): - bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath]) - bld(features='cs', source='Hi.cs', bintype='exe', 
gen='hi.exe', use='ManagedLibrary.dll') - - :param name: Name of the library - :type name: string - :param paths: Folders in which the library may be found - :type paths: list of string - :return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib` - :rtype: :py:class:`waflib.TaskGen.task_gen` - """ - return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib') - diff --git a/waflib/Tools/cxx.py b/waflib/Tools/cxx.py deleted file mode 100644 index 194fad7..0000000 --- a/waflib/Tools/cxx.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2005-2018 (ita) - -"Base for c++ programs and libraries" - -from waflib import TaskGen, Task -from waflib.Tools import c_preproc -from waflib.Tools.ccroot import link_task, stlink_task - -@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++') -def cxx_hook(self, node): - "Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances" - return self.create_compiled_task('cxx', node) - -if not '.c' in TaskGen.task_gen.mappings: - TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp'] - -class cxx(Task.Task): - "Compiles C++ files into object files" - run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}' - vars = ['CXXDEPS'] # unused variable to depend on, just in case - ext_in = ['.h'] # set the build order easily by using ext_out=['.h'] - scan = c_preproc.scan - -class cxxprogram(link_task): - "Links object files into c++ programs" - run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' - vars = ['LINKDEPS'] - 
ext_out = ['.bin'] - inst_to = '${BINDIR}' - -class cxxshlib(cxxprogram): - "Links object files into c++ shared libraries" - inst_to = '${LIBDIR}' - -class cxxstlib(stlink_task): - "Links object files into c++ static libraries" - pass # do not remove - diff --git a/waflib/Tools/d.py b/waflib/Tools/d.py deleted file mode 100644 index e4cf73b..0000000 --- a/waflib/Tools/d.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Carlos Rafael Giani, 2007 (dv) -# Thomas Nagy, 2007-2018 (ita) - -from waflib import Utils, Task, Errors -from waflib.TaskGen import taskgen_method, feature, extension -from waflib.Tools import d_scan, d_config -from waflib.Tools.ccroot import link_task, stlink_task - -class d(Task.Task): - "Compile a d file into an object file" - color = 'GREEN' - run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}' - scan = d_scan.scan - -class d_with_header(d): - "Compile a d file and generate a header" - run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}' - -class d_header(Task.Task): - "Compile d headers" - color = 'BLUE' - run_str = '${D} ${D_HEADER} ${SRC}' - -class dprogram(link_task): - "Link object files into a d program" - run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}' - inst_to = '${BINDIR}' - -class dshlib(dprogram): - "Link object files into a d shared library" - inst_to = '${LIBDIR}' - -class dstlib(stlink_task): - "Link object files into a d static library" - pass # do not remove - -@extension('.d', '.di', '.D') -def d_hook(self, node): - """ - Compile *D* files. 
To get .di files as well as .o files, set the following:: - - def build(bld): - bld.program(source='foo.d', target='app', generate_headers=True) - - """ - ext = Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' and 'obj' or 'o' - out = '%s.%d.%s' % (node.name, self.idx, ext) - def create_compiled_task(self, name, node): - task = self.create_task(name, node, node.parent.find_or_declare(out)) - try: - self.compiled_tasks.append(task) - except AttributeError: - self.compiled_tasks = [task] - return task - - if getattr(self, 'generate_headers', None): - tsk = create_compiled_task(self, 'd_with_header', node) - tsk.outputs.append(node.change_ext(self.env.DHEADER_ext)) - else: - tsk = create_compiled_task(self, 'd', node) - return tsk - -@taskgen_method -def generate_header(self, filename): - """ - See feature request #104:: - - def build(bld): - tg = bld.program(source='foo.d', target='app') - tg.generate_header('blah.d') - # is equivalent to: - #tg = bld.program(source='foo.d', target='app', header_lst='blah.d') - - :param filename: header to create - :type filename: string - """ - try: - self.header_lst.append([filename, self.install_path]) - except AttributeError: - self.header_lst = [[filename, self.install_path]] - -@feature('d') -def process_header(self): - """ - Process the attribute 'header_lst' to create the d header compilation tasks:: - - def build(bld): - bld.program(source='foo.d', target='app', header_lst='blah.d') - """ - for i in getattr(self, 'header_lst', []): - node = self.path.find_resource(i[0]) - if not node: - raise Errors.WafError('file %r not found on d obj' % i[0]) - self.create_task('d_header', node, node.change_ext('.di')) - diff --git a/waflib/Tools/d_config.py b/waflib/Tools/d_config.py deleted file mode 100644 index 6637556..0000000 --- a/waflib/Tools/d_config.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2016-2018 (ita) - -from waflib import Utils -from waflib.Configure import conf - -@conf 
-def d_platform_flags(self): - """ - Sets the extensions dll/so for d programs and libraries - """ - v = self.env - if not v.DEST_OS: - v.DEST_OS = Utils.unversioned_sys_platform() - binfmt = Utils.destos_to_binfmt(self.env.DEST_OS) - if binfmt == 'pe': - v.dprogram_PATTERN = '%s.exe' - v.dshlib_PATTERN = 'lib%s.dll' - v.dstlib_PATTERN = 'lib%s.a' - elif binfmt == 'mac-o': - v.dprogram_PATTERN = '%s' - v.dshlib_PATTERN = 'lib%s.dylib' - v.dstlib_PATTERN = 'lib%s.a' - else: - v.dprogram_PATTERN = '%s' - v.dshlib_PATTERN = 'lib%s.so' - v.dstlib_PATTERN = 'lib%s.a' - -DLIB = ''' -version(D_Version2) { - import std.stdio; - int main() { - writefln("phobos2"); - return 0; - } -} else { - version(Tango) { - import tango.stdc.stdio; - int main() { - printf("tango"); - return 0; - } - } else { - import std.stdio; - int main() { - writefln("phobos1"); - return 0; - } - } -} -''' -"""Detection string for the D standard library""" - -@conf -def check_dlibrary(self, execute=True): - """ - Detects the kind of standard library that comes with the compiler, - and sets conf.env.DLIBRARY to tango, phobos1 or phobos2 - """ - ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True) - if execute: - self.env.DLIBRARY = ret.strip() - diff --git a/waflib/Tools/d_scan.py b/waflib/Tools/d_scan.py deleted file mode 100644 index 4e807a6..0000000 --- a/waflib/Tools/d_scan.py +++ /dev/null @@ -1,211 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2016-2018 (ita) - -""" -Provide a scanner for finding dependencies on d files -""" - -import re -from waflib import Utils - -def filter_comments(filename): - """ - :param filename: d file name - :type filename: string - :rtype: list - :return: a list of characters - """ - txt = Utils.readf(filename) - i = 0 - buf = [] - max = len(txt) - begin = 0 - while i < max: - c = txt[i] - if c == '"' or c == "'": # skip a string or character literal - buf.append(txt[begin:i]) - delim 
# -- tail of waflib/Tools/d_scan.py (reconstructed from the deletion diff) --

class d_parser(object):
	"""
	Parser computing the import dependencies of D source files.
	"""
	def __init__(self, env, incpaths):
		# module names already processed, so each module is scanned once
		self.allnames = []

		# NOTE: these patterns are part of the scanner contract, do not alter
		self.re_module = re.compile(r"module\s+([^;]+)")
		self.re_import = re.compile(r"import\s+([^;]+)")
		self.re_import_bindings = re.compile("([^:]+):(.*)")
		self.re_import_alias = re.compile("[^=]+=(.+)")

		self.env = env

		self.nodes = []  # dependency nodes resolved on the include paths
		self.names = []  # module names that could not be resolved
		self.incpaths = incpaths  # include path nodes to search

	def tryfind(self, filename):
		"""
		Searches for a file matching a module/import directive.

		:param filename: module name to resolve (dots map to directories)
		:type filename: string
		"""
		node = None
		for root in self.incpaths:
			node = root.find_resource(filename.replace('.', '/') + '.d')
			if node:
				self.nodes.append(node)
				self.waiting.append(node)
				break
		if not node and filename not in self.names:
			self.names.append(filename)

	def get_strings(self, code):
		"""
		:param code: d code to parse
		:type code: string
		:return: the module names that the code imports
		:rtype: list of string
		"""
		self.module = ''
		found = []

		# module declaration, if present
		mod = self.re_module.search(code)
		if mod:
			self.module = re.sub(r'\s+', '', mod.group(1))

		# every "import ...;" occurrence in the code
		for imp in self.re_import.finditer(code):
			spec = re.sub(r'\s+', '', imp.group(1))

			# import bindings terminate the list of imports:
			# keep only the part before the ':'
			bindings = self.re_import_bindings.match(spec)
			if bindings:
				spec = bindings.group(1)

			# several modules may be imported at once, comma-separated
			for part in spec.split(','):
				# alias declarations (alias = module) carry the module name on the right
				alias = self.re_import_alias.match(part)
				found.append(alias.group(1) if alias else part)
		return found

	def start(self, node):
		"""
		The parsing starts here: process the file, then drain the queue
		of newly discovered dependencies.

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		while self.waiting:
			self.iter(self.waiting.pop(0))

	def iter(self, node):
		"""
		Scans one file and queues the modules it depends on via
		:py:meth:`waflib.Tools.d_scan.d_parser.tryfind`.

		:param node: input file
		:type node: :py:class:`waflib.Node.Node`
		"""
		code = "".join(filter_comments(node.abspath()))
		for name in self.get_strings(code):
			if name in self.allnames:
				continue  # already handled
			self.allnames.append(name)
			self.tryfind(name)

def scan(self):
	"look for .d/.di used by a d file"
	parser = d_parser(self.env, self.generator.includes_nodes)
	parser.start(self.inputs[0])
	return (parser.nodes, parser.names)
# -- waflib/Tools/dbus.py (reconstructed from the deletion diff; the
#    add_dbus_file() helper straddles the previous hunk and is not repeated) --

@before_method('process_source')
def process_dbus(self):
	"""
	Processes the dbus files stored in the attribute *dbus_lst* to create
	:py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
	"""
	for filename, prefix, mode in getattr(self, 'dbus_lst', []):
		node = self.path.find_resource(filename)
		if not node:
			raise Errors.WafError('file not found ' + filename)
		task = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
		task.env.DBUS_BINDING_TOOL_PREFIX = prefix
		task.env.DBUS_BINDING_TOOL_MODE = mode

class dbus_binding_tool(Task.Task):
	"""
	Compiles a dbus file
	"""
	color = 'BLUE'
	ext_out = ['.h']
	run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
	shell = True # temporary workaround for #795

def configure(conf):
	"""
	Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL``
	"""
	conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
# -- waflib/Tools/dmd.py (reconstructed from the deletion diff; find_dmd()
#    straddles the previous hunk and is not repeated here) --

@conf
def common_flags_ldc(conf):
	"""
	Sets the D flags required by *ldc*
	"""
	v = conf.env
	v.DFLAGS = ['-d-version=Posix']
	v.LINKFLAGS = []
	# position-independent code for shared libraries
	v.DFLAGS_dshlib = ['-relocation-model=pic']

@conf
def common_flags_dmd(conf):
	"""
	Set the flags required by *dmd* or *dmd2*
	"""
	v = conf.env

	# compile flags
	v.D_SRC_F = ['-c']
	v.D_TGT_F = '-of%s'

	# the D compiler drives the link step itself
	v.D_LINKER = v.D
	v.DLNK_SRC_F = ''
	v.DLNK_TGT_F = '-of%s'
	v.DINC_ST = '-I%s'

	# libraries and search paths are forwarded to the C linker via -L
	v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
	v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s'
	v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s'

	v.LINKFLAGS_dprogram = ['-quiet']

	v.DFLAGS_dshlib = ['-fPIC']
	v.LINKFLAGS_dshlib = ['-L-shared']

	# D interface (.di) header generation
	v.DHEADER_ext = '.di'
	v.DFLAGS_d_with_header = ['-H', '-Hf']
	v.D_HDR_F = '%s'

def configure(conf):
	"""
	Configuration for *dmd*, *dmd2*, and *ldc*
	"""
	conf.find_dmd()

	if sys.platform == 'win32':
		out = conf.cmd_and_log(conf.env.D + ['--help'])
		if out.find('D Compiler v2.') > -1:
			conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')

	conf.load('ar')
	conf.load('d')
	conf.common_flags_dmd()
	conf.d_platform_flags()

	# ldc reuses the dmd flags, with a few overrides
	if str(conf.env.D).find('ldc') > -1:
		conf.common_flags_ldc()
# -- waflib/Tools/errcheck.py (reconstructed from the deletion diff) --
#
# Common mistakes highlighting. There is a performance impact, so this tool
# is only loaded when running ``waf -v``.

# known attribute-name typos on task generators -> the intended name
typos = {
'feature':'features',
'sources':'source',
'targets':'target',
'include':'includes',
'export_include':'export_includes',
'define':'defines',
'importpath':'includes',
'installpath':'install_path',
'iscopy':'is_copy',
'uses':'use',
}

# BuildContext factory methods whose keyword arguments are checked for typos
meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']

import sys
from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils
from waflib.Tools import ccroot

def check_same_targets(self):
	# detect distinct tasks writing the same output node, and distinct
	# tasks sharing the same uid
	mp = Utils.defaultdict(list)
	uids = {}

	def check_task(tsk):
		if not isinstance(tsk, Task.Task):
			return
		if hasattr(tsk, 'no_errcheck_out'):
			return

		for node in tsk.outputs:
			mp[node].append(tsk)
		try:
			uids[tsk.uid()].append(tsk)
		except KeyError:
			uids[tsk.uid()] = [tsk]

	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					check_task(tsk)
			except AttributeError:
				# raised if not a task generator, which should be uncommon
				check_task(tg)

	dupe = False
	for (k, v) in mp.items():
		if len(v) > 1:
			dupe = True
			msg = '* Node %r is created more than once%s. The task generators are:' % (k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
			Logs.error(msg)
			for x in v:
				if Logs.verbose > 1:
					Logs.error(' %d. %r', 1 + v.index(x), x.generator)
				else:
					Logs.error(' %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
			Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')

	if not dupe:
		for (k, v) in uids.items():
			if len(v) > 1:
				Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
				tg_details = tsk.generator.name
				if Logs.verbose > 2:
					tg_details = tsk.generator
				for tsk in v:
					Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)

def check_invalid_constraints(self):
	# @extension methods must not also carry @feature/@before_method/@after_method
	feat = set()
	for x in list(TaskGen.feats.values()):
		feat.union(set(x))
	for (x, y) in TaskGen.task_gen.prec.items():
		feat.add(x)
		feat.union(set(y))
	ext = set()
	for x in TaskGen.task_gen.mappings.values():
		ext.add(x.__name__)
	invalid = ext & feat
	if invalid:
		Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method', list(invalid))

	# the build scripts have been read, so we can check for invalid after/before attributes on task classes
	for cls in list(Task.classes.values()):
		if sys.hexversion > 0x3000000 and issubclass(cls, Task.Task) and isinstance(cls.hcode, str):
			raise Errors.WafError('Class %r has hcode value %r of type <str>, expecting <bytes> (use Utils.h_cmd() ?)' % (cls, cls.hcode))

		for x in ('before', 'after'):
			for y in Utils.to_list(getattr(cls, x, [])):
				if not Task.classes.get(y):
					Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__)
		if getattr(cls, 'rule', None):
			Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__)

def replace(m):
	"""
	Replaces existing BuildContext methods to verify parameter names,
	for example ``bld(source=)`` has no ending *s*
	"""
	oldcall = getattr(Build.BuildContext, m)
	def call(self, *k, **kw):
		ret = oldcall(self, *k, **kw)
		for x in typos:
			if x in kw:
				if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
					continue
				Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret)
		return ret
	setattr(Build.BuildContext, m, call)

def enhance_lib():
	"""
	Modifies existing classes and methods to enable error verification
	"""
	for m in meths_typos:
		replace(m)

	# catch '..' in ant_glob patterns
	def ant_glob(self, *k, **kw):
		if k:
			lst = Utils.to_list(k[0])
			for pat in lst:
				sp = pat.split('/')
				if '..' in sp:
					Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
				if '.' in sp:
					Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
		return self.old_ant_glob(*k, **kw)
	Node.Node.old_ant_glob = Node.Node.ant_glob
	Node.Node.ant_glob = ant_glob

	# catch ant_glob on build folders
	def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
		if remove:
			try:
				if self.is_child_of(self.ctx.bldnode) and not quiet:
					quiet = True
					Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self)
			except AttributeError:
				pass
		return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet)
	Node.Node.old_ant_iter = Node.Node.ant_iter
	Node.Node.ant_iter = ant_iter

	# catch conflicting ext_in/ext_out/before/after declarations
	old = Task.is_before
	def is_before(t1, t2):
		ret = old(t1, t2)
		if ret and old(t2, t1):
			Logs.error('Contradictory order constraints in classes %r %r', t1, t2)
		return ret
	Task.is_before = is_before

	# check for bld(feature='cshlib') where no 'c' is given - this can be either
	# a mistake or on purpose, so we only issue a warning
	def check_err_features(self):
		lst = self.to_list(self.features)
		if 'shlib' in lst:
			Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
		for x in ('c', 'cxx', 'd', 'fc'):
			if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
				Logs.error('%r features is probably missing %r', self, x)
	TaskGen.feature('*')(check_err_features)

	# check for erroneous order constraints
	def check_err_order(self):
		if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
			for x in ('before', 'after', 'ext_in', 'ext_out'):
				if hasattr(self, x):
					Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self)
		else:
			for x in ('before', 'after'):
				for y in self.to_list(getattr(self, x, [])):
					if not Task.classes.get(y):
						Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
	TaskGen.feature('*')(check_err_order)

	# check for duplicate target nodes and invalid @extension annotations on compile
	def check_compile(self):
		check_invalid_constraints(self)
		try:
			ret = self.orig_compile()
		finally:
			check_same_targets(self)
		return ret
	Build.BuildContext.orig_compile = Build.BuildContext.compile
	Build.BuildContext.compile = check_compile

	# check for invalid build groups #914
	def use_rec(self, name, **kw):
		try:
			y = self.bld.get_tgen_by_name(name)
		except Errors.WafError:
			pass
		else:
			idx = self.bld.get_group_idx(self)
			odx = self.bld.get_group_idx(y)
			if odx > idx:
				msg = "Invalid 'use' across build groups:"
				if Logs.verbose > 1:
					msg += '\n target %r\n uses:\n %r' % (self, y)
				else:
					msg += " %r uses %r (try 'waf -v -v' for the full error)" % (self.name, name)
				raise Errors.WafError(msg)
		self.orig_use_rec(name, **kw)
	TaskGen.task_gen.orig_use_rec = TaskGen.task_gen.use_rec
	TaskGen.task_gen.use_rec = use_rec

	# check for env.append
	def _getattr(self, name, default=None):
		if name == 'append' or name == 'add':
			raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
		elif name == 'prepend':
			raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
		if name in self.__slots__:
			return super(ConfigSet.ConfigSet, self).__getattr__(name, default)
		else:
			return self[name]
	ConfigSet.ConfigSet.__getattr__ = _getattr


def options(opt):
	"""
	Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options
	"""
	enhance_lib()
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016-2018 (ita)

"""
Fortran support
"""

from waflib import Utils, Task, Errors
from waflib.Tools import ccroot, fc_config, fc_scan
from waflib.TaskGen import extension
from waflib.Configure import conf

# uselib variables consumed by the Fortran task classes
ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS'])
ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])

@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')
def fc_hook(self, node):
	"Binds the Fortran file extensions create :py:class:`waflib.Tools.fc.fc` instances"
	return self.create_compiled_task('fc', node)

@conf
def modfile(conf, name):
	"""
	Turns a module name into the right module file name.
	Defaults to all lower case.
	"""
	if name.find(':') >= 0:
		# Depending on a submodule!
		separator = conf.env.FC_SUBMOD_SEPARATOR or '@'
		# Ancestors of the submodule will be prefixed to the
		# submodule name, separated by a colon.
		modpath = name.split(':')
		# Only the ancestor (actual) module and the submodule name
		# will be used for the filename.
		modname = modpath[0] + separator + modpath[-1]
		suffix = conf.env.FC_SUBMOD_SUFFIX or '.smod'
	else:
		modname = name
		suffix = '.mod'

	return {'lower'     :modname.lower() + suffix.lower(),
		'lower.MOD' :modname.lower() + suffix.upper(),
		'UPPER.mod' :modname.upper() + suffix.lower(),
		'UPPER'     :modname.upper() + suffix.upper()}[conf.env.FC_MOD_CAPITALIZATION or 'lower']

def get_fortran_tasks(tsk):
	"""
	Obtains all fortran tasks from the same build group. Those tasks must not have
	the attribute 'nomod' or 'mod_fortran_done'

	:return: a list of :py:class:`waflib.Tools.fc.fc` instances
	"""
	bld = tsk.generator.bld
	tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
	return [x for x in tasks if isinstance(x, fc) and not getattr(x, 'nomod', None) and not getattr(x, 'mod_fortran_done', None)]

class fc(Task.Task):
	"""
	Fortran tasks can only run when all fortran tasks in a current task group are ready to be executed
	This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency)
	Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop
	"""
	color = 'GREEN'
	run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}'
	vars = ["FORTRANMODPATHFLAG"]

	def scan(self):
		"""Fortran dependency scanner"""
		tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
		tmp.task = self
		tmp.start(self.inputs[0])
		return (tmp.nodes, tmp.names)

	def runnable_status(self):
		"""
		Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks
		executed by the main thread so there are no concurrency issues
		"""
		if getattr(self, 'mod_fortran_done', None):
			return super(fc, self).runnable_status()

		# now, if we reach this part it is because this fortran task is the first in the list
		bld = self.generator.bld

		# obtain the fortran tasks
		lst = get_fortran_tasks(self)

		# disable this method for other tasks
		for tsk in lst:
			tsk.mod_fortran_done = True

		# wait for all the .f tasks to be ready for execution
		# and ensure that the scanners are called at least once
		for tsk in lst:
			ret = tsk.runnable_status()
			if ret == Task.ASK_LATER:
				# we have to wait for one of the other fortran tasks to be ready
				# this may deadlock if there are dependencies between fortran tasks
				# but this should not happen (we are setting them here!)
				for x in lst:
					x.mod_fortran_done = None

				return Task.ASK_LATER

		ins = Utils.defaultdict(set)
		outs = Utils.defaultdict(set)

		# the .mod files to create
		for tsk in lst:
			key = tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('MOD@'):
					name = bld.modfile(x.replace('MOD@', ''))
					node = bld.srcnode.find_or_declare(name)
					tsk.set_outputs(node)
					outs[node].add(tsk)

		# the .mod files to use
		for tsk in lst:
			key = tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('USE@'):
					name = bld.modfile(x.replace('USE@', ''))
					node = bld.srcnode.find_resource(name)
					if node and node not in tsk.outputs:
						if not node in bld.node_deps[key]:
							bld.node_deps[key].append(node)
						ins[node].add(tsk)

		# if the intersection matches, set the order
		for k in ins.keys():
			for a in ins[k]:
				a.run_after.update(outs[k])
				for x in outs[k]:
					self.generator.bld.producer.revdeps[x].add(a)

				# the scanner cannot output nodes, so we have to set them
				# ourselves as task.dep_nodes (additional input nodes)
				tmp = []
				for t in outs[k]:
					tmp.extend(t.outputs)
				a.dep_nodes.extend(tmp)
				a.dep_nodes.sort(key=lambda x: x.abspath())

		# the task objects have changed: clear the signature cache
		for tsk in lst:
			try:
				delattr(tsk, 'cache_sig')
			except AttributeError:
				pass

		return super(fc, self).runnable_status()

class fcprogram(ccroot.link_task):
	"""Links Fortran programs"""
	color = 'YELLOW'
	run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
	inst_to = '${BINDIR}'

class fcshlib(fcprogram):
	"""Links Fortran libraries"""
	inst_to = '${LIBDIR}'

class fcstlib(ccroot.stlink_task):
	"""Links Fortran static libraries (uses ar by default)"""
	pass # do not remove the pass statement

class fcprogram_test(fcprogram):
	"""Custom link task to obtain compiler outputs for Fortran configuration tests"""

	def runnable_status(self):
		"""This task is always executed"""
		ret = super(fcprogram_test, self).runnable_status()
		if ret == Task.SKIP_ME:
			ret = Task.RUN_ME
		return ret

	def exec_command(self, cmd, **kw):
		"""Stores the compiler std our/err onto the build context, to bld.out + bld.err"""
		bld = self.generator.bld

		kw['shell'] = isinstance(cmd, str)
		kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
		kw['cwd'] = self.get_cwd()
		bld.out = bld.err = ''

		bld.to_log('command: %s\n' % cmd)

		kw['output'] = 0
		try:
			(bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
		except Errors.WafError:
			return -1

		if bld.out:
			bld.to_log('out: %s\n' % bld.out)
		if bld.err:
			bld.to_log('err: %s\n' % bld.err)
- -@conf -def fc_flags(conf): - """ - Defines common fortran configuration flags and file extensions - """ - v = conf.env - - v.FC_SRC_F = [] - v.FC_TGT_F = ['-c', '-o'] - v.FCINCPATH_ST = '-I%s' - v.FCDEFINES_ST = '-D%s' - - if not v.LINK_FC: - v.LINK_FC = v.FC - - v.FCLNK_SRC_F = [] - v.FCLNK_TGT_F = ['-o'] - - v.FCFLAGS_fcshlib = ['-fpic'] - v.LINKFLAGS_fcshlib = ['-shared'] - v.fcshlib_PATTERN = 'lib%s.so' - - v.fcstlib_PATTERN = 'lib%s.a' - - v.FCLIB_ST = '-l%s' - v.FCLIBPATH_ST = '-L%s' - v.FCSTLIB_ST = '-l%s' - v.FCSTLIBPATH_ST = '-L%s' - v.FCSTLIB_MARKER = '-Wl,-Bstatic' - v.FCSHLIB_MARKER = '-Wl,-Bdynamic' - - v.SONAME_ST = '-Wl,-h,%s' - -@conf -def fc_add_flags(conf): - """ - Adds FCFLAGS / LDFLAGS / LINKFLAGS from os.environ to conf.env - """ - conf.add_os_flags('FCPPFLAGS', dup=False) - conf.add_os_flags('FCFLAGS', dup=False) - conf.add_os_flags('LINKFLAGS', dup=False) - conf.add_os_flags('LDFLAGS', dup=False) - -@conf -def check_fortran(self, *k, **kw): - """ - Compiles a Fortran program to ensure that the settings are correct - """ - self.check_cc( - fragment = FC_FRAGMENT, - compile_filename = 'test.f', - features = 'fc fcprogram', - msg = 'Compiling a simple fortran app') - -@conf -def check_fc(self, *k, **kw): - """ - Same as :py:func:`waflib.Tools.c_config.check` but defaults to the *Fortran* programming language - (this overrides the C defaults in :py:func:`waflib.Tools.c_config.validate_c`) - """ - kw['compiler'] = 'fc' - if not 'compile_mode' in kw: - kw['compile_mode'] = 'fc' - if not 'type' in kw: - kw['type'] = 'fcprogram' - if not 'compile_filename' in kw: - kw['compile_filename'] = 'test.f90' - if not 'code' in kw: - kw['code'] = FC_FRAGMENT - return self.check(*k, **kw) - -# ------------------------------------------------------------------------ -# --- These are the default platform modifiers, refactored here for -# convenience. gfortran and g95 have much overlap. 
-# ------------------------------------------------------------------------ - -@conf -def fortran_modifier_darwin(conf): - """ - Defines Fortran flags and extensions for OSX systems - """ - v = conf.env - v.FCFLAGS_fcshlib = ['-fPIC'] - v.LINKFLAGS_fcshlib = ['-dynamiclib'] - v.fcshlib_PATTERN = 'lib%s.dylib' - v.FRAMEWORKPATH_ST = '-F%s' - v.FRAMEWORK_ST = ['-framework'] - - v.LINKFLAGS_fcstlib = [] - - v.FCSHLIB_MARKER = '' - v.FCSTLIB_MARKER = '' - v.SONAME_ST = '' - -@conf -def fortran_modifier_win32(conf): - """ - Defines Fortran flags for Windows platforms - """ - v = conf.env - v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe' - - v.fcshlib_PATTERN = '%s.dll' - v.implib_PATTERN = '%s.dll.a' - v.IMPLIB_ST = '-Wl,--out-implib,%s' - - v.FCFLAGS_fcshlib = [] - - # Auto-import is enabled by default even without this option, - # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages - # that the linker emits otherwise. - v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import']) - -@conf -def fortran_modifier_cygwin(conf): - """ - Defines Fortran flags for use on cygwin - """ - fortran_modifier_win32(conf) - v = conf.env - v.fcshlib_PATTERN = 'cyg%s.dll' - v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base']) - v.FCFLAGS_fcshlib = [] - -# ------------------------------------------------------------------------ - -@conf -def check_fortran_dummy_main(self, *k, **kw): - """ - Determines if a main function is needed by compiling a code snippet with - the C compiler and linking it with the Fortran compiler (useful on unix-like systems) - """ - if not self.env.CC: - self.fatal('A c compiler is required for check_fortran_dummy_main') - - lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN'] - lst.extend([m.lower() for m in lst]) - lst.append('') - - self.start_msg('Detecting whether we need a dummy main') - for main in lst: - kw['fortran_main'] = main - try: - self.check_cc( - fragment = 'int %s() { 
return 0; }\n' % (main or 'test'), - features = 'c fcprogram', - mandatory = True - ) - if not main: - self.env.FC_MAIN = -1 - self.end_msg('no') - else: - self.env.FC_MAIN = main - self.end_msg('yes %s' % main) - break - except self.errors.ConfigurationError: - pass - else: - self.end_msg('not found') - self.fatal('could not detect whether fortran requires a dummy main, see the config.log') - -# ------------------------------------------------------------------------ - -GCC_DRIVER_LINE = re.compile('^Driving:') -POSIX_STATIC_EXT = re.compile(r'\S+\.a') -POSIX_LIB_FLAGS = re.compile(r'-l\S+') - -@conf -def is_link_verbose(self, txt): - """Returns True if 'useful' link options can be found in txt""" - assert isinstance(txt, str) - for line in txt.splitlines(): - if not GCC_DRIVER_LINE.search(line): - if POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line): - return True - return False - -@conf -def check_fortran_verbose_flag(self, *k, **kw): - """ - Checks what kind of verbose (-v) flag works, then sets it to env.FC_VERBOSE_FLAG - """ - self.start_msg('fortran link verbose flag') - for x in ('-v', '--verbose', '-verbose', '-V'): - try: - self.check_cc( - features = 'fc fcprogram_test', - fragment = FC_FRAGMENT2, - compile_filename = 'test.f', - linkflags = [x], - mandatory=True) - except self.errors.ConfigurationError: - pass - else: - # output is on stderr or stdout (for xlf) - if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out): - self.end_msg(x) - break - else: - self.end_msg('failure') - self.fatal('Could not obtain the fortran link verbose flag (see config.log)') - - self.env.FC_VERBOSE_FLAG = x - return x - -# ------------------------------------------------------------------------ - -# linkflags which match those are ignored -LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*'] -if os.name == 'nt': - LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', 
r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname']) -else: - LINKFLAGS_IGNORED.append(r'-lgcc*') -RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED] - -def _match_ignore(line): - """Returns True if the line should be ignored (Fortran verbose flag test)""" - for i in RLINKFLAGS_IGNORED: - if i.match(line): - return True - return False - -def parse_fortran_link(lines): - """Given the output of verbose link of Fortran compiler, this returns a - list of flags necessary for linking using the standard linker.""" - final_flags = [] - for line in lines: - if not GCC_DRIVER_LINE.match(line): - _parse_flink_line(line, final_flags) - return final_flags - -SPACE_OPTS = re.compile('^-[LRuYz]$') -NOSPACE_OPTS = re.compile('^-[RL]') - -def _parse_flink_token(lexer, token, tmp_flags): - # Here we go (convention for wildcard is shell, not regex !) - # 1 TODO: we first get some root .a libraries - # 2 TODO: take everything starting by -bI:* - # 3 Ignore the following flags: -lang* | -lcrt*.o | -lc | - # -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*) - # 4 take into account -lkernel32 - # 5 For options of the kind -[[LRuYz]], as they take one argument - # after, the actual option is the next token - # 6 For -YP,*: take and replace by -Larg where arg is the old - # argument - # 7 For -[lLR]*: take - - # step 3 - if _match_ignore(token): - pass - # step 4 - elif token.startswith('-lkernel32') and sys.platform == 'cygwin': - tmp_flags.append(token) - # step 5 - elif SPACE_OPTS.match(token): - t = lexer.get_token() - if t.startswith('P,'): - t = t[2:] - for opt in t.split(os.pathsep): - tmp_flags.append('-L%s' % opt) - # step 6 - elif NOSPACE_OPTS.match(token): - tmp_flags.append(token) - # step 7 - elif POSIX_LIB_FLAGS.match(token): - tmp_flags.append(token) - else: - # ignore anything not explicitly taken into account - pass - - t = lexer.get_token() - return t - -def _parse_flink_line(line, final_flags): - """private""" - lexer 
= shlex.shlex(line, posix = True) - lexer.whitespace_split = True - - t = lexer.get_token() - tmp_flags = [] - while t: - t = _parse_flink_token(lexer, t, tmp_flags) - - final_flags.extend(tmp_flags) - return final_flags - -@conf -def check_fortran_clib(self, autoadd=True, *k, **kw): - """ - Obtains the flags for linking with the C library - if this check works, add uselib='CLIB' to your task generators - """ - if not self.env.FC_VERBOSE_FLAG: - self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?') - - self.start_msg('Getting fortran runtime link flags') - try: - self.check_cc( - fragment = FC_FRAGMENT2, - compile_filename = 'test.f', - features = 'fc fcprogram_test', - linkflags = [self.env.FC_VERBOSE_FLAG] - ) - except Exception: - self.end_msg(False) - if kw.get('mandatory', True): - conf.fatal('Could not find the c library flags') - else: - out = self.test_bld.err - flags = parse_fortran_link(out.splitlines()) - self.end_msg('ok (%s)' % ' '.join(flags)) - self.env.LINKFLAGS_CLIB = flags - return flags - return [] - -def getoutput(conf, cmd, stdin=False): - """ - Obtains Fortran command outputs - """ - from waflib import Errors - if conf.env.env: - env = conf.env.env - else: - env = dict(os.environ) - env['LANG'] = 'C' - input = stdin and '\n'.encode() or None - try: - out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input) - except Errors.WafError as e: - # An WafError might indicate an error code during the command - # execution, in this case we still obtain the stderr and stdout, - # which we can use to find the version string. - if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')): - raise e - else: - # Ignore the return code and return the original - # stdout and stderr. 
- out = e.stdout - err = e.stderr - except Exception: - conf.fatal('could not determine the compiler version %r' % cmd) - return (out, err) - -# ------------------------------------------------------------------------ - -ROUTINES_CODE = """\ - subroutine foobar() - return - end - subroutine foo_bar() - return - end -""" - -MAIN_CODE = """ -void %(dummy_func_nounder)s(void); -void %(dummy_func_under)s(void); -int %(main_func_name)s() { - %(dummy_func_nounder)s(); - %(dummy_func_under)s(); - return 0; -} -""" - -@feature('link_main_routines_func') -@before_method('process_source') -def link_main_routines_tg_method(self): - """ - The configuration test declares a unique task generator, - so we create other task generators from there for fortran link tests - """ - def write_test_file(task): - task.outputs[0].write(task.generator.code) - bld = self.bld - bld(rule=write_test_file, target='main.c', code=MAIN_CODE % self.__dict__) - bld(rule=write_test_file, target='test.f', code=ROUTINES_CODE) - bld(features='fc fcstlib', source='test.f', target='test') - bld(features='c fcprogram', source='main.c', target='app', use='test') - -def mangling_schemes(): - """ - Generate triplets for use with mangle_name - (used in check_fortran_mangling) - the order is tuned for gfortan - """ - for u in ('_', ''): - for du in ('', '_'): - for c in ("lower", "upper"): - yield (u, du, c) - -def mangle_name(u, du, c, name): - """Mangle a name from a triplet (used in check_fortran_mangling)""" - return getattr(name, c)() + u + (name.find('_') != -1 and du or '') - -@conf -def check_fortran_mangling(self, *k, **kw): - """ - Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found - - This test will compile a fortran static library, then link a c app against it - """ - if not self.env.CC: - self.fatal('A c compiler is required for link_main_routines') - if not self.env.FC: - self.fatal('A fortran compiler is required for link_main_routines') - if not self.env.FC_MAIN: - 
@conf
def detect_openmp(self):
	"""
	Detects OpenMP compiler support.

	Tries a list of candidate flags; the first one accepted by the Fortran
	compiler is stored under the ``OPENMP`` uselib variable
	(``FCFLAGS_OPENMP``/``LINKFLAGS_OPENMP``). Fails fatally when none works.
	"""
	for candidate in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
		try:
			self.check_fc(
				msg = 'Checking for OpenMP flag %s' % candidate,
				fragment = 'program main\n call omp_get_num_threads()\nend program main',
				fcflags = candidate,
				linkflags = candidate,
				uselib_store = 'OPENMP'
			)
		except self.errors.ConfigurationError:
			# this flag is not supported, try the next one
			continue
		else:
			return
	self.fatal('Could not find OpenMP')
INC_REGEX = r"""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX = r"""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX = r"""(?:^|;)\s*MODULE(?!\s+(?:PROCEDURE|SUBROUTINE|FUNCTION))\s+(\w+)"""
SMD_REGEX = r"""(?:^|;)\s*SUBMODULE\s*\(([\w:]+)\)\s*(\w+)"""

re_inc = re.compile(INC_REGEX, re.I)
re_use = re.compile(USE_REGEX, re.I)
re_mod = re.compile(MOD_REGEX, re.I)
re_smd = re.compile(SMD_REGEX, re.I)

class fortran_parser(object):
	"""
	Resolves Fortran dependencies by scanning sources for
	INCLUDE/USE/MODULE/SUBMODULE statements. After :py:meth:`start`:

	* ``nodes`` holds the include-file nodes found on the include paths
	* ``names`` holds unresolved names (``USE@``/``MOD@`` markers and
	  includes that were not found)
	"""
	def __init__(self, incpaths):
		# include names already processed, to avoid re-scanning
		self.seen = []
		# waflib.Node.Node dependencies to return
		self.nodes = []
		# unresolved names to return
		self.names = []
		# waflib.Node.Node include paths to search
		self.incpaths = incpaths

	def find_deps(self, node):
		"""
		Parses a Fortran file to obtain the dependencies used/provided.

		:param node: fortran file to read
		:type node: :py:class:`waflib.Node.Node`
		:return: (includes, modules used, modules provided)
		:rtype: tuple of lists of strings
		"""
		includes, used, provided = [], [], []
		# line-by-line regexp search (same as the classical waf scanner)
		for line in node.read().splitlines():
			hit = re_inc.search(line)
			if hit:
				includes.append(hit.group(1))
			hit = re_use.search(line)
			if hit:
				used.append(hit.group(1))
			hit = re_mod.search(line)
			if hit:
				provided.append(hit.group(1))
			hit = re_smd.search(line)
			if hit:
				# a submodule both depends on its parent module and
				# provides the combined name parent:child
				used.append(hit.group(1))
				provided.append('{0}:{1}'.format(hit.group(1), hit.group(2)))
		return (includes, used, provided)

	def start(self, node):
		"""
		Starts parsing from *node*; ``self.waiting`` queues the include
		files discovered along the way.

		:param node: fortran file
		:type node: :py:class:`waflib.Node.Node`
		"""
		self.waiting = [node]
		while self.waiting:
			self.iter(self.waiting.pop(0))

	def iter(self, node):
		"""
		Processes a single file: records its includes, the modules it uses
		and the modules it provides.
		"""
		includes, used, provided = self.find_deps(node)
		for inc in includes:
			if inc not in self.seen:
				self.seen.append(inc)
				self.tryfind_header(inc)

		for mod in used:
			marker = "USE@%s" % mod
			if marker not in self.names:
				self.names.append(marker)

		for mod in provided:
			marker = "MOD@%s" % mod
			if marker not in self.names:
				self.names.append(marker)

	def tryfind_header(self, filename):
		"""
		Resolves an include file against the include paths; queues it for
		parsing when found, otherwise records the bare name in ``names``.

		:param filename: file name
		:type filename: string
		"""
		for path in self.incpaths:
			found = path.find_resource(filename)
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				return
		if filename not in self.names:
			self.names.append(filename)
-""" - -import os, re -from waflib import Task, TaskGen -from waflib.Tools import ccroot - -def decide_ext(self, node): - if 'cxx' in self.features: - return ['.lex.cc'] - return ['.lex.c'] - -def flexfun(tsk): - env = tsk.env - bld = tsk.generator.bld - wd = bld.variant_dir - def to_list(xx): - if isinstance(xx, str): - return [xx] - return xx - tsk.last_cmd = lst = [] - lst.extend(to_list(env.FLEX)) - lst.extend(to_list(env.FLEXFLAGS)) - inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs] - if env.FLEX_MSYS: - inputs = [x.replace(os.sep, '/') for x in inputs] - lst.extend(inputs) - lst = [x for x in lst if x] - txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0) - tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207 - -TaskGen.declare_chain( - name = 'flex', - rule = flexfun, # issue #854 - ext_in = '.l', - decider = decide_ext, -) - -# To support the following: -# bld(features='c', flexflags='-P/foo') -Task.classes['flex'].vars = ['FLEXFLAGS', 'FLEX'] -ccroot.USELIB_VARS['c'].add('FLEXFLAGS') -ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS') - -def configure(conf): - """ - Detect the *flex* program - """ - conf.find_program('flex', var='FLEX') - conf.env.FLEXFLAGS = ['-t'] - - if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]): - # this is the flex shipped with MSYS - conf.env.FLEX_MSYS = True - diff --git a/waflib/Tools/g95.py b/waflib/Tools/g95.py deleted file mode 100644 index f69ba4f..0000000 --- a/waflib/Tools/g95.py +++ /dev/null @@ -1,66 +0,0 @@ -#! 
@conf
def get_g95_version(conf, fc):
	"""
	Detects the g95 version by parsing the ``--version`` banner
	(checking stdout first, then stderr) and stores the
	(major, minor) pair in ``conf.env.FC_VERSION``.
	"""
	search = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
	out, err = fc_config.getoutput(conf, fc + ['--version'], stdin=False)
	match = search(out) if out else search(err)
	if not match:
		conf.fatal('cannot determine g95 version')
	info = match.groupdict()
	conf.env.FC_VERSION = (info['major'], info['minor'])
@conf
def gcc_common_flags(conf):
	"""
	Common flags for gcc on nearly all platforms
	"""
	env = conf.env

	# compilation
	env.CC_SRC_F = []
	env.CC_TGT_F = ['-c', '-o']

	# linking: default to the compiler driver itself
	if not env.LINK_CC:
		env.LINK_CC = env.CC
	env.CCLNK_SRC_F = []
	env.CCLNK_TGT_F = ['-o']
	env.CPPPATH_ST = '-I%s'
	env.DEFINES_ST = '-D%s'

	env.LIB_ST = '-l%s' # template for adding libs
	env.LIBPATH_ST = '-L%s' # template for adding libpaths
	env.STLIB_ST = '-l%s'
	env.STLIBPATH_ST = '-L%s'
	env.RPATH_ST = '-Wl,-rpath,%s'

	env.SONAME_ST = '-Wl,-h,%s'
	env.SHLIB_MARKER = '-Wl,-Bdynamic'
	env.STLIB_MARKER = '-Wl,-Bstatic'

	# output patterns and per-kind flags
	env.cprogram_PATTERN = '%s'

	env.CFLAGS_cshlib = ['-fPIC']
	env.LINKFLAGS_cshlib = ['-shared']
	env.cshlib_PATTERN = 'lib%s.so'

	env.LINKFLAGS_cstlib = ['-Wl,-Bstatic']
	env.cstlib_PATTERN = 'lib%s.a'

	env.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
	env.CFLAGS_MACBUNDLE = ['-fPIC']
	env.macbundle_PATTERN = '%s.bundle'
@conf
def gcc_modifier_darwin(conf):
	"""Configuration flags for executing gcc on MacOS"""
	env = conf.env
	# shared objects are dylibs, built position-independent
	env.CFLAGS_cshlib = ['-fPIC']
	env.LINKFLAGS_cshlib = ['-dynamiclib']
	env.cshlib_PATTERN = 'lib%s.dylib'

	# framework handling
	env.FRAMEWORKPATH_ST = '-F%s'
	env.FRAMEWORK_ST = ['-framework']
	env.ARCH_ST = ['-arch']

	env.LINKFLAGS_cstlib = []

	# no -Bstatic/-Bdynamic or -h soname equivalents on the darwin linker
	env.SHLIB_MARKER = []
	env.STLIB_MARKER = []
	env.SONAME_ST = []
@conf
def common_flags_gdc(conf):
	"""
	Sets the flags required by *gdc*
	"""
	env = conf.env

	env.DFLAGS = []

	# compilation
	env.D_SRC_F = ['-c']
	env.D_TGT_F = '-o%s'

	# linking: reuse the compiler driver
	env.D_LINKER = env.D
	env.DLNK_SRC_F = ''
	env.DLNK_TGT_F = '-o%s'
	env.DINC_ST = '-I%s'

	env.DSHLIB_MARKER = env.DSTLIB_MARKER = ''
	env.DSTLIB_ST = env.DSHLIB_ST = '-l%s'
	env.DSTLIBPATH_ST = env.DLIBPATH_ST = '-L%s'

	env.LINKFLAGS_dshlib = ['-shared']

	# D interface headers
	env.DHEADER_ext = '.di'
	env.DFLAGS_d_with_header = '-fintfc'
	env.D_HDR_F = '-fintfc-file=%s'
@conf
def get_gfortran_version(conf, fc):
	"""
	Detects the gfortran version.

	First verifies that *fc* really is GNU Fortran (its ``--version``
	banner), then queries the preprocessor macro dump (``-dM -E -``) and
	extracts ``__GNUC__``/``__GNUC_MINOR__``/``__GNUC_PATCHLEVEL__`` into
	``conf.env.FC_VERSION`` — see also ``c_config.get_cc_version``.

	:param fc: fortran compiler command line
	:type fc: list of string
	:raises: a fatal configuration error when the compiler is not gfortran
		or the version macros cannot be read
	"""
	import shlex  # hoisted to the top of the function (was mid-function)

	# ensure this is actually gfortran, not an imposter
	version_re = re.compile(r"GNU\s*Fortran", re.I).search
	cmd = fc + ['--version']
	out, err = fc_config.getoutput(conf, cmd, stdin=False)
	match = version_re(out) if out else version_re(err)
	if not match:
		conf.fatal('Could not determine the compiler type')

	# now get more detailed info by dumping the predefined macros
	cmd = fc + ['-dM', '-E', '-']
	out, err = fc_config.getoutput(conf, cmd, stdin=True)

	if out.find('__GNUC__') < 0:
		conf.fatal('Could not determine the compiler type')

	# each interesting line looks like '#define <key> <value>'
	defines = {}
	for line in out.splitlines():
		parts = shlex.split(line)
		if len(parts) > 2:
			defines[parts[1]] = parts[2]

	# note: the original defined unused isD/isT inner helpers here
	# (dead code copied from c_config); removed
	conf.env.FC_VERSION = (defines['__GNUC__'], defines['__GNUC_MINOR__'], defines['__GNUC_PATCHLEVEL__'])
class glib_genmarshal(Task.Task):
	"""
	Runs ``glib-genmarshal`` twice over the input list file: once with
	``--header`` to produce the .h output, then with ``--body`` (appended
	after an ``#include`` of that header) to produce the .c output.
	"""
	vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
	color = 'BLUE'
	ext_out = ['.h']
	def run(self):
		bld = self.generator.bld
		get = self.env.get_flat

		# first pass: generate the header
		header_cmd = "%s %s --prefix=%s --header > %s" % (
			get('GLIB_GENMARSHAL'),
			self.inputs[0].srcpath(),
			get('GLIB_GENMARSHAL_PREFIX'),
			self.outputs[0].abspath()
		)
		ret = bld.exec_command(header_cmd)
		if ret:
			return ret

		# the generated body must include the generated header
		self.outputs[1].write('''#include "%s"\n''' % self.outputs[0].name)

		# second pass: append the marshaller bodies
		body_cmd = "%s %s --prefix=%s --body >> %s" % (
			get('GLIB_GENMARSHAL'),
			self.inputs[0].srcpath(),
			get('GLIB_GENMARSHAL_PREFIX'),
			self.outputs[1].abspath()
		)
		return bld.exec_command(body_cmd)
@taskgen_method
def add_enums(self, source='', target='',
		file_head='', file_prod='', file_tail='', enum_prod='',
		value_head='', value_prod='', value_tail='', comments=''):
	"""
	Adds a file to the list of enum files to process. Stores them in the
	attribute *enums_list* and schedules the *process_enums* method.

	:param source: enum file to process
	:type source: string
	:param target: target file
	:type target: string
	:param file_head: unused
	:param file_prod: unused
	:param file_tail: unused
	:param enum_prod: unused
	:param value_head: unused
	:param value_prod: unused
	:param value_tail: unused
	:param comments: comments
	:type comments: string
	"""
	if not hasattr(self, 'enums_list'):
		self.enums_list = []
	self.meths.append('process_enums')
	entry = {
		'source': source,
		'template': '',
		'target': target,
		'file-head': file_head,
		'file-prod': file_prod,
		'file-tail': file_tail,
		'enum-prod': enum_prod,
		'value-head': value_head,
		'value-prod': value_prod,
		'value-tail': value_tail,
		'comments': comments,
	}
	self.enums_list.append(entry)
- """ - for enum in getattr(self, 'enums_list', []): - task = self.create_task('glib_mkenums') - env = task.env - - inputs = [] - - # process the source - source_list = self.to_list(enum['source']) - if not source_list: - raise Errors.WafError('missing source ' + str(enum)) - source_list = [self.path.find_resource(k) for k in source_list] - inputs += source_list - env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list] - - # find the target - if not enum['target']: - raise Errors.WafError('missing target ' + str(enum)) - tgt_node = self.path.find_or_declare(enum['target']) - if tgt_node.name.endswith('.c'): - self.source.append(tgt_node) - env.GLIB_MKENUMS_TARGET = tgt_node.abspath() - - - options = [] - - if enum['template']: # template, if provided - template_node = self.path.find_resource(enum['template']) - options.append('--template %s' % (template_node.abspath())) - inputs.append(template_node) - params = {'file-head' : '--fhead', - 'file-prod' : '--fprod', - 'file-tail' : '--ftail', - 'enum-prod' : '--eprod', - 'value-head' : '--vhead', - 'value-prod' : '--vprod', - 'value-tail' : '--vtail', - 'comments': '--comments'} - for param, option in params.items(): - if enum[param]: - options.append('%s %r' % (option, enum[param])) - - env.GLIB_MKENUMS_OPTIONS = ' '.join(options) - - # update the task instance - task.set_inputs(inputs) - task.set_outputs(tgt_node) - -class glib_mkenums(Task.Task): - """ - Processes enum files - """ - run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}' - color = 'PINK' - ext_out = ['.h'] - -######################################### gsettings - -@taskgen_method -def add_settings_schemas(self, filename_list): - """ - Adds settings files to process to *settings_schema_files* - - :param filename_list: files - :type filename_list: list of string - """ - if not hasattr(self, 'settings_schema_files'): - self.settings_schema_files = [] - - if not isinstance(filename_list, list): - 
filename_list = [filename_list] - - self.settings_schema_files.extend(filename_list) - -@taskgen_method -def add_settings_enums(self, namespace, filename_list): - """ - Called only once by task generator to set the enums namespace. - - :param namespace: namespace - :type namespace: string - :param filename_list: enum files to process - :type filename_list: file list - """ - if hasattr(self, 'settings_enum_namespace'): - raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name) - self.settings_enum_namespace = namespace - - if not isinstance(filename_list, list): - filename_list = [filename_list] - self.settings_enum_files = filename_list - -@feature('glib2') -def process_settings(self): - """ - Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The - same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks. - - """ - enums_tgt_node = [] - install_files = [] - - settings_schema_files = getattr(self, 'settings_schema_files', []) - if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS: - raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure") - - # 1. 
process gsettings_enum_files (generate .enums.xml) - # - if hasattr(self, 'settings_enum_files'): - enums_task = self.create_task('glib_mkenums') - - source_list = self.settings_enum_files - source_list = [self.path.find_resource(k) for k in source_list] - enums_task.set_inputs(source_list) - enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list] - - target = self.settings_enum_namespace + '.enums.xml' - tgt_node = self.path.find_or_declare(target) - enums_task.set_outputs(tgt_node) - enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath() - enums_tgt_node = [tgt_node] - - install_files.append(tgt_node) - - options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace) - enums_task.env.GLIB_MKENUMS_OPTIONS = options - - # 2. process gsettings_schema_files (validate .gschema.xml files) - # - for schema in settings_schema_files: - schema_task = self.create_task ('glib_validate_schema') - - schema_node = self.path.find_resource(schema) - if not schema_node: - raise Errors.WafError("Cannot find the schema file %r" % schema) - install_files.append(schema_node) - source_list = enums_tgt_node + [schema_node] - - schema_task.set_inputs (source_list) - schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list] - - target_node = schema_node.change_ext('.xml.valid') - schema_task.set_outputs (target_node) - schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath() - - # 3. 
@extension('.gresource.xml')
def process_gresource_source(self, node):
	"""
	Creates tasks that turn ``.gresource.xml`` files to C code
	"""
	if not self.env.GLIB_COMPILE_RESOURCES:
		raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure")

	if 'gresource' in self.features:
		# bundle generation is handled by process_gresource_bundle instead
		return

	header = node.change_ext('_xml.h')
	source = node.change_ext('_xml.c')
	self.create_task('glib_gresource_source', node, [header, source])
	self.source.append(source)
class glib_gresource_base(Task.Task):
	"""
	Base class for gresource based tasks
	"""
	color = 'BLUE'
	base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'

	def scan(self):
		"""
		Scans gresource dependencies through the
		``glib-compile-resources --generate-dependencies`` command

		:return: (dependency nodes resolved in the build tree, unresolved names)
		"""
		bld = self.generator.bld
		kw = {'cwd': self.get_cwd(), 'quiet': Context.BOTH}

		cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
			self.inputs[0].parent.srcpath(),
			self.inputs[0].bld_dir(),
			self.inputs[0].bldpath()
		), self.env)

		output = bld.cmd_and_log(cmd, **kw)

		nodes, names = [], []
		for dep in output.splitlines():
			if not dep:
				continue
			node = bld.bldnode.find_node(dep)
			if node:
				nodes.append(node)
			else:
				# keep the raw name so a signature change is still detected
				names.append(dep)
		return (nodes, names)
@conf
def find_glib_compile_schemas(conf):
	"""
	Finds *glib-compile-schemas* and computes ``GSETTINGSSCHEMADIR``
	(command-line option, then pre-set env value, then DATADIR/glib-2.0/schemas).
	"""
	# when cross-compiling, gsettings.m4 locates the program with:
	#   pkg-config --variable glib_compile_schemas gio-2.0
	conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')

	def getstr(varname):
		# command-line option first, then any pre-set configuration value
		return getattr(Options.options, varname, getattr(conf.env, varname, ''))

	gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
	if not gsettingsschemadir:
		datadir = getstr('DATADIR') or os.path.join(conf.env.PREFIX, 'share')
		gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas')

	conf.env.GSETTINGSSCHEMADIR = gsettingsschemadir
directories') - gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR') - diff --git a/waflib/Tools/gnu_dirs.py b/waflib/Tools/gnu_dirs.py deleted file mode 100644 index 2847071..0000000 --- a/waflib/Tools/gnu_dirs.py +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Ali Sabil, 2007 - -""" -Sets various standard variables such as INCLUDEDIR. SBINDIR and others. To use this module just call:: - - opt.load('gnu_dirs') - -and:: - - conf.load('gnu_dirs') - -Add options for the standard GNU directories, this tool will add the options -found in autotools, and will update the environment with the following -installation variables: - -============== ========================================= ======================= -Variable Description Default Value -============== ========================================= ======================= -PREFIX installation prefix /usr/local -EXEC_PREFIX installation prefix for binaries PREFIX -BINDIR user commands EXEC_PREFIX/bin -SBINDIR system binaries EXEC_PREFIX/sbin -LIBEXECDIR program-specific binaries EXEC_PREFIX/libexec -SYSCONFDIR host-specific configuration PREFIX/etc -SHAREDSTATEDIR architecture-independent variable data PREFIX/com -LOCALSTATEDIR variable data PREFIX/var -LIBDIR object code libraries EXEC_PREFIX/lib -INCLUDEDIR header files PREFIX/include -OLDINCLUDEDIR header files for non-GCC compilers /usr/include -DATAROOTDIR architecture-independent data root PREFIX/share -DATADIR architecture-independent data DATAROOTDIR -INFODIR GNU "info" documentation DATAROOTDIR/info -LOCALEDIR locale-dependent data DATAROOTDIR/locale -MANDIR manual pages DATAROOTDIR/man -DOCDIR documentation root DATAROOTDIR/doc/APPNAME -HTMLDIR HTML documentation DOCDIR -DVIDIR DVI documentation DOCDIR -PDFDIR PDF documentation DOCDIR -PSDIR PostScript documentation DOCDIR -============== ========================================= 
======================= -""" - -import os, re -from waflib import Utils, Options, Context - -gnuopts = ''' -bindir, user commands, ${EXEC_PREFIX}/bin -sbindir, system binaries, ${EXEC_PREFIX}/sbin -libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec -sysconfdir, host-specific configuration, ${PREFIX}/etc -sharedstatedir, architecture-independent variable data, ${PREFIX}/com -localstatedir, variable data, ${PREFIX}/var -libdir, object code libraries, ${EXEC_PREFIX}/lib%s -includedir, header files, ${PREFIX}/include -oldincludedir, header files for non-GCC compilers, /usr/include -datarootdir, architecture-independent data root, ${PREFIX}/share -datadir, architecture-independent data, ${DATAROOTDIR} -infodir, GNU "info" documentation, ${DATAROOTDIR}/info -localedir, locale-dependent data, ${DATAROOTDIR}/locale -mandir, manual pages, ${DATAROOTDIR}/man -docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE} -htmldir, HTML documentation, ${DOCDIR} -dvidir, DVI documentation, ${DOCDIR} -pdfdir, PDF documentation, ${DOCDIR} -psdir, PostScript documentation, ${DOCDIR} -''' % Utils.lib64() - -_options = [x.split(', ') for x in gnuopts.splitlines() if x] - -def configure(conf): - """ - Reads the command-line options to set lots of variables in *conf.env*. The variables - BINDIR and LIBDIR will be overwritten. 
- """ - def get_param(varname, default): - return getattr(Options.options, varname, '') or default - - env = conf.env - env.LIBDIR = env.BINDIR = [] - env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX) - env.PACKAGE = getattr(Context.g_module, 'APPNAME', None) or env.PACKAGE - - complete = False - iter = 0 - while not complete and iter < len(_options) + 1: - iter += 1 - complete = True - for name, help, default in _options: - name = name.upper() - if not env[name]: - try: - env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env) - except TypeError: - complete = False - - if not complete: - lst = [x for x, _, _ in _options if not env[x.upper()]] - raise conf.errors.WafError('Variable substitution failure %r' % lst) - -def options(opt): - """ - Adds lots of command-line options, for example:: - - --exec-prefix: EXEC_PREFIX - """ - inst_dir = opt.add_option_group('Installation prefix', -'By default, "waf install" will put the files in\ - "/usr/local/bin", "/usr/local/lib" etc. 
An installation prefix other\ - than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"') - - for k in ('--prefix', '--destdir'): - option = opt.parser.get_option(k) - if option: - opt.parser.remove_option(k) - inst_dir.add_option(option) - - inst_dir.add_option('--exec-prefix', - help = 'installation prefix for binaries [PREFIX]', - default = '', - dest = 'EXEC_PREFIX') - - dirs_options = opt.add_option_group('Installation directories') - - for name, help, default in _options: - option_name = '--' + name - str_default = default - str_help = '%s [%s]' % (help, re.sub(r'\$\{([^}]+)\}', r'\1', str_default)) - dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper()) - diff --git a/waflib/Tools/gxx.py b/waflib/Tools/gxx.py deleted file mode 100644 index 22c5d26..0000000 --- a/waflib/Tools/gxx.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2018 (ita) -# Ralf Habacker, 2006 (rh) -# Yinon Ehrlich, 2009 - -""" -g++/llvm detection. 
-""" - -from waflib.Tools import ccroot, ar -from waflib.Configure import conf - -@conf -def find_gxx(conf): - """ - Finds the program g++, and if present, try to detect its version number - """ - cxx = conf.find_program(['g++', 'c++'], var='CXX') - conf.get_cc_version(cxx, gcc=True) - conf.env.CXX_NAME = 'gcc' - -@conf -def gxx_common_flags(conf): - """ - Common flags for g++ on nearly all platforms - """ - v = conf.env - - v.CXX_SRC_F = [] - v.CXX_TGT_F = ['-c', '-o'] - - if not v.LINK_CXX: - v.LINK_CXX = v.CXX - - v.CXXLNK_SRC_F = [] - v.CXXLNK_TGT_F = ['-o'] - v.CPPPATH_ST = '-I%s' - v.DEFINES_ST = '-D%s' - - v.LIB_ST = '-l%s' # template for adding libs - v.LIBPATH_ST = '-L%s' # template for adding libpaths - v.STLIB_ST = '-l%s' - v.STLIBPATH_ST = '-L%s' - v.RPATH_ST = '-Wl,-rpath,%s' - - v.SONAME_ST = '-Wl,-h,%s' - v.SHLIB_MARKER = '-Wl,-Bdynamic' - v.STLIB_MARKER = '-Wl,-Bstatic' - - v.cxxprogram_PATTERN = '%s' - - v.CXXFLAGS_cxxshlib = ['-fPIC'] - v.LINKFLAGS_cxxshlib = ['-shared'] - v.cxxshlib_PATTERN = 'lib%s.so' - - v.LINKFLAGS_cxxstlib = ['-Wl,-Bstatic'] - v.cxxstlib_PATTERN = 'lib%s.a' - - v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup'] - v.CXXFLAGS_MACBUNDLE = ['-fPIC'] - v.macbundle_PATTERN = '%s.bundle' - -@conf -def gxx_modifier_win32(conf): - """Configuration flags for executing gcc on Windows""" - v = conf.env - v.cxxprogram_PATTERN = '%s.exe' - - v.cxxshlib_PATTERN = '%s.dll' - v.implib_PATTERN = '%s.dll.a' - v.IMPLIB_ST = '-Wl,--out-implib,%s' - - v.CXXFLAGS_cxxshlib = [] - - # Auto-import is enabled by default even without this option, - # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages - # that the linker emits otherwise. 
- v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import']) - -@conf -def gxx_modifier_cygwin(conf): - """Configuration flags for executing g++ on Cygwin""" - gxx_modifier_win32(conf) - v = conf.env - v.cxxshlib_PATTERN = 'cyg%s.dll' - v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base']) - v.CXXFLAGS_cxxshlib = [] - -@conf -def gxx_modifier_darwin(conf): - """Configuration flags for executing g++ on MacOS""" - v = conf.env - v.CXXFLAGS_cxxshlib = ['-fPIC'] - v.LINKFLAGS_cxxshlib = ['-dynamiclib'] - v.cxxshlib_PATTERN = 'lib%s.dylib' - v.FRAMEWORKPATH_ST = '-F%s' - v.FRAMEWORK_ST = ['-framework'] - v.ARCH_ST = ['-arch'] - - v.LINKFLAGS_cxxstlib = [] - - v.SHLIB_MARKER = [] - v.STLIB_MARKER = [] - v.SONAME_ST = [] - -@conf -def gxx_modifier_aix(conf): - """Configuration flags for executing g++ on AIX""" - v = conf.env - v.LINKFLAGS_cxxprogram= ['-Wl,-brtl'] - - v.LINKFLAGS_cxxshlib = ['-shared', '-Wl,-brtl,-bexpfull'] - v.SHLIB_MARKER = [] - -@conf -def gxx_modifier_hpux(conf): - v = conf.env - v.SHLIB_MARKER = [] - v.STLIB_MARKER = [] - v.CFLAGS_cxxshlib = ['-fPIC','-DPIC'] - v.cxxshlib_PATTERN = 'lib%s.sl' - -@conf -def gxx_modifier_openbsd(conf): - conf.env.SONAME_ST = [] - -@conf -def gcc_modifier_osf1V(conf): - v = conf.env - v.SHLIB_MARKER = [] - v.STLIB_MARKER = [] - v.SONAME_ST = [] - -@conf -def gxx_modifier_platform(conf): - """Execute platform-specific functions based on *gxx_modifier_+NAME*""" - # * set configurations specific for a platform. - # * the destination platform is detected automatically by looking at the macros the compiler predefines, - # and if it's not recognised, it fallbacks to sys.platform. 
- gxx_modifier_func = getattr(conf, 'gxx_modifier_' + conf.env.DEST_OS, None) - if gxx_modifier_func: - gxx_modifier_func() - -def configure(conf): - """ - Configuration for g++ - """ - conf.find_gxx() - conf.find_ar() - conf.gxx_common_flags() - conf.gxx_modifier_platform() - conf.cxx_load_tools() - conf.cxx_add_flags() - conf.link_add_flags() - conf.check_gcc_o_space('cxx') - diff --git a/waflib/Tools/icc.py b/waflib/Tools/icc.py deleted file mode 100644 index b6492c8..0000000 --- a/waflib/Tools/icc.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Stian Selnes 2008 -# Thomas Nagy 2009-2018 (ita) - -""" -Detects the Intel C compiler -""" - -import sys -from waflib.Tools import ccroot, ar, gcc -from waflib.Configure import conf - -@conf -def find_icc(conf): - """ - Finds the program icc and execute it to ensure it really is icc - """ - cc = conf.find_program(['icc', 'ICL'], var='CC') - conf.get_cc_version(cc, icc=True) - conf.env.CC_NAME = 'icc' - -def configure(conf): - conf.find_icc() - conf.find_ar() - conf.gcc_common_flags() - conf.gcc_modifier_platform() - conf.cc_load_tools() - conf.cc_add_flags() - conf.link_add_flags() diff --git a/waflib/Tools/icpc.py b/waflib/Tools/icpc.py deleted file mode 100644 index 8a6cc6c..0000000 --- a/waflib/Tools/icpc.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy 2009-2018 (ita) - -""" -Detects the Intel C++ compiler -""" - -import sys -from waflib.Tools import ccroot, ar, gxx -from waflib.Configure import conf - -@conf -def find_icpc(conf): - """ - Finds the program icpc, and execute it to ensure it really is icpc - """ - cxx = conf.find_program('icpc', var='CXX') - conf.get_cc_version(cxx, icc=True) - conf.env.CXX_NAME = 'icc' - -def configure(conf): - conf.find_icpc() - conf.find_ar() - conf.gxx_common_flags() - conf.gxx_modifier_platform() - conf.cxx_load_tools() - conf.cxx_add_flags() - conf.link_add_flags() - diff --git a/waflib/Tools/ifort.py 
b/waflib/Tools/ifort.py deleted file mode 100644 index 17d3052..0000000 --- a/waflib/Tools/ifort.py +++ /dev/null @@ -1,413 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# DC 2008 -# Thomas Nagy 2016-2018 (ita) - -import os, re, traceback -from waflib import Utils, Logs, Errors -from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot -from waflib.Configure import conf -from waflib.TaskGen import after_method, feature - -@conf -def find_ifort(conf): - fc = conf.find_program('ifort', var='FC') - conf.get_ifort_version(fc) - conf.env.FC_NAME = 'IFORT' - -@conf -def ifort_modifier_win32(self): - v = self.env - v.IFORT_WIN32 = True - v.FCSTLIB_MARKER = '' - v.FCSHLIB_MARKER = '' - - v.FCLIB_ST = v.FCSTLIB_ST = '%s.lib' - v.FCLIBPATH_ST = v.STLIBPATH_ST = '/LIBPATH:%s' - v.FCINCPATH_ST = '/I%s' - v.FCDEFINES_ST = '/D%s' - - v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe' - v.fcshlib_PATTERN = '%s.dll' - v.fcstlib_PATTERN = v.implib_PATTERN = '%s.lib' - - v.FCLNK_TGT_F = '/out:' - v.FC_TGT_F = ['/c', '/o', ''] - v.FCFLAGS_fcshlib = '' - v.LINKFLAGS_fcshlib = '/DLL' - v.AR_TGT_F = '/out:' - v.IMPLIB_ST = '/IMPLIB:%s' - - v.append_value('LINKFLAGS', '/subsystem:console') - if v.IFORT_MANIFEST: - v.append_value('LINKFLAGS', ['/MANIFEST']) - -@conf -def ifort_modifier_darwin(conf): - fc_config.fortran_modifier_darwin(conf) - -@conf -def ifort_modifier_platform(conf): - dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform() - ifort_modifier_func = getattr(conf, 'ifort_modifier_' + dest_os, None) - if ifort_modifier_func: - ifort_modifier_func() - -@conf -def get_ifort_version(conf, fc): - """ - Detects the compiler version and sets ``conf.env.FC_VERSION`` - """ - version_re = re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search - if Utils.is_win32: - cmd = fc - else: - cmd = fc + ['-logo'] - - out, err = fc_config.getoutput(conf, cmd, stdin=False) - match = version_re(out) or version_re(err) - if not match: - 
conf.fatal('cannot determine ifort version.') - k = match.groupdict() - conf.env.FC_VERSION = (k['major'], k['minor']) - -def configure(conf): - """ - Detects the Intel Fortran compilers - """ - if Utils.is_win32: - compiler, version, path, includes, libdirs, arch = conf.detect_ifort() - v = conf.env - v.DEST_CPU = arch - v.PATH = path - v.INCLUDES = includes - v.LIBPATH = libdirs - v.MSVC_COMPILER = compiler - try: - v.MSVC_VERSION = float(version) - except ValueError: - v.MSVC_VERSION = float(version[:-3]) - - conf.find_ifort_win32() - conf.ifort_modifier_win32() - else: - conf.find_ifort() - conf.find_program('xiar', var='AR') - conf.find_ar() - conf.fc_flags() - conf.fc_add_flags() - conf.ifort_modifier_platform() - - -all_ifort_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')] -"""List of icl platforms""" - -@conf -def gather_ifort_versions(conf, versions): - """ - List compiler versions by looking up registry keys - """ - version_pattern = re.compile(r'^...?.?\....?.?') - try: - all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran') - except OSError: - try: - all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran') - except OSError: - return - index = 0 - while 1: - try: - version = Utils.winreg.EnumKey(all_versions, index) - except OSError: - break - index += 1 - if not version_pattern.match(version): - continue - targets = {} - for target,arch in all_ifort_platforms: - if target=='intel64': - targetDir='EM64T_NATIVE' - else: - targetDir=target - try: - Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir) - icl_version=Utils.winreg.OpenKey(all_versions,version) - path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') - except OSError: - pass - else: - batch_file=os.path.join(path,'bin','ifortvars.bat') - if os.path.isfile(batch_file): - targets[target] = target_compiler(conf, 'intel', 
arch, version, target, batch_file) - - for target,arch in all_ifort_platforms: - try: - icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target) - path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir') - except OSError: - continue - else: - batch_file=os.path.join(path,'bin','ifortvars.bat') - if os.path.isfile(batch_file): - targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file) - major = version[0:2] - versions['intel ' + major] = targets - -@conf -def setup_ifort(conf, versiondict): - """ - Checks installed compilers and targets and returns the first combination from the user's - options, env, or the global supported lists that checks. - - :param versiondict: dict(platform -> dict(architecture -> configuration)) - :type versiondict: dict(string -> dict(string -> target_compiler) - :return: the compiler, revision, path, include dirs, library paths and target architecture - :rtype: tuple of strings - """ - platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms] - desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys()))) - for version in desired_versions: - try: - targets = versiondict[version] - except KeyError: - continue - for arch in platforms: - try: - cfg = targets[arch] - except KeyError: - continue - cfg.evaluate() - if cfg.is_valid: - compiler,revision = version.rsplit(' ', 1) - return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu - conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys()))) - -@conf -def get_ifort_version_win32(conf, compiler, version, target, vcvars): - # FIXME hack - try: - conf.msvc_cnt += 1 - except AttributeError: - conf.msvc_cnt = 1 - batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt) - batfile.write("""@echo off -set INCLUDE= -set LIB= -call "%s" %s -echo PATH=%%PATH%% -echo INCLUDE=%%INCLUDE%% -echo 
LIB=%%LIB%%;%%LIBPATH%% -""" % (vcvars,target)) - sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()]) - batfile.delete() - lines = sout.splitlines() - - if not lines[0]: - lines.pop(0) - - MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None - for line in lines: - if line.startswith('PATH='): - path = line[5:] - MSVC_PATH = path.split(';') - elif line.startswith('INCLUDE='): - MSVC_INCDIR = [i for i in line[8:].split(';') if i] - elif line.startswith('LIB='): - MSVC_LIBDIR = [i for i in line[4:].split(';') if i] - if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR): - conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)') - - # Check if the compiler is usable at all. - # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run. - env = dict(os.environ) - env.update(PATH = path) - compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) - fc = conf.find_program(compiler_name, path_list=MSVC_PATH) - - # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically. - if 'CL' in env: - del(env['CL']) - - try: - conf.cmd_and_log(fc + ['/help'], env=env) - except UnicodeError: - st = traceback.format_exc() - if conf.logger: - conf.logger.error(st) - conf.fatal('ifort: Unicode error - check the code page?') - except Exception as e: - Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e)) - conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)') - else: - Logs.debug('ifort: get_ifort_version: %r %r %r -> OK', compiler, version, target) - finally: - conf.env[compiler_name] = '' - - return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR) - -class target_compiler(object): - """ - Wraps a compiler configuration; call evaluate() to determine - whether the configuration is usable. 
- """ - def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None): - """ - :param ctx: configuration context to use to eventually get the version environment - :param compiler: compiler name - :param cpu: target cpu - :param version: compiler version number - :param bat_target: ? - :param bat: path to the batch file to run - :param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths) - """ - self.conf = ctx - self.name = None - self.is_valid = False - self.is_done = False - - self.compiler = compiler - self.cpu = cpu - self.version = version - self.bat_target = bat_target - self.bat = bat - self.callback = callback - - def evaluate(self): - if self.is_done: - return - self.is_done = True - try: - vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat) - except Errors.ConfigurationError: - self.is_valid = False - return - if self.callback: - vs = self.callback(self, vs) - self.is_valid = True - (self.bindirs, self.incdirs, self.libdirs) = vs - - def __str__(self): - return str((self.bindirs, self.incdirs, self.libdirs)) - - def __repr__(self): - return repr((self.bindirs, self.incdirs, self.libdirs)) - -@conf -def detect_ifort(self): - return self.setup_ifort(self.get_ifort_versions(False)) - -@conf -def get_ifort_versions(self, eval_and_save=True): - """ - :return: platforms to compiler configurations - :rtype: dict - """ - dct = {} - self.gather_ifort_versions(dct) - return dct - -def _get_prog_names(self, compiler): - if compiler=='intel': - compiler_name = 'ifort' - linker_name = 'XILINK' - lib_name = 'XILIB' - else: - # assumes CL.exe - compiler_name = 'CL' - linker_name = 'LINK' - lib_name = 'LIB' - return compiler_name, linker_name, lib_name - -@conf -def find_ifort_win32(conf): - # the autodetection is supposed to be performed before entering in this method - v = conf.env - path = v.PATH - compiler = v.MSVC_COMPILER - version = 
v.MSVC_VERSION - - compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) - v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11) - - # compiler - fc = conf.find_program(compiler_name, var='FC', path_list=path) - - # before setting anything, check if the compiler is really intel fortran - env = dict(conf.environ) - if path: - env.update(PATH = ';'.join(path)) - if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env): - conf.fatal('not intel fortran compiler could not be identified') - - v.FC_NAME = 'IFORT' - - if not v.LINK_FC: - conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True) - - if not v.AR: - conf.find_program(lib_name, path_list=path, var='AR', mandatory=True) - v.ARFLAGS = ['/nologo'] - - # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later - if v.IFORT_MANIFEST: - conf.find_program('MT', path_list=path, var='MT') - v.MTFLAGS = ['/nologo'] - - try: - conf.load('winres') - except Errors.WafError: - Logs.warn('Resource compiler not found. 
Compiling resource file is disabled') - -####################################################################################################### -##### conf above, build below - -@after_method('apply_link') -@feature('fc') -def apply_flags_ifort(self): - """ - Adds additional flags implied by msvc, such as subsystems and pdb files:: - - def build(bld): - bld.stlib(source='main.c', target='bar', subsystem='gruik') - """ - if not self.env.IFORT_WIN32 or not getattr(self, 'link_task', None): - return - - is_static = isinstance(self.link_task, ccroot.stlink_task) - - subsystem = getattr(self, 'subsystem', '') - if subsystem: - subsystem = '/subsystem:%s' % subsystem - flags = is_static and 'ARFLAGS' or 'LINKFLAGS' - self.env.append_value(flags, subsystem) - - if not is_static: - for f in self.env.LINKFLAGS: - d = f.lower() - if d[1:] == 'debug': - pdbnode = self.link_task.outputs[0].change_ext('.pdb') - self.link_task.outputs.append(pdbnode) - - if getattr(self, 'install_task', None): - self.pdb_install_task = self.add_install_files(install_to=self.install_task.install_to, install_from=pdbnode) - - break - -@feature('fcprogram', 'fcshlib', 'fcprogram_test') -@after_method('apply_link') -def apply_manifest_ifort(self): - """ - Enables manifest embedding in Fortran DLLs when using ifort on Windows - See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx - """ - if self.env.IFORT_WIN32 and getattr(self, 'link_task', None): - # it seems ifort.exe cannot be called for linking - self.link_task.env.FC = self.env.LINK_FC - - if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self, 'link_task', None): - out_node = self.link_task.outputs[0] - man_node = out_node.parent.find_or_declare(out_node.name + '.manifest') - self.link_task.outputs.append(man_node) - self.env.DO_MANIFEST = True - diff --git a/waflib/Tools/intltool.py b/waflib/Tools/intltool.py deleted file mode 100644 index af95ba8..0000000 --- a/waflib/Tools/intltool.py +++ /dev/null @@ -1,231 
+0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2018 (ita) - -""" -Support for translation tools such as msgfmt and intltool - -Usage:: - - def configure(conf): - conf.load('gnu_dirs intltool') - - def build(bld): - # process the .po files into .gmo files, and install them in LOCALEDIR - bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}") - - # process an input file, substituting the translations from the po dir - bld( - features = "intltool_in", - podir = "../po", - style = "desktop", - flags = ["-u"], - source = 'kupfer.desktop.in', - install_path = "${DATADIR}/applications", - ) - -Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory. -""" - -from __future__ import with_statement - -import os, re -from waflib import Context, Task, Utils, Logs -import waflib.Tools.ccroot -from waflib.TaskGen import feature, before_method, taskgen_method -from waflib.Logs import error -from waflib.Configure import conf - -_style_flags = { - 'ba': '-b', - 'desktop': '-d', - 'keys': '-k', - 'quoted': '--quoted-style', - 'quotedxml': '--quotedxml-style', - 'rfc822deb': '-r', - 'schemas': '-s', - 'xml': '-x', -} - -@taskgen_method -def ensure_localedir(self): - """ - Expands LOCALEDIR from DATAROOTDIR/locale if possible, or falls back to PREFIX/share/locale - """ - # use the tool gnu_dirs to provide options to define this - if not self.env.LOCALEDIR: - if self.env.DATAROOTDIR: - self.env.LOCALEDIR = os.path.join(self.env.DATAROOTDIR, 'locale') - else: - self.env.LOCALEDIR = os.path.join(self.env.PREFIX, 'share', 'locale') - -@before_method('process_source') -@feature('intltool_in') -def apply_intltool_in_f(self): - """ - Creates tasks to translate files by intltool-merge:: - - def build(bld): - bld( - features = "intltool_in", - podir = "../po", - style = "desktop", - flags = ["-u"], - source = 'kupfer.desktop.in', - install_path = "${DATADIR}/applications", - ) - - :param podir: location of the .po 
files - :type podir: string - :param source: source files to process - :type source: list of string - :param style: the intltool-merge mode of operation, can be one of the following values: - ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``. - See the ``intltool-merge`` man page for more information about supported modes of operation. - :type style: string - :param flags: compilation flags ("-quc" by default) - :type flags: list of string - :param install_path: installation path - :type install_path: string - """ - try: - self.meths.remove('process_source') - except ValueError: - pass - - self.ensure_localedir() - - podir = getattr(self, 'podir', '.') - podirnode = self.path.find_dir(podir) - if not podirnode: - error("could not find the podir %r" % podir) - return - - cache = getattr(self, 'intlcache', '.intlcache') - self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)] - self.env.INTLPODIR = podirnode.bldpath() - self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT)) - - if '-c' in self.env.INTLFLAGS: - self.bld.fatal('Redundant -c flag in intltool task %r' % self) - - style = getattr(self, 'style', None) - if style: - try: - style_flag = _style_flags[style] - except KeyError: - self.bld.fatal('intltool_in style "%s" is not valid' % style) - - self.env.append_unique('INTLFLAGS', [style_flag]) - - for i in self.to_list(self.source): - node = self.path.find_resource(i) - - task = self.create_task('intltool', node, node.change_ext('')) - inst = getattr(self, 'install_path', None) - if inst: - self.add_install_files(install_to=inst, install_from=task.outputs) - -@feature('intltool_po') -def apply_intltool_po(self): - """ - Creates tasks to process po files:: - - def build(bld): - bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}") - - The relevant task generator arguments are: - - :param podir: directory of the .po files - :type 
podir: string - :param appname: name of the application - :type appname: string - :param install_path: installation directory - :type install_path: string - - The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process. - """ - try: - self.meths.remove('process_source') - except ValueError: - pass - - self.ensure_localedir() - - appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name')) - podir = getattr(self, 'podir', '.') - inst = getattr(self, 'install_path', '${LOCALEDIR}') - - linguas = self.path.find_node(os.path.join(podir, 'LINGUAS')) - if linguas: - # scan LINGUAS file for locales to process - with open(linguas.abspath()) as f: - langs = [] - for line in f.readlines(): - # ignore lines containing comments - if not line.startswith('#'): - langs += line.split() - re_linguas = re.compile('[-a-zA-Z_@.]+') - for lang in langs: - # Make sure that we only process lines which contain locales - if re_linguas.match(lang): - node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po')) - task = self.create_task('po', node, node.change_ext('.mo')) - - if inst: - filename = task.outputs[0].name - (langname, ext) = os.path.splitext(filename) - inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo' - self.add_install_as(install_to=inst_file, install_from=task.outputs[0], - chmod=getattr(self, 'chmod', Utils.O644)) - - else: - Logs.pprint('RED', "Error no LINGUAS file found in po directory") - -class po(Task.Task): - """ - Compiles .po files into .gmo files - """ - run_str = '${MSGFMT} -o ${TGT} ${SRC}' - color = 'BLUE' - -class intltool(Task.Task): - """ - Calls intltool-merge to update translation files - """ - run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}' - color = 'BLUE' - -@conf -def find_msgfmt(conf): - """ - Detects msgfmt and sets the ``MSGFMT`` variable - """ - 
conf.find_program('msgfmt', var='MSGFMT') - -@conf -def find_intltool_merge(conf): - """ - Detects intltool-merge - """ - if not conf.env.PERL: - conf.find_program('perl', var='PERL') - conf.env.INTLCACHE_ST = '--cache=%s' - conf.env.INTLFLAGS_DEFAULT = ['-q', '-u'] - conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL') - -def configure(conf): - """ - Detects the program *msgfmt* and set *conf.env.MSGFMT*. - Detects the program *intltool-merge* and set *conf.env.INTLTOOL*. - It is possible to set INTLTOOL in the environment, but it must not have spaces in it:: - - $ INTLTOOL="/path/to/the program/intltool" waf configure - - If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*. - """ - conf.find_msgfmt() - conf.find_intltool_merge() - if conf.env.CC or conf.env.CXX: - conf.check(header_name='locale.h') - diff --git a/waflib/Tools/irixcc.py b/waflib/Tools/irixcc.py deleted file mode 100644 index c3ae1ac..0000000 --- a/waflib/Tools/irixcc.py +++ /dev/null @@ -1,66 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 -# imported from samba - -""" -Compiler definition for irix/MIPSpro cc compiler -""" - -from waflib import Errors -from waflib.Tools import ccroot, ar -from waflib.Configure import conf - -@conf -def find_irixcc(conf): - v = conf.env - cc = None - if v.CC: - cc = v.CC - elif 'CC' in conf.environ: - cc = conf.environ['CC'] - if not cc: - cc = conf.find_program('cc', var='CC') - if not cc: - conf.fatal('irixcc was not found') - - try: - conf.cmd_and_log(cc + ['-version']) - except Errors.WafError: - conf.fatal('%r -version could not be executed' % cc) - - v.CC = cc - v.CC_NAME = 'irix' - -@conf -def irixcc_common_flags(conf): - v = conf.env - - v.CC_SRC_F = '' - v.CC_TGT_F = ['-c', '-o'] - v.CPPPATH_ST = '-I%s' - v.DEFINES_ST = '-D%s' - - if not v.LINK_CC: - v.LINK_CC = v.CC - - v.CCLNK_SRC_F = '' - v.CCLNK_TGT_F = ['-o'] - - v.LIB_ST = '-l%s' # template for adding libs - v.LIBPATH_ST = '-L%s' # template for adding libpaths - v.STLIB_ST = '-l%s' - v.STLIBPATH_ST = '-L%s' - - v.cprogram_PATTERN = '%s' - v.cshlib_PATTERN = 'lib%s.so' - v.cstlib_PATTERN = 'lib%s.a' - -def configure(conf): - conf.find_irixcc() - conf.find_cpp() - conf.find_ar() - conf.irixcc_common_flags() - conf.cc_load_tools() - conf.cc_add_flags() - conf.link_add_flags() - diff --git a/waflib/Tools/javaw.py b/waflib/Tools/javaw.py deleted file mode 100644 index 9daed39..0000000 --- a/waflib/Tools/javaw.py +++ /dev/null @@ -1,579 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Thomas Nagy, 2006-2018 (ita) - -""" -Java support - -Javac is one of the few compilers that behaves very badly: - -#. it outputs files where it wants to (-d is only for the package root) - -#. it recompiles files silently behind your back - -#. it outputs an undefined amount of files (inner classes) - -Remember that the compilation can be performed using Jython[1] rather than regular Python. 
Instead of -running one of the following commands:: - - ./waf configure - python waf configure - -You would have to run:: - - java -jar /path/to/jython.jar waf configure - -[1] http://www.jython.org/ - -Usage -===== - -Load the "java" tool. - -def configure(conf): - conf.load('java') - -Java tools will be autodetected and eventually, if present, the quite -standard JAVA_HOME environment variable will be used. The also standard -CLASSPATH variable is used for library searching. - -In configuration phase checks can be done on the system environment, for -example to check if a class is known in the classpath:: - - conf.check_java_class('java.io.FileOutputStream') - -or if the system supports JNI applications building:: - - conf.check_jni_headers() - - -The java tool supports compiling java code, creating jar files and -creating javadoc documentation. This can be either done separately or -together in a single definition. For example to manage them separately:: - - bld(features = 'javac', - srcdir = 'src', - compat = '1.7', - use = 'animals', - name = 'cats-src', - ) - - bld(features = 'jar', - basedir = '.', - destfile = '../cats.jar', - name = 'cats', - use = 'cats-src' - ) - - -Or together by defining all the needed attributes:: - - bld(features = 'javac jar javadoc', - srcdir = 'src/', # folder containing the sources to compile - outdir = 'src', # folder where to output the classes (in the build directory) - compat = '1.6', # java compatibility version number - classpath = ['.', '..'], - - # jar - basedir = 'src', # folder containing the classes and other files to package (must match outdir) - destfile = 'foo.jar', # do not put the destfile in the folder of the java classes! 
- use = 'NNN', - jaropts = ['-C', 'default/src/', '.'], # can be used to give files - manifest = 'src/Manifest.mf', # Manifest file to include - - # javadoc - javadoc_package = ['com.meow' , 'com.meow.truc.bar', 'com.meow.truc.foo'], - javadoc_output = 'javadoc', - ) - -External jar dependencies can be mapped to a standard waf "use" dependency by -setting an environment variable with a CLASSPATH prefix in the configuration, -for example:: - - conf.env.CLASSPATH_NNN = ['aaaa.jar', 'bbbb.jar'] - -and then NNN can be freely used in rules as:: - - use = 'NNN', - -In the java tool the dependencies via use are not transitive by default, as -this necessity depends on the code. To enable recursive dependency scanning -use on a specific rule: - - recurse_use = True - -Or build-wise by setting RECURSE_JAVA: - - bld.env.RECURSE_JAVA = True - -Unit tests can be integrated in the waf unit test environment using the javatest extra. -""" - -import os, shutil -from waflib import Task, Utils, Errors, Node -from waflib.Configure import conf -from waflib.TaskGen import feature, before_method, after_method, taskgen_method - -from waflib.Tools import ccroot -ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS']) - -SOURCE_RE = '**/*.java' -JAR_RE = '**/*' - -class_check_source = ''' -public class Test { - public static void main(String[] argv) { - Class lib; - if (argv.length < 1) { - System.err.println("Missing argument"); - System.exit(77); - } - try { - lib = Class.forName(argv[0]); - } catch (ClassNotFoundException e) { - System.err.println("ClassNotFoundException"); - System.exit(1); - } - lib = null; - System.exit(0); - } -} -''' - -@feature('javac') -@before_method('process_source') -def apply_java(self): - """ - Create a javac task for compiling *.java files*. There can be - only one javac task by task generator. 
- """ - Utils.def_attrs(self, jarname='', classpath='', - sourcepath='.', srcdir='.', - jar_mf_attributes={}, jar_mf_classpath=[]) - - outdir = getattr(self, 'outdir', None) - if outdir: - if not isinstance(outdir, Node.Node): - outdir = self.path.get_bld().make_node(self.outdir) - else: - outdir = self.path.get_bld() - outdir.mkdir() - self.outdir = outdir - self.env.OUTDIR = outdir.abspath() - - self.javac_task = tsk = self.create_task('javac') - tmp = [] - - srcdir = getattr(self, 'srcdir', '') - if isinstance(srcdir, Node.Node): - srcdir = [srcdir] - for x in Utils.to_list(srcdir): - if isinstance(x, Node.Node): - y = x - else: - y = self.path.find_dir(x) - if not y: - self.bld.fatal('Could not find the folder %s from %s' % (x, self.path)) - tmp.append(y) - - tsk.srcdir = tmp - - if getattr(self, 'compat', None): - tsk.env.append_value('JAVACFLAGS', ['-source', str(self.compat)]) - - if hasattr(self, 'sourcepath'): - fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)] - names = os.pathsep.join([x.srcpath() for x in fold]) - else: - names = [x.srcpath() for x in tsk.srcdir] - - if names: - tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names]) - - -@taskgen_method -def java_use_rec(self, name, **kw): - """ - Processes recursively the *use* attribute for each referred java compilation - """ - if name in self.tmp_use_seen: - return - - self.tmp_use_seen.append(name) - - try: - y = self.bld.get_tgen_by_name(name) - except Errors.WafError: - self.uselib.append(name) - return - else: - y.post() - # Add generated JAR name for CLASSPATH. 
Task ordering (set_run_after) - # is already guaranteed by ordering done between the single tasks - if hasattr(y, 'jar_task'): - self.use_lst.append(y.jar_task.outputs[0].abspath()) - - for x in self.to_list(getattr(y, 'use', [])): - self.java_use_rec(x) - -@feature('javac') -@before_method('propagate_uselib_vars') -@after_method('apply_java') -def use_javac_files(self): - """ - Processes the *use* attribute referring to other java compilations - """ - self.use_lst = [] - self.tmp_use_seen = [] - self.uselib = self.to_list(getattr(self, 'uselib', [])) - names = self.to_list(getattr(self, 'use', [])) - get = self.bld.get_tgen_by_name - for x in names: - try: - y = get(x) - except Errors.WafError: - self.uselib.append(x) - else: - y.post() - if hasattr(y, 'jar_task'): - self.use_lst.append(y.jar_task.outputs[0].abspath()) - self.javac_task.set_run_after(y.jar_task) - else: - for tsk in y.tasks: - self.javac_task.set_run_after(tsk) - - # If recurse use scan is enabled recursively add use attribute for each used one - if getattr(self, 'recurse_use', False) or self.bld.env.RECURSE_JAVA: - self.java_use_rec(x) - - self.env.append_value('CLASSPATH', self.use_lst) - -@feature('javac') -@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files') -def set_classpath(self): - """ - Sets the CLASSPATH value on the *javac* task previously created. 
- """ - if getattr(self, 'classpath', None): - self.env.append_unique('CLASSPATH', getattr(self, 'classpath', [])) - for x in self.tasks: - x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep - -@feature('jar') -@after_method('apply_java', 'use_javac_files') -@before_method('process_source') -def jar_files(self): - """ - Creates a jar task (one maximum per task generator) - """ - destfile = getattr(self, 'destfile', 'test.jar') - jaropts = getattr(self, 'jaropts', []) - manifest = getattr(self, 'manifest', None) - - basedir = getattr(self, 'basedir', None) - if basedir: - if not isinstance(self.basedir, Node.Node): - basedir = self.path.get_bld().make_node(basedir) - else: - basedir = self.path.get_bld() - if not basedir: - self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self)) - - self.jar_task = tsk = self.create_task('jar_create') - if manifest: - jarcreate = getattr(self, 'jarcreate', 'cfm') - if not isinstance(manifest,Node.Node): - node = self.path.find_resource(manifest) - else: - node = manifest - if not node: - self.bld.fatal('invalid manifest file %r for %r' % (manifest, self)) - tsk.dep_nodes.append(node) - jaropts.insert(0, node.abspath()) - else: - jarcreate = getattr(self, 'jarcreate', 'cf') - if not isinstance(destfile, Node.Node): - destfile = self.path.find_or_declare(destfile) - if not destfile: - self.bld.fatal('invalid destfile %r for %r' % (destfile, self)) - tsk.set_outputs(destfile) - tsk.basedir = basedir - - jaropts.append('-C') - jaropts.append(basedir.bldpath()) - jaropts.append('.') - - tsk.env.JAROPTS = jaropts - tsk.env.JARCREATE = jarcreate - - if getattr(self, 'javac_task', None): - tsk.set_run_after(self.javac_task) - -@feature('jar') -@after_method('jar_files') -def use_jar_files(self): - """ - Processes the *use* attribute to set the build order on the - tasks created by another task generator. 
- """ - self.uselib = self.to_list(getattr(self, 'uselib', [])) - names = self.to_list(getattr(self, 'use', [])) - get = self.bld.get_tgen_by_name - for x in names: - try: - y = get(x) - except Errors.WafError: - self.uselib.append(x) - else: - y.post() - self.jar_task.run_after.update(y.tasks) - -class JTask(Task.Task): - """ - Base class for java and jar tasks; provides functionality to run long commands - """ - def split_argfile(self, cmd): - inline = [cmd[0]] - infile = [] - for x in cmd[1:]: - # jar and javac do not want -J flags in @file - if x.startswith('-J'): - inline.append(x) - else: - infile.append(self.quote_flag(x)) - return (inline, infile) - -class jar_create(JTask): - """ - Creates a jar file - """ - color = 'GREEN' - run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}' - - def runnable_status(self): - """ - Wait for dependent tasks to be executed, then read the - files to update the list of inputs. - """ - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - if not self.inputs: - try: - self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False, quiet=True) if id(x) != id(self.outputs[0])] - except Exception: - raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self)) - return super(jar_create, self).runnable_status() - -class javac(JTask): - """ - Compiles java files - """ - color = 'BLUE' - run_str = '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}' - vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR'] - """ - The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change. - """ - def uid(self): - """Identify java tasks by input&output folder""" - lst = [self.__class__.__name__, self.generator.outdir.abspath()] - for x in self.srcdir: - lst.append(x.abspath()) - return Utils.h_list(lst) - - def runnable_status(self): - """ - Waits for dependent tasks to be complete, then read the file system to find the input nodes. 
- """ - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - - if not self.inputs: - self.inputs = [] - for x in self.srcdir: - if x.exists(): - self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False, quiet=True)) - return super(javac, self).runnable_status() - - def post_run(self): - """ - List class files created - """ - for node in self.generator.outdir.ant_glob('**/*.class', quiet=True): - self.generator.bld.node_sigs[node] = self.uid() - self.generator.bld.task_sigs[self.uid()] = self.cache_sig - -@feature('javadoc') -@after_method('process_rule') -def create_javadoc(self): - """ - Creates a javadoc task (feature 'javadoc') - """ - tsk = self.create_task('javadoc') - tsk.classpath = getattr(self, 'classpath', []) - self.javadoc_package = Utils.to_list(self.javadoc_package) - if not isinstance(self.javadoc_output, Node.Node): - self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output) - -class javadoc(Task.Task): - """ - Builds java documentation - """ - color = 'BLUE' - - def __str__(self): - return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output) - - def run(self): - env = self.env - bld = self.generator.bld - wd = bld.bldnode - - #add src node + bld node (for generated java code) - srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir - srcpath += os.pathsep - srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir - - classpath = env.CLASSPATH - classpath += os.pathsep - classpath += os.pathsep.join(self.classpath) - classpath = "".join(classpath) - - self.last_cmd = lst = [] - lst.extend(Utils.to_list(env.JAVADOC)) - lst.extend(['-d', self.generator.javadoc_output.abspath()]) - lst.extend(['-sourcepath', srcpath]) - lst.extend(['-classpath', classpath]) - lst.extend(['-subpackages']) - lst.extend(self.generator.javadoc_package) - lst = [x for x in lst if x] - - self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, 
quiet=0) - - def post_run(self): - nodes = self.generator.javadoc_output.ant_glob('**', quiet=True) - for node in nodes: - self.generator.bld.node_sigs[node] = self.uid() - self.generator.bld.task_sigs[self.uid()] = self.cache_sig - -def configure(self): - """ - Detects the javac, java and jar programs - """ - # If JAVA_PATH is set, we prepend it to the path list - java_path = self.environ['PATH'].split(os.pathsep) - v = self.env - - if 'JAVA_HOME' in self.environ: - java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + java_path - self.env.JAVA_HOME = [self.environ['JAVA_HOME']] - - for x in 'javac java jar javadoc'.split(): - self.find_program(x, var=x.upper(), path_list=java_path) - - if 'CLASSPATH' in self.environ: - v.CLASSPATH = self.environ['CLASSPATH'] - - if not v.JAR: - self.fatal('jar is required for making java packages') - if not v.JAVAC: - self.fatal('javac is required for compiling java classes') - - v.JARCREATE = 'cf' # can use cvf - v.JAVACFLAGS = [] - -@conf -def check_java_class(self, classname, with_classpath=None): - """ - Checks if the specified java class exists - - :param classname: class to check, like java.util.HashMap - :type classname: string - :param with_classpath: additional classpath to give - :type with_classpath: string - """ - javatestdir = '.waf-javatest' - - classpath = javatestdir - if self.env.CLASSPATH: - classpath += os.pathsep + self.env.CLASSPATH - if isinstance(with_classpath, str): - classpath += os.pathsep + with_classpath - - shutil.rmtree(javatestdir, True) - os.mkdir(javatestdir) - - Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source) - - # Compile the source - self.exec_command(self.env.JAVAC + [os.path.join(javatestdir, 'Test.java')], shell=False) - - # Try to run the app - cmd = self.env.JAVA + ['-cp', classpath, 'Test', classname] - self.to_log("%s\n" % str(cmd)) - found = self.exec_command(cmd, shell=False) - - self.msg('Checking for java class %s' % classname, not found) - - 
shutil.rmtree(javatestdir, True) - - return found - -@conf -def check_jni_headers(conf): - """ - Checks for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets:: - - def options(opt): - opt.load('compiler_c') - - def configure(conf): - conf.load('compiler_c java') - conf.check_jni_headers() - - def build(bld): - bld.shlib(source='a.c', target='app', use='JAVA') - """ - if not conf.env.CC_NAME and not conf.env.CXX_NAME: - conf.fatal('load a compiler first (gcc, g++, ..)') - - if not conf.env.JAVA_HOME: - conf.fatal('set JAVA_HOME in the system environment') - - # jni requires the jvm - javaHome = conf.env.JAVA_HOME[0] - - dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include') - if dir is None: - dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?! - if dir is None: - conf.fatal('JAVA_HOME does not seem to be set properly') - - f = dir.ant_glob('**/(jni|jni_md).h') - incDirs = [x.parent.abspath() for x in f] - - dir = conf.root.find_dir(conf.env.JAVA_HOME[0]) - f = dir.ant_glob('**/*jvm.(so|dll|dylib)') - libDirs = [x.parent.abspath() for x in f] or [javaHome] - - # On windows, we need both the .dll and .lib to link. On my JDK, they are - # in different directories... 
- f = dir.ant_glob('**/*jvm.(lib)') - if f: - libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f] - - if conf.env.DEST_OS == 'freebsd': - conf.env.append_unique('LINKFLAGS_JAVA', '-pthread') - for d in libDirs: - try: - conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm', - libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA') - except Exception: - pass - else: - break - else: - conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs) - diff --git a/waflib/Tools/ldc2.py b/waflib/Tools/ldc2.py deleted file mode 100644 index a51c344..0000000 --- a/waflib/Tools/ldc2.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Alex Rønne Petersen, 2012 (alexrp/Zor) - -from waflib.Tools import ar, d -from waflib.Configure import conf - -@conf -def find_ldc2(conf): - """ - Finds the program *ldc2* and set the variable *D* - """ - conf.find_program(['ldc2'], var='D') - - out = conf.cmd_and_log(conf.env.D + ['-version']) - if out.find("based on DMD v2.") == -1: - conf.fatal("detected compiler is not ldc2") - -@conf -def common_flags_ldc2(conf): - """ - Sets the D flags required by *ldc2* - """ - v = conf.env - - v.D_SRC_F = ['-c'] - v.D_TGT_F = '-of%s' - - v.D_LINKER = v.D - v.DLNK_SRC_F = '' - v.DLNK_TGT_F = '-of%s' - v.DINC_ST = '-I%s' - - v.DSHLIB_MARKER = v.DSTLIB_MARKER = '' - v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s' - v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s' - - v.LINKFLAGS_dshlib = ['-L-shared'] - - v.DHEADER_ext = '.di' - v.DFLAGS_d_with_header = ['-H', '-Hf'] - v.D_HDR_F = '%s' - - v.LINKFLAGS = [] - v.DFLAGS_dshlib = ['-relocation-model=pic'] - -def configure(conf): - """ - Configuration for *ldc2* - """ - conf.find_ldc2() - conf.load('ar') - conf.load('d') - conf.common_flags_ldc2() - conf.d_platform_flags() - diff --git a/waflib/Tools/lua.py b/waflib/Tools/lua.py deleted file mode 100644 index 15a333a..0000000 --- a/waflib/Tools/lua.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python -# 
encoding: utf-8 -# Sebastian Schlingmann, 2008 -# Thomas Nagy, 2008-2018 (ita) - -""" -Lua support. - -Compile *.lua* files into *.luac*:: - - def configure(conf): - conf.load('lua') - conf.env.LUADIR = '/usr/local/share/myapp/scripts/' - def build(bld): - bld(source='foo.lua') -""" - -from waflib.TaskGen import extension -from waflib import Task - -@extension('.lua') -def add_lua(self, node): - tsk = self.create_task('luac', node, node.change_ext('.luac')) - inst_to = getattr(self, 'install_path', self.env.LUADIR and '${LUADIR}' or None) - if inst_to: - self.add_install_files(install_to=inst_to, install_from=tsk.outputs) - return tsk - -class luac(Task.Task): - run_str = '${LUAC} -s -o ${TGT} ${SRC}' - color = 'PINK' - -def configure(conf): - """ - Detect the luac compiler and set *conf.env.LUAC* - """ - conf.find_program('luac', var='LUAC') - diff --git a/waflib/Tools/md5_tstamp.py b/waflib/Tools/md5_tstamp.py deleted file mode 100644 index 2a58792..0000000 --- a/waflib/Tools/md5_tstamp.py +++ /dev/null @@ -1,38 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 - -""" -Re-calculate md5 hashes of files only when the file time have changed. - -The hashes can also reflect either the file contents (STRONGEST=True) or the -file time and file size. - -The performance benefits of this module are usually insignificant. 
-""" - -import os, stat -from waflib import Utils, Build, Node - -STRONGEST = True - -Build.SAVED_ATTRS.append('hashes_md5_tstamp') -def h_file(self): - filename = self.abspath() - st = os.stat(filename) - - cache = self.ctx.hashes_md5_tstamp - if filename in cache and cache[filename][0] == st.st_mtime: - return cache[filename][1] - - if STRONGEST: - ret = Utils.h_file(filename) - else: - if stat.S_ISDIR(st[stat.ST_MODE]): - raise IOError('Not a file') - ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest() - - cache[filename] = (st.st_mtime, ret) - return ret -h_file.__doc__ = Node.Node.h_file.__doc__ -Node.Node.h_file = h_file - diff --git a/waflib/Tools/msvc.py b/waflib/Tools/msvc.py deleted file mode 100644 index ff58449..0000000 --- a/waflib/Tools/msvc.py +++ /dev/null @@ -1,1020 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Carlos Rafael Giani, 2006 (dv) -# Tamas Pal, 2007 (folti) -# Nicolas Mercier, 2009 -# Matt Clarkson, 2012 - -""" -Microsoft Visual C++/Intel C++ compiler support - -If you get detection problems, first try any of the following:: - - chcp 65001 - set PYTHONIOENCODING=... - set PYTHONLEGACYWINDOWSSTDIO=1 - -Usage:: - - $ waf configure --msvc_version="msvc 10.0,msvc 9.0" --msvc_target="x64" - -or:: - - def configure(conf): - conf.env.MSVC_VERSIONS = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0'] - conf.env.MSVC_TARGETS = ['x64'] - conf.load('msvc') - -or:: - - def configure(conf): - conf.load('msvc', funs='no_autodetect') - conf.check_lib_msvc('gdi32') - conf.check_libs_msvc('kernel32 user32') - def build(bld): - tg = bld.program(source='main.c', target='app', use='KERNEL32 USER32 GDI32') - -Platforms and targets will be tested in the order they appear; -the first good configuration will be used. - -To force testing all the configurations that are not used, use the ``--no-msvc-lazy`` option -or set ``conf.env.MSVC_LAZY_AUTODETECT=False``. 
- -Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm - -Compilers supported: - -* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 15 (Visual Studio 2017) -* wsdk => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0 -* icl => Intel compiler, versions 9, 10, 11, 13 -* winphone => Visual Studio to target Windows Phone 8 native (version 8.0 for now) -* Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i) -* PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i) - -To use WAF in a VS2008 Make file project (see http://code.google.com/p/waf/issues/detail?id=894) -You may consider to set the environment variable "VS_UNICODE_OUTPUT" to nothing before calling waf. -So in your project settings use something like 'cmd.exe /C "set VS_UNICODE_OUTPUT=& set PYTHONUNBUFFERED=true & waf build"'. -cmd.exe /C "chcp 1252 & set PYTHONUNBUFFERED=true && set && waf configure" -Setting PYTHONUNBUFFERED gives the unbuffered output. -""" - -import os, sys, re, traceback -from waflib import Utils, Logs, Options, Errors -from waflib.TaskGen import after_method, feature - -from waflib.Configure import conf -from waflib.Tools import ccroot, c, cxx, ar - -g_msvc_systemlibs = ''' -aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet -cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs -credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d -ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp -faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid -gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop -kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi -mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree -msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm 
-netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp -odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 -osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu -ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm -rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 -shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 -traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg -version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm -wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp -'''.split() -"""importlibs provided by MSVC/Platform SDK. Do NOT search them""" - -all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), - ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('x86_arm64', 'arm64'), - ('amd64_x86', 'x86'), ('amd64_arm', 'arm'), ('amd64_arm64', 'arm64') ] -"""List of msvc platforms""" - -all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ] -"""List of wince platforms""" - -all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')] -"""List of icl platforms""" - -def options(opt): - opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default='') - opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='') - opt.add_option('--no-msvc-lazy', action='store_false', help = 'lazily check msvc target environments', default=True, dest='msvc_lazy') - -@conf -def setup_msvc(conf, versiondict): - """ - Checks installed compilers and targets and returns the first combination from the user's - options, env, or the global supported 
lists that checks. - - :param versiondict: dict(platform -> dict(architecture -> configuration)) - :type versiondict: dict(string -> dict(string -> target_compiler) - :return: the compiler, revision, path, include dirs, library paths and target architecture - :rtype: tuple of strings - """ - platforms = getattr(Options.options, 'msvc_targets', '').split(',') - if platforms == ['']: - platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] - desired_versions = getattr(Options.options, 'msvc_version', '').split(',') - if desired_versions == ['']: - desired_versions = conf.env.MSVC_VERSIONS or list(reversed(sorted(versiondict.keys()))) - - # Override lazy detection by evaluating after the fact. - lazy_detect = getattr(Options.options, 'msvc_lazy', True) - if conf.env.MSVC_LAZY_AUTODETECT is False: - lazy_detect = False - - if not lazy_detect: - for val in versiondict.values(): - for arch in list(val.keys()): - cfg = val[arch] - cfg.evaluate() - if not cfg.is_valid: - del val[arch] - conf.env.MSVC_INSTALLED_VERSIONS = versiondict - - for version in desired_versions: - Logs.debug('msvc: detecting %r - %r', version, desired_versions) - try: - targets = versiondict[version] - except KeyError: - continue - - seen = set() - for arch in platforms: - if arch in seen: - continue - else: - seen.add(arch) - try: - cfg = targets[arch] - except KeyError: - continue - - cfg.evaluate() - if cfg.is_valid: - compiler,revision = version.rsplit(' ', 1) - return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu - conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys()))) - -@conf -def get_msvc_version(conf, compiler, version, target, vcvars): - """ - Checks that an installed compiler actually runs and uses vcvars to obtain the - environment needed by the compiler. 
- - :param compiler: compiler type, for looking up the executable name - :param version: compiler version, for debugging only - :param target: target architecture - :param vcvars: batch file to run to check the environment - :return: the location of the compiler executable, the location of include dirs, and the library paths - :rtype: tuple of strings - """ - Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target) - - try: - conf.msvc_cnt += 1 - except AttributeError: - conf.msvc_cnt = 1 - batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt) - batfile.write("""@echo off -set INCLUDE= -set LIB= -call "%s" %s -echo PATH=%%PATH%% -echo INCLUDE=%%INCLUDE%% -echo LIB=%%LIB%%;%%LIBPATH%% -""" % (vcvars,target)) - sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()]) - lines = sout.splitlines() - - if not lines[0]: - lines.pop(0) - - MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None - for line in lines: - if line.startswith('PATH='): - path = line[5:] - MSVC_PATH = path.split(';') - elif line.startswith('INCLUDE='): - MSVC_INCDIR = [i for i in line[8:].split(';') if i] - elif line.startswith('LIB='): - MSVC_LIBDIR = [i for i in line[4:].split(';') if i] - if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR): - conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)') - - # Check if the compiler is usable at all. - # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run. - env = dict(os.environ) - env.update(PATH = path) - compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler) - cxx = conf.find_program(compiler_name, path_list=MSVC_PATH) - - # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically. 
	# Remove any inherited CL variable before probing: it may carry options
	# that change cl.exe's output/behaviour considerably.
	if 'CL' in env:
		del(env['CL'])

	try:
		conf.cmd_and_log(cxx + ['/help'], env=env)
	except UnicodeError:
		st = traceback.format_exc()
		if conf.logger:
			conf.logger.error(st)
		conf.fatal('msvc: Unicode error - check the code page?')
	except Exception as e:
		Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e))
		conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
	else:
		Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
	finally:
		# Always clear the temporary compiler entry set while probing
		conf.env[compiler_name] = ''

	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)

def gather_wince_supported_platforms():
	"""
	Scan the Windows registry for installed Windows CE (SmartPhone) SDKs.

	:return: a list of (device, platforms) tuples, where platforms is a list of
		(architecture, compiler, include_dir, lib_dir) tuples for that device
	:rtype: list
	"""
	supported_wince_platforms = []
	# Try the WoW64 registry view first, then the native one
	try:
		ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
	except OSError:
		try:
			ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
		except OSError:
			ce_sdk = ''
	if not ce_sdk:
		return supported_wince_platforms

	index = 0
	while 1:
		# Enumerate subkeys until EnumKey raises (no more entries)
		try:
			sdk_device = Utils.winreg.EnumKey(ce_sdk, index)
			sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
		except OSError:
			break
		index += 1
		try:
			path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir')
		except OSError:
			try:
				path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation')
			except OSError:
				continue
			# SDKInformation points at an xml file; keep its directory
			path,xml = os.path.split(path)
		path = str(path)
		# Derive the device name from the trailing path component(s)
		path,device = os.path.split(path)
		if not device:
			path,device = os.path.split(path)
		platforms = []
		for arch,compiler in all_wince_platforms:
			# An architecture is usable only if its Lib directory exists
			if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
				platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
		if platforms:
			supported_wince_platforms.append((device, platforms))
	return supported_wince_platforms

def gather_msvc_detected_versions():
	"""
	Scan the registry for Visual Studio / VC Express installations.

	:return: a list of (version_number, version_name, registry_path) tuples,
		sorted by ascending version number
	:rtype: list
	"""
	# Match versions such as '9.0', '10.0' and express editions '10.0Exp'
	version_pattern = re.compile(r'^(\d\d?\.\d\d?)(Exp)?$')
	detected_versions = []
	for vcver,vcvar in (('VCExpress','Exp'), ('VisualStudio','')):
		prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver
		try:
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
		except OSError:
			# Fall back to the native (non-WoW64) registry view
			prefix = 'SOFTWARE\\Microsoft\\' + vcver
			try:
				all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
			except OSError:
				continue

		index = 0
		while 1:
			try:
				version = Utils.winreg.EnumKey(all_versions, index)
			except OSError:
				break
			index += 1
			match = version_pattern.match(version)
			if match:
				versionnumber = float(match.group(1))
			else:
				continue
			detected_versions.append((versionnumber, version+vcvar, prefix+'\\'+version))
	def fun(tup):
		# Sort key: the numeric version
		return tup[0]

	detected_versions.sort(key = fun)
	return detected_versions

class target_compiler(object):
	"""
	Wrap a compiler configuration; call evaluate() to determine
	whether the configuration is usable.
	"""
	def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
		"""
		:param ctx: configuration context to use to eventually get the version environment
		:param compiler: compiler name
		:param cpu: target cpu
		:param version: compiler version number
		:param bat_target: argument passed to the environment batch file
			(callers pass the target architecture such as 'x86', '/x86' or '') —
			exact semantics depend on get_msvc_version; verify there
		:param bat: path to the batch file to run
		:param callback: optional callable invoked on the (bindirs, incdirs, libdirs)
			triplet returned by get_msvc_version; may rewrite it
		"""
		self.conf = ctx
		self.name = None
		# is_valid/is_done are set by evaluate()
		self.is_valid = False
		self.is_done = False

		self.compiler = compiler
		self.cpu = cpu
		self.version = version
		self.bat_target = bat_target
		self.bat = bat
		self.callback = callback

	def evaluate(self):
		# Run the (expensive) compiler probe at most once
		if self.is_done:
			return
		self.is_done = True
		try:
			vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat)
		except Errors.ConfigurationError:
			self.is_valid = False
			return
		if self.callback:
			# Give the callback a chance to rewrite the path triplet
			vs = self.callback(self, vs)
		self.is_valid = True
		(self.bindirs, self.incdirs, self.libdirs) = vs

	def __str__(self):
		return str((self.compiler, self.cpu, self.version, self.bat_target, self.bat))

	def __repr__(self):
		return repr((self.compiler, self.cpu, self.version, self.bat_target, self.bat))

@conf
def gather_wsdk_versions(conf, versions):
	"""
	Use winreg to add the Windows SDK compiler versions to the input dict.

	:param versions: dict to modify (maps version names to target maps)
	:type versions: dict
	"""
	version_pattern = re.compile(r'^v..?.?\...?.?')
	try:
		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
	except OSError:
		try:
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
		except OSError:
			return
	index = 0
	while 1:
		try:
			version = Utils.winreg.EnumKey(all_versions, index)
		except OSError:
			break
		index += 1
		if not version_pattern.match(version):
			continue
		try:
			msvc_version = Utils.winreg.OpenKey(all_versions, version)
			path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
		except OSError:
			continue
		# A usable SDK must provide bin/SetEnv.cmd
		if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
			targets = {}
			for target,arch in all_msvc_platforms:
				targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))
			# Strip the leading 'v' from the registry key name
			versions['wsdk ' + version[1:]] = targets

@conf
def gather_msvc_targets(conf, versions, version, vc_path):
	"""
	Register the targets available for a regular MSVC installation rooted at vc_path.
	Tries the environment batch file locations used by the various VS releases,
	from newest layout to oldest.
	"""
	targets = {}

	if os.path.isfile(os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')):
		# VS 2017+ layout
		for target,realtarget in all_msvc_platforms[::-1]:
			targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat'))
	elif os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')):
		for target,realtarget in all_msvc_platforms[::-1]:
			targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'vcvarsall.bat'))
	elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')):
		# Older releases only provide a 32-bit environment script
		targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat'))
	elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')):
		targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))
	if targets:
		versions['msvc %s' % version] = targets

@conf
def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms):
	"""
	Register the Windows CE cross-compiler targets for each supported device.
	"""
	for device,platforms in supported_platforms:
		targets = {}
		for platform,compiler,include,lib in platforms:
			winCEpath = os.path.join(vc_path, 'ce')
			if not os.path.isdir(winCEpath):
				continue

			if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
				bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)]
				incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
				libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
				def combine_common(obj, compiler_env):
					# Merge the common (x86 host) bindirs into the CE-specific paths.
					# TODO this is likely broken, remove in waf 2.1
					(common_bindirs,_1,_2) = compiler_env
					return (bindirs + common_bindirs, incdirs, libdirs)
				targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common)
		if targets:
			versions[device + ' ' + version] = targets

@conf
def gather_winphone_targets(conf, versions, version, vc_path, vsvars):
	"""
	Register the Windows Phone compiler targets.
	"""
	targets = {}
	for target,realtarget in all_msvc_platforms[::-1]:
		targets[target] = target_compiler(conf, 'winphone', realtarget, version, target, vsvars)
	if targets:
		versions['winphone ' + version] = targets

@conf
def gather_vswhere_versions(conf, versions):
	"""
	Detect Visual Studio 2017+ installations through vswhere.exe
	and add their targets to *versions*.
	"""
	try:
		import json
	except ImportError:
		Logs.error('Visual Studio 2017 detection requires Python 2.6')
		return

	prg_path = os.environ.get('ProgramFiles(x86)', os.environ.get('ProgramFiles', 'C:\\Program Files (x86)'))

	vswhere = os.path.join(prg_path, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
	args = [vswhere, '-products', '*', '-legacy', '-format', 'json']
	try:
		txt = conf.cmd_and_log(args)
	except Errors.WafError as e:
		Logs.debug('msvc: vswhere.exe failed %s', e)
		return

	if sys.version_info[0] < 3:
		# cmd_and_log returns bytes on Python 2; decode with the console codepage
		txt = txt.decode(Utils.console_encoding())

	arr = json.loads(txt)
	arr.sort(key=lambda x: x['installationVersion'])
	for entry in arr:
		# Keep only major.minor, e.g. '15.9.28307.1000' -> '15.9'
		ver = entry['installationVersion']
		ver = str('.'.join(ver.split('.')[:2]))
		path = str(os.path.abspath(entry['installationPath']))
		# Registry-based detection may already have registered this version
		if os.path.exists(path) and ('msvc %s' % ver) not in versions:
			conf.gather_msvc_targets(versions, ver, path)

@conf
def gather_msvc_versions(conf, versions):
	"""
	Detect registry-registered MSVC installations and add their
	msvc/wince/winphone targets to *versions*.
	"""
	vc_paths = []
	for (v,version,reg) in gather_msvc_detected_versions():
		try:
			try:
				msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC")
			except OSError:
				msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++")
			path,type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir')
		except OSError:
			# Fall back to the side-by-side VS7 registry entry
			try:
				msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7")
				path,type = Utils.winreg.QueryValueEx(msvc_version, version)
			except OSError:
				continue
			else:
				vc_paths.append((version, os.path.abspath(str(path))))
			continue
		else:
			vc_paths.append((version, os.path.abspath(str(path))))

	wince_supported_platforms = gather_wince_supported_platforms()

	for version,vc_path in vc_paths:
		vs_path = os.path.dirname(vc_path)
		vsvars = os.path.join(vs_path, 'Common7', 'Tools', 'vsvars32.bat')
		if wince_supported_platforms and os.path.isfile(vsvars):
			conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms)

	# WP80 works with 11.0Exp and 11.0, both of which resolve to the same vc_path.
	# Stop after one is found.
	for version,vc_path in vc_paths:
		vs_path = os.path.dirname(vc_path)
		vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat')
		if os.path.isfile(vsvars):
			conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars)
			break

	for version,vc_path in vc_paths:
		vs_path = os.path.dirname(vc_path)
		conf.gather_msvc_targets(versions, version, vc_path)

@conf
def gather_icl_versions(conf, versions):
	"""
	Checks ICL compilers

	:param versions: dict to modify (maps version names to target maps)
	:type versions: dict
	"""
	version_pattern = re.compile(r'^...?.?\....?.?')
	try:
		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
	except OSError:
		try:
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
		except OSError:
			return
	index = 0
	while 1:
		try:
			version = Utils.winreg.EnumKey(all_versions, index)
		except OSError:
			break
		index += 1
		if not version_pattern.match(version):
			continue
		targets = {}
		for target,arch in all_icl_platforms:
			# The 64-bit target is stored under 'EM64T_NATIVE' in the registry
			if target=='intel64':
				targetDir='EM64T_NATIVE'
			else:
				targetDir=target
			try:
				Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
				icl_version=Utils.winreg.OpenKey(all_versions,version)
				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
			except OSError:
				pass
			else:
				batch_file=os.path.join(path,'bin','iclvars.bat')
				if os.path.isfile(batch_file):
					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
		# Second pass: some releases store the target directly under the version key
		for target,arch in all_icl_platforms:
			try:
				icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
				path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
			except OSError:
				continue
			else:
				batch_file=os.path.join(path,'bin','iclvars.bat')
				if os.path.isfile(batch_file):
					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
		major = version[0:2]
		versions['intel ' + major] = targets

@conf
def gather_intel_composer_versions(conf, versions):
	"""
	Checks ICL compilers that are part of Intel Composer Suites

	:param versions: dict to modify (maps version names to target maps)
	:type versions: dict
	"""
	version_pattern = re.compile(r'^...?.?\...?.?.?')
	try:
		all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites')
	except OSError:
		try:
			all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
		except OSError:
			return
	index = 0
	while 1:
		try:
			version = Utils.winreg.EnumKey(all_versions, index)
		except OSError:
			break
		index += 1
		if not version_pattern.match(version):
			continue
		targets = {}
		for target,arch in all_icl_platforms:
			if target=='intel64':
				targetDir='EM64T_NATIVE'
			else:
				targetDir=target
			try:
				try:
					defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
				except OSError:
					# Older suites use 'EM64T' instead of 'EM64T_NATIVE'
					if targetDir == 'EM64T_NATIVE':
						defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
					else:
						raise
				uid,type = Utils.winreg.QueryValueEx(defaults, 'SubKey')
				Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
				icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
			except OSError:
				pass
			else:
				batch_file=os.path.join(path,'bin','iclvars.bat')
				if os.path.isfile(batch_file):
					targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
				# The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012
				# http://software.intel.com/en-us/forums/topic/328487
				compilervars_warning_attr = '_compilervars_warning_key'
				if version[0:2] == '13' and getattr(conf, compilervars_warning_attr, True):
					# Warn only once per configuration run
					setattr(conf, compilervars_warning_attr, False)
					patch_url = 'http://software.intel.com/en-us/forums/topic/328487'
					compilervars_arch = os.path.join(path, 'bin', 'compilervars_arch.bat')
					for vscomntool in ('VS110COMNTOOLS', 'VS100COMNTOOLS'):
						if vscomntool in os.environ:
							vs_express_path = os.environ[vscomntool] + r'..\IDE\VSWinExpress.exe'
							dev_env_path = os.environ[vscomntool] + r'..\IDE\devenv.exe'
							if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"' in Utils.readf(compilervars_arch) and
								not os.path.exists(vs_express_path) and not os.path.exists(dev_env_path)):
								Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU '
									'(VSWinExpress.exe) but it does not seem to be installed at %r. '
									'The intel command line set up will fail to configure unless the file %r'
									'is patched. See: %s') % (vs_express_path, compilervars_arch, patch_url))
		major = version[0:2]
		versions['intel ' + major] = targets

@conf
def detect_msvc(self):
	# Detect all compiler configurations and select one
	return self.setup_msvc(self.get_msvc_versions())

@conf
def get_msvc_versions(self):
	"""
	:return: platform to compiler configurations
	:rtype: dict
	"""
	dct = Utils.ordered_iter_dict()
	# Gather from all known sources; later gatherers may skip versions
	# already registered by earlier ones
	self.gather_icl_versions(dct)
	self.gather_intel_composer_versions(dct)
	self.gather_wsdk_versions(dct)
	self.gather_msvc_versions(dct)
	self.gather_vswhere_versions(dct)
	Logs.debug('msvc: detected versions %r', list(dct.keys()))
	return dct

@conf
def find_lt_names_msvc(self, libname, is_static=False):
	"""
	Win32/MSVC specific code to glean out information from libtool la files.
	this function is not attached to the task_gen class.
	Returns a triplet:
	(library absolute path, library name without extension, whether the library is static)
	"""
	lt_names=[
		'lib%s.la' % libname,
		'%s.la' % libname,
	]

	for path in self.env.LIBPATH:
		for la in lt_names:
			laf=os.path.join(path,la)
			dll=None
			if os.path.exists(laf):
				ltdict = Utils.read_la_file(laf)
				lt_libdir=None
				if ltdict.get('libdir', ''):
					lt_libdir = ltdict['libdir']
				if not is_static and ltdict.get('library_names', ''):
					# Shared library: first entry of library_names, '.dll' suffix stripped
					dllnames=ltdict['library_names'].split()
					dll=dllnames[0].lower()
					dll=re.sub(r'\.dll$', '', dll)
					return (lt_libdir, dll, False)
				elif ltdict.get('old_library', ''):
					olib=ltdict['old_l