about summary refs log tree commit diff stats
path: root/wscript
diff options
context:
space:
mode:
Diffstat (limited to 'wscript')
-rw-r--r--  wscript | 356
1 file changed, 199 insertions, 157 deletions
diff --git a/wscript b/wscript
index 6215039..b30239a 100644
--- a/wscript
+++ b/wscript
@@ -1,6 +1,7 @@
#!/usr/bin/env python
import os
+import re
import sys
from waflib import Context, Logs, Options, Scripting, Utils
@@ -8,10 +9,15 @@ from waflib.extras import autowaf as autowaf
# Mandatory waf variables
APPNAME = 'lv2' # Package name for waf dist
-VERSION = '1.15.5' # Package version for waf dist
+VERSION = '1.17.2' # Package version for waf dist
top = '.' # Source directory
out = 'build' # Build directory
+# Release variables
+uri = 'http://lv2plug.in/ns/lv2'
+dist_pattern = 'http://lv2plug.in/spec/lv2-%d.%d.%d.tar.bz2'
+post_tags = []
+
# Map of specification base name to old URI-style include path
spec_map = {
'atom' : 'lv2/lv2plug.in/ns/ext/atom',
@@ -42,31 +48,29 @@ spec_map = {
def options(ctx):
ctx.load('compiler_c')
ctx.load('lv2')
- autowaf.set_options(ctx, test=True)
- opt = ctx.get_option_group('Configuration options')
- autowaf.add_flags(
- opt,
- {'no-coverage': 'Do not use gcov for code coverage',
- 'online-docs': 'Build documentation for web hosting',
- 'no-plugins': 'Do not build example plugins',
- 'copy-headers': 'Copy headers instead of linking to bundle'})
+ ctx.add_flags(
+ ctx.configuration_options(),
+ {'no-coverage': 'Do not use gcov for code coverage',
+ 'online-docs': 'Build documentation for web hosting',
+ 'no-check-links': 'Do not check documentation for broken links',
+ 'no-plugins': 'Do not build example plugins',
+ 'copy-headers': 'Copy headers instead of linking to bundle'})
def configure(conf):
- autowaf.display_header('LV2 Configuration')
try:
conf.load('compiler_c', cache=True)
except:
Options.options.build_tests = False
Options.options.no_plugins = True
+ if Options.options.online_docs:
+ Options.options.docs = True
+
conf.load('lv2', cache=True)
conf.load('autowaf', cache=True)
autowaf.set_c_lang(conf, 'c99')
- if Options.options.online_docs:
- Options.options.docs = True
-
- if Options.options.ultra_strict:
+ if Options.options.ultra_strict and not conf.env.MSVC_COMPILER:
conf.env.append_value('CFLAGS', ['-Wconversion'])
if conf.env.DEST_OS == 'win32' or not hasattr(os.path, 'relpath'):
@@ -85,21 +89,43 @@ def configure(conf):
except:
Logs.warn('Asciidoc not found, book will not be built')
+ if not Options.options.no_check_links:
+ if not conf.find_program('linkchecker',
+ var='LINKCHECKER', mandatory=False):
+ Logs.warn('Documentation will not be checked for broken links')
+
# Check for gcov library (for test coverage)
if (conf.env.BUILD_TESTS
and not Options.options.no_coverage
and not conf.is_defined('HAVE_GCOV')):
conf.check_cc(lib='gcov', define_name='HAVE_GCOV', mandatory=False)
- autowaf.set_recursive()
+ if conf.env.BUILD_TESTS:
+ conf.find_program('serdi', mandatory=False)
+ conf.find_program('sord_validate', mandatory=False)
+
+ autowaf.set_lib_env(conf, 'lv2', VERSION, has_objects=False)
+ autowaf.set_local_lib(conf, 'lv2', has_objects=False)
+
+ conf.run_env.append_unique('LV2_PATH',
+ [os.path.join(conf.path.abspath(), 'lv2')])
if conf.env.BUILD_PLUGINS:
- for i in conf.path.ant_glob('plugins/*.lv2', src=False, dir=True):
+ for i in ['eg-amp.lv2',
+ 'eg-fifths.lv2',
+ 'eg-metro.lv2',
+ 'eg-midigate.lv2',
+ 'eg-params.lv2',
+ 'eg-sampler.lv2',
+ 'eg-scope.lv2']:
try:
- conf.recurse(i.srcpath())
- conf.env.LV2_BUILD += [i.srcpath()]
- except:
- Logs.warn('Configuration failed, %s will not be built\n' % i)
+ path = os.path.join('plugins', i)
+ conf.recurse(path)
+ conf.env.LV2_BUILD += [path]
+ conf.run_env.append_unique(
+ 'LV2_PATH', [conf.build_path('plugins/%s/lv2' % i)])
+ except Exception as e:
+ Logs.warn('Configuration failed, not building %s (%s)' % (i, e))
autowaf.display_summary(
conf,
@@ -133,16 +159,18 @@ def ttl_files(path, specdir):
return map(abspath,
path.ant_glob(specdir.path_from(path) + '/*.ttl'))
-def load_ttl(files):
+def load_ttl(files, exclude = []):
import rdflib
model = rdflib.ConjunctiveGraph()
for f in files:
- model.parse(f, format='n3')
+ if f not in exclude:
+ model.parse(f, format='n3')
return model
# Task to build extension index
def build_index(task):
- sys.path.append('./lv2specgen')
+ src_dir = task.inputs[0].parent.parent
+ sys.path.append(str(src_dir.find_node('lv2specgen')))
import rdflib
import lv2specgen
@@ -150,7 +178,8 @@ def build_index(task):
lv2 = rdflib.Namespace('http://lv2plug.in/ns/lv2core#')
rdf = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
- model = load_ttl(['lv2/core/meta.ttl'])
+ model = load_ttl([str(src_dir.find_node('lv2/core/meta.ttl')),
+ str(src_dir.find_node('lv2/core/people.ttl'))])
# Get date for this version, and list of all LV2 distributions
proj = rdflib.URIRef('http://lv2plug.in/ns/lv2')
@@ -168,26 +197,6 @@ def build_index(task):
else:
print('warning: %s has no file release\n' % proj)
- # Get history for this LV2 release
- entries = lv2specgen.specHistoryEntries(model, proj, {})
-
- # Add entries for every spec that has the same distribution
- ctx = task.generator.bld
- subdirs = specdirs(ctx.path)
- for specdir in subdirs:
- m = load_ttl(ttl_files(ctx.path, specdir))
- name = os.path.basename(specdir.abspath())
- spec = m.value(None, rdf.type, lv2.Specification)
- if spec:
- for dist in dists:
- release = m.value(None, doap['file-release'], dist[1])
- if release:
- entries[dist] += lv2specgen.releaseChangeset(
- m, release, str(name))
-
- # Generate history for all post-unification LV2 distributions
- history = lv2specgen.specHistoryMarkup(entries)
-
rows = []
for f in task.inputs:
if not f.abspath().endswith('index.html.in'):
@@ -204,8 +213,7 @@ def build_index(task):
subst_file(task.inputs[0].abspath(), task.outputs[0].abspath(),
{'@ROWS@': ''.join(rows),
'@LV2_VERSION@': VERSION,
- '@DATE@': date,
- '@HISTORY@': history})
+ '@DATE@': date})
def build_spec(bld, path):
name = os.path.basename(path)
@@ -214,20 +222,23 @@ def build_spec(bld, path):
old_include_dir = os.path.join(bld.env.INCLUDEDIR, spec_map[name])
# Build test program if applicable
- if bld.env.BUILD_TESTS and bld.path.find_node(path + '/%s-test.c' % name):
+ for test in bld.path.ant_glob(os.path.join(path, '*-test.c')):
test_lib = []
test_cflags = ['']
test_linkflags = ['']
if bld.is_defined('HAVE_GCOV'):
- test_lib += ['gcov', 'rt']
+ test_lib += ['gcov']
test_cflags += ['--coverage']
test_linkflags += ['--coverage']
+ if bld.env.DEST_OS not in ['darwin', 'win32']:
+ test_lib += ['rt']
# Unit test program
bld(features = 'c cprogram',
- source = path + '/%s-test.c' % name,
+ source = test,
lib = test_lib,
- target = path + '/%s-test' % name,
+ uselib = 'LV2',
+ target = os.path.splitext(str(test.get_bld()))[0],
install_path = None,
cflags = test_cflags,
linkflags = test_linkflags)
@@ -250,11 +261,11 @@ def build(bld):
specs = (bld.path.ant_glob('lv2/*', dir=True))
# Copy lv2.h to include directory for backwards compatibility
- old_lv2_h_path = os.path.join(bld.env.INCLUDEDIR, 'lv2/lv2.h')
+ old_lv2_h_path = os.path.join(bld.env.INCLUDEDIR, 'lv2.h')
if bld.env.COPY_HEADERS:
- bld.install_files(old_lv2_h_path, 'lv2/core/lv2.h')
+ bld.install_files(os.path.dirname(old_lv2_h_path), 'lv2/core/lv2.h')
else:
- bld.symlink_as(old_lv2_h_path, 'core/lv2.h')
+ bld.symlink_as(old_lv2_h_path, 'lv2/core/lv2.h')
# LV2 pkgconfig file
bld(features = 'subst',
@@ -275,7 +286,7 @@ def build(bld):
# Build extensions
for spec in specs:
- build_spec(bld, spec.srcpath())
+ build_spec(bld, spec.path_from(bld.path))
# Build plugins
for plugin in bld.env.LV2_BUILD:
@@ -294,31 +305,30 @@ def build(bld):
bld.install_files('${LV2DIR}/schemas.lv2/',
bld.path.ant_glob('schemas.lv2/*.ttl'))
+ if bld.env.ONLINE_DOCS:
+ # Generate .htaccess files
+ for d in ('ns', 'ns/ext', 'ns/extensions'):
+ path = os.path.join(str(bld.path.get_bld()), d)
+ bld(features = 'subst',
+ source = 'doc/htaccess.in',
+ target = os.path.join(path, '.htaccess'),
+ install_path = None,
+ BASE = '/' + d)
+
if bld.env.DOCS or bld.env.ONLINE_DOCS:
- # Prepare spec output directories
+ # Copy spec files to build dir
for spec in specs:
- # Copy spec files to build dir
- srcpath = spec.srcpath()
- name = os.path.basename(srcpath)
- full_path = spec_map[name]
+ srcpath = spec.path_from(bld.path)
+ basename = os.path.basename(srcpath)
+ full_path = spec_map[basename]
+ name = 'lv2core' if basename == 'core' else basename
path = chop_lv2_prefix(full_path)
- base = full_path[len('lv2/lv2plug.in'):]
- for f in bld.path.ant_glob(srcpath + '/*.*'):
- target = os.path.join(path, os.path.basename(f.srcpath()))
- bld(features = 'subst',
- is_copy = True,
- name = 'copy',
- source = f,
- target = target)
-
- # Generate .htaccess file
- if bld.env.ONLINE_DOCS:
- bld(features = 'subst',
- source = 'doc/htaccess.in',
- target = os.path.join(base, '.htaccess'),
- install_path = None,
- NAME = name,
- BASE = base)
+ spec_path = os.path.join(path[3:], name + '.ttl')
+
+ bld(features = 'subst',
+ is_copy = True,
+ source = os.path.join(srcpath, name + '.ttl'),
+ target = path + '.ttl')
# Copy stylesheets to build directory
for i in ['style.css', 'pygments.css']:
@@ -328,49 +338,46 @@ def build(bld):
source = 'doc/%s' % i,
target = 'aux/%s' % i)
- bld(features = 'subst',
- is_copy = True,
- name = 'copy',
- source = 'doc/doxy-style.css',
- target = 'doc/html/doxy-style.css')
-
# Build Doxygen documentation (and tags file)
- autowaf.build_dox(bld, 'LV2', VERSION, top, out, 'lv2plug.in/doc', False)
+ autowaf.build_dox(bld, 'LV2', VERSION, top, out, 'doc', False)
bld.add_group()
index_files = []
for spec in specs:
# Call lv2specgen to generate spec docs
- srcpath = spec.srcpath()
+ srcpath = spec.path_from(bld.path)
basename = os.path.basename(srcpath)
full_path = spec_map[basename]
name = 'lv2core' if basename == 'core' else basename
- ttl_name = 'lv2.ttl' if basename == 'core' else name + '.ttl'
- index_file = os.path.join('index_rows', name)
- index_files += [index_file]
- root_path = os.path.relpath('lv2/lv2plug.in/ns', full_path)
- html_path = '%s/%s.html' % (chop_lv2_prefix(full_path), name)
- out_bundle = os.path.dirname(html_path)
-
- cmd = ('../lv2specgen/lv2specgen.py' +
+ ttl_name = name + '.ttl'
+ index_file = bld.path.get_bld().make_node('index_rows/' + name)
+ index_files += [index_file.path_from(bld.path)]
+ chopped_path = chop_lv2_prefix(full_path)
+
+ assert chopped_path.startswith('ns/')
+ root_path = os.path.relpath('/', os.path.dirname(chopped_path[2:]))
+ html_path = '%s.html' % chopped_path
+ out_dir = os.path.dirname(html_path)
+
+ cmd = (str(bld.path.find_node('lv2specgen/lv2specgen.py')) +
' --root-uri=http://lv2plug.in/ns/ --root-path=' + root_path +
' --list-email=devel@lists.lv2plug.in'
' --list-page=http://lists.lv2plug.in/listinfo.cgi/devel-lv2plug.in'
- ' --style-uri=' + os.path.relpath('aux/style.css', out_bundle) +
- ' --docdir=' + os.path.relpath('doc/html', os.path.dirname(html_path)) +
- ' --tags=doc/tags' +
- ' --index=' + index_file +
+ ' --style-uri=' + os.path.relpath('aux/style.css', out_dir) +
+ ' --docdir=' + os.path.relpath('doc/html', out_dir) +
+ ' --tags=%s' % bld.path.get_bld().make_node('doc/tags') +
+ ' --index=' + str(index_file) +
' ${SRC} ${TGT}')
bld(rule = cmd,
source = os.path.join(srcpath, ttl_name),
- target = [html_path, index_file])
+ target = [html_path, index_file],
+ shell = False)
# Install documentation
- if not bld.env.ONLINE_DOCS:
- base = chop_lv2_prefix(srcpath)
- bld.install_files('${DOCDIR}/' + srcpath,
- bld.path.get_bld().ant_glob(base + '/*.html'))
+ base = chop_lv2_prefix(srcpath)
+ bld.install_files(os.path.join('${DOCDIR}', 'lv2', os.path.dirname(html_path)),
+ html_path)
index_files.sort()
bld.add_group()
@@ -382,9 +389,17 @@ def build(bld):
target = 'ns/index.html')
# Install main documentation files
- if not bld.env.ONLINE_DOCS:
- bld.install_files('${DOCDIR}/lv2/lv2plug.in/aux/', 'aux/style.css')
- bld.install_files('${DOCDIR}/lv2/lv2plug.in/ns/', 'ns/index.html')
+ bld.install_files('${DOCDIR}/lv2/aux/', 'aux/style.css')
+ bld.install_files('${DOCDIR}/lv2/ns/', 'ns/index.html')
+
+ def check_links(ctx):
+ import subprocess
+ if ctx.env.LINKCHECKER:
+ if subprocess.call([ctx.env.LINKCHECKER[0], '--no-status', out]):
+ ctx.fatal('Documentation contains broken links')
+
+ if bld.cmd == 'build':
+ bld.add_post_fun(check_links)
if bld.env.BUILD_TESTS:
# Generate a compile test .c file that includes all headers
@@ -403,6 +418,8 @@ def build(bld):
bld(features = 'c cprogram',
source = bld.path.get_bld().make_node('build-test.c'),
target = 'build-test',
+ includes = '.',
+ uselib = 'LV2',
install_path = None)
if bld.env.BUILD_BOOK:
@@ -426,14 +443,31 @@ def lint(ctx):
"build-test.c")
subprocess.call(cmd, cwd='build', shell=True)
-def test(ctx):
- "runs unit tests"
- autowaf.pre_test(ctx, APPNAME, dirs=['.'])
- for i in ctx.path.ant_glob('**/*-test'):
- os.environ['PATH'] = '.' + os.pathsep + os.getenv('PATH')
- test = i.path_from(ctx.path.find_node('build'))
- autowaf.run_test(ctx, APPNAME, test, dirs=['.'], name=i)
- autowaf.post_test(ctx, APPNAME, dirs=['.'])
+def test(tst):
+ import tempfile
+
+ with tst.group("Data") as check:
+ schemas = list(map(str, tst.path.ant_glob("schemas.lv2/*.ttl")))
+ spec_files = list(map(str, tst.path.ant_glob("lv2/**/*.ttl")))
+ plugin_files = list(map(str, tst.path.ant_glob("plugins/**/*.ttl")))
+ bld_files = list(map(str, tst.path.get_bld().ant_glob("**/*.ttl")))
+
+ if "SERDI" in tst.env:
+ for f in spec_files:
+ with tempfile.NamedTemporaryFile(mode="w") as tmp:
+ base_dir = os.path.dirname(f)
+ cmd = tst.env.SERDI + ["-o", "turtle", f, base_dir]
+ check(cmd, stdout=tmp.name)
+ check.file_equals(f, tmp.name)
+
+ if "SORD_VALIDATE" in tst.env:
+ all_files = schemas + spec_files + plugin_files + bld_files
+ check(tst.env.SORD_VALIDATE + all_files)
+
+ with tst.group('Unit') as check:
+ pattern = tst.env.cprogram_PATTERN % '**/*-test'
+ for test in tst.path.get_bld().ant_glob(pattern):
+ check([str(test)])
class Dist(Scripting.Dist):
def execute(self):
@@ -452,61 +486,69 @@ class DistCheck(Dist, Scripting.DistCheck):
def archive(self):
Dist.archive(self)
+def _get_news_entries(ctx):
+ from waflib.extras import autoship
+
+ # Get project-level news entries
+ lv2_entries = autoship.read_ttl_news('lv2',
+ ['lv2/core/meta.ttl',
+ 'lv2/core/people.ttl'],
+ dist_pattern = dist_pattern)
+
+ release_pattern = r'http://lv2plug.in/spec/lv2-([0-9\.]*).tar.bz2'
+ current_version = sorted(lv2_entries.keys(), reverse=True)[0]
+
+ # Add items from every specification
+ for specdir in specdirs(ctx.path):
+ name = os.path.basename(specdir.abspath())
+ entries = autoship.read_ttl_news(name, ttl_files(ctx.path, specdir))
+
+ def add_items(lv2_version, name, items):
+ for item in items:
+ lv2_entries[lv2_version]["items"] += ["%s: %s" % (name, item)]
+
+ if entries and name != "core":
+ latest_revision = sorted(entries.keys(), reverse=True)[0]
+ for revision, entry in entries.items():
+ if "dist" in entry:
+ match = re.match(release_pattern, entry["dist"])
+ if match:
+ # Append news items to corresponding LV2 version
+ version = tuple(map(int, match.group(1).split('.')))
+ add_items(version, name, entry["items"])
+
+ elif revision == latest_revision:
+ # Dev version that isn't in a release yet, append to current
+ add_items(current_version, name, entry["items"])
+
+ # Sort news items in each versions
+ for revision, entry in lv2_entries.items():
+ entry["items"].sort()
+
+ return lv2_entries
+
def posts(ctx):
"generates news posts in Pelican Markdown format"
- subdirs = specdirs(ctx.path)
- dev_dist = 'http://lv2plug.in/spec/lv2-%s.tar.bz2' % VERSION
+
+ from waflib.extras import autoship
try:
os.mkdir(os.path.join(out, 'posts'))
except:
pass
- # Get all entries (as in dist())
- top_entries = {}
- for specdir in subdirs:
- entries = autowaf.get_rdf_news(os.path.basename(specdir.abspath()),
- ttl_files(ctx.path, specdir),
- top_entries,
- dev_dist = dev_dist)
+ autoship.write_posts(_get_news_entries(ctx),
+ os.path.join(out, 'posts'),
+ {'Author': 'drobilla'})
+
+def news(ctx):
+ """write an amalgamated NEWS file to the source directory"""
- entries = autowaf.get_rdf_news('lv2',
- ['lv2/core/meta.ttl'],
- None,
- top_entries,
- dev_dist = dev_dist)
+ from waflib.extras import autoship
- autowaf.write_posts(entries,
- {'Author': 'drobilla'},
- os.path.join(out, 'posts'))
+ autoship.write_news(_get_news_entries(ctx), 'NEWS')
def dist(ctx):
- subdirs = specdirs(ctx.path)
- dev_dist = 'http://lv2plug.in/spec/lv2-%s.tar.bz2' % VERSION
-
- # Write NEWS files in source directory
- top_entries = {}
- for specdir in subdirs:
- entries = autowaf.get_rdf_news(os.path.basename(specdir.abspath()),
- ttl_files(ctx.path, specdir),
- top_entries,
- dev_dist = dev_dist)
- autowaf.write_news(entries, specdir.abspath() + '/NEWS')
-
- # Write top level amalgamated NEWS file
- entries = autowaf.get_rdf_news('lv2',
- ['lv2/lv2plug.in/ns/lv2core/meta.ttl'],
- None,
- top_entries,
- dev_dist = dev_dist)
- autowaf.write_news(entries, 'NEWS')
-
- # Build archive
+ news(ctx)
ctx.archive()
-
- # Delete generated NEWS files from source directory
- for i in subdirs + [ctx.path]:
- try:
- os.remove(os.path.join(i.abspath(), 'NEWS'))
- except:
- pass
+ os.remove('NEWS')