author    David Robillard <d@drobilla.net>  2022-07-07 18:59:06 -0400
committer David Robillard <d@drobilla.net>  2022-07-17 18:13:53 -0400
commit    d4a970f6962dda28133290194832b726b566ddab (patch)
tree      cfe9747042d55388705371a8ce95505ffb702470 /scripts
parent    7f3a2651a3635232d94f7bf9ce23d6b575735732 (diff)
download  lv2-d4a970f6962dda28133290194832b726b566ddab.tar.xz
Switch to meson build system
Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/lv2_build_index.py           252
-rwxr-xr-x  scripts/lv2_check_specification.py   248
-rwxr-xr-x  scripts/lv2_check_syntax.py           82
-rwxr-xr-x  scripts/lv2_write_news.py            258
-rw-r--r--  scripts/meson.build                    9
5 files changed, 849 insertions, 0 deletions
diff --git a/scripts/lv2_build_index.py b/scripts/lv2_build_index.py
new file mode 100755
index 0000000..444e078
--- /dev/null
+++ b/scripts/lv2_build_index.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: ISC
+
+"""
+Write an HTML index for a set of LV2 specifications.
+"""
+
+import argparse
+import datetime
+import json
+import os
+import subprocess
+import sys
+import time
+
+import rdflib
+
+
+doap = rdflib.Namespace("http://usefulinc.com/ns/doap#")
+lv2 = rdflib.Namespace("http://lv2plug.in/ns/lv2core#")
+owl = rdflib.Namespace("http://www.w3.org/2002/07/owl#")
+rdf = rdflib.Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")
+
+
+def _subst_file(template_path, output_file, substitutions):
+ "Replace keys with values in a template file and write the result."
+
+ with open(template_path, "r", encoding="utf-8") as template:
+ for line in template:
+ for key, value in substitutions.items():
+ line = line.replace(key, value)
+
+ output_file.write(line)
+
+
+def _load_ttl(data_paths, exclude=None):
+ "Load an RDF model from a Turtle file."
+
+ model = rdflib.ConjunctiveGraph()
+ for path in data_paths:
+ if exclude is None or path not in exclude:
+ model.parse(path, format="n3")
+
+ return model
+
+
+def _warn(message):
+ "Load a warning message."
+
+ assert not message.startswith("warning: ")
+ assert not message.endswith("\n")
+ sys.stderr.write(message)
+ sys.stderr.write("\n")
+
+
+def _spec_target(spec, root, online=False):
+ "Return the relative link target for a specification."
+
+ target = spec.removeprefix(root) if spec.startswith(root) else spec
+
+ return target if online else target + ".html"
+
+
+def _spec_date(model, spec, minor, micro):
+ "Return the date for a release of a specification as an RDF node."
+
+ # Get date
+ date = None
+ for release in model.objects(spec, doap.release):
+ revision = model.value(release, doap.revision, None, any=False)
+ if str(revision) == f"{minor}.{micro}":
+ date = model.value(release, doap.created, None)
+ break
+
+ # Verify that this date is the latest
+ if date is not None:
+ for other_release in model.objects(spec, doap.release):
+ for other_date in model.objects(other_release, doap.created):
+ if other_date is None:
+ _warn(f"{spec} has no doap:created date")
+ elif other_date > date:
+ _warn(f"{spec} {minor}.{micro} ({date}) is an old release")
+ break
+
+ return date
+
+
+def _spec_link_columns(spec, root, name, online):
+ "Return the first two link columns in a spec row as an HTML string."
+
+ # Find relative link target and stem
+ target = _spec_target(spec, root, online)
+ stem = os.path.splitext(os.path.basename(target))[0]
+
+ # Prefix with a comment to act as a sort key for the row
+ col = f"<!-- {stem} -->"
+
+ # Specification
+ col += f'<td><a rel="rdfs:seeAlso" href="{target}">{name}</a></td>'
+
+ # API
+ col += '<td><a rel="rdfs:seeAlso"'
+ col += f' href="../doc/html/group__{stem}.html">{name}'
+ col += "</a></td>"
+
+ return col
+
+
+def _spec_description_column(model, spec):
+ "Return the description column in a spec row as an HTML string."
+
+ shortdesc = model.value(spec, doap.shortdesc, None, any=False)
+
+ return "<td>" + str(shortdesc) + "</td>" if shortdesc else "<td></td>"
+
+
+def index_row(model, spec, root_uri, online):
+ "Return the row for a spec as an HTML string."
+
+ # Get version
+ minor = 0
+ micro = 0
+ try:
+ minor = int(model.value(spec, lv2.minorVersion, None, any=False))
+ micro = int(model.value(spec, lv2.microVersion, None, any=False))
+ except rdflib.exceptions.UniquenessError:
+ _warn(f"{spec} has no unique valid version")
+ return ""
+
+ # Check that date is present and valid
+ if _spec_date(model, spec, minor, micro) is None:
+ _warn(f"{spec} has no doap:created date")
+ return ""
+
+ row = "<tr>"
+
+ # Specification and API
+ row += _spec_link_columns(
+ spec,
+ root_uri,
+ model.value(spec, doap.name, None).removeprefix("LV2 "),
+ online,
+ )
+
+ # Description
+ row += _spec_description_column(model, spec)
+
+ # Version
+ row += f"<td>{minor}.{micro}</td>"
+
+ # Status
+ deprecated = model.value(spec, owl.deprecated, None)
+ deprecated = deprecated and str(deprecated) not in ["0", "false"]
+ if minor == 0:
+ row += '<td><span class="error">Experimental</span></td>'
+ elif deprecated:
+ row += '<td><span class="warning">Deprecated</span></td>'
+ elif micro % 2 == 0:
+ row += '<td><span class="success">Stable</span></td>'
+ else:
+ row += '<td><span class="warning">Development</span></td>'
+
+ row += "</tr>"
+
+ return row
+
+
+def build_index(
+ lv2_source_root,
+ lv2_version,
+ input_paths,
+ root_uri,
+ online,
+):
+ "Build the LV2 specification index and write it to stdout."
+
+ model = _load_ttl(input_paths)
+
+ # Get date for this version, and list of all LV2 distributions
+ proj = rdflib.URIRef("http://lv2plug.in/ns/lv2")
+ date = None
+ for row in model.triples([proj, doap.release, None]):
+ revision = model.value(row[2], doap.revision, None)
+ created = model.value(row[2], doap.created, None)
+ if str(revision) == lv2_version:
+ date = created
+
+ dist = model.value(row[2], doap["file-release"], None)
+ if not dist or not created:
+ _warn(f"{proj} has no file release")
+
+ rows = []
+ for spec in model.triples([None, rdf.type, lv2.Specification]):
+ rows += [index_row(model, spec[0], root_uri, online)]
+
+ if date is None:
+ now = int(os.environ.get("SOURCE_DATE_EPOCH", time.time()))
+ date = datetime.datetime.utcfromtimestamp(now).strftime("%F")
+
+ _subst_file(
+ os.path.join(lv2_source_root, "doc", "index.html.in"),
+ sys.stdout,
+ {
+ "@ROWS@": "\n".join(rows),
+ "@LV2_VERSION@": lv2_version,
+ "@DATE@": date,
+ },
+ )
+
+
+if __name__ == "__main__":
+ ap = argparse.ArgumentParser(
+ usage="%(prog)s [OPTION]... INPUT_PATH...",
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+
+ ap.add_argument("--lv2-version", help="LV2 release version")
+ ap.add_argument("--lv2-source-root", help="path to LV2 source root")
+ ap.add_argument(
+ "--root-uri",
+ default="http://lv2plug.in/ns/",
+ help="root URI for specifications",
+ )
+ ap.add_argument(
+ "--online",
+ action="store_true",
+ default=False,
+ help="build online documentation",
+ )
+ ap.add_argument("input_paths", nargs="+", help="path to Turtle input file")
+
+ args = ap.parse_args(sys.argv[1:])
+
+ if args.lv2_version is None or args.lv2_source_root is None:
+ introspect_command = ["meson", "introspect", "-a"]
+ project_info = json.loads(
+ subprocess.check_output(introspect_command).decode("utf-8")
+ )
+
+ if args.lv2_version is None:
+ args.lv2_version = project_info["projectinfo"]["version"]
+
+ if args.lv2_source_root is None:
+ meson_build_path = project_info["buildsystem_files"][0]
+ args.lv2_source_root = os.path.relpath(
+ os.path.dirname(meson_build_path)
+ )
+
+ build_index(**vars(args))
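
For illustration, the build_index() entry point above can also be called
directly from Python rather than through the argument parser. A minimal
sketch, assuming the scripts directory is importable; the version string and
input paths are placeholders, not values from this commit:

    from lv2_build_index import build_index

    # Writes the generated HTML index to stdout; all values are examples.
    build_index(
        lv2_source_root=".",
        lv2_version="1.18.4",
        input_paths=["lv2/core/manifest.ttl", "lv2/core/lv2core.ttl"],
        root_uri="http://lv2plug.in/ns/",
        online=False,
    )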
diff --git a/scripts/lv2_check_specification.py b/scripts/lv2_check_specification.py
new file mode 100755
index 0000000..0cd296e
--- /dev/null
+++ b/scripts/lv2_check_specification.py
@@ -0,0 +1,248 @@
+#!/usr/bin/env python3
+
+# Copyright 2020-2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: ISC
+
+"""
+Check an LV2 specification for issues.
+"""
+
+import argparse
+import os
+import sys
+
+import rdflib
+
+foaf = rdflib.Namespace("http://xmlns.com/foaf/0.1/")
+lv2 = rdflib.Namespace("http://lv2plug.in/ns/lv2core#")
+owl = rdflib.Namespace("http://www.w3.org/2002/07/owl#")
+rdf = rdflib.Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")
+rdfs = rdflib.Namespace("http://www.w3.org/2000/01/rdf-schema#")
+
+
+class Checker:
+ "A callable that checks conditions and records pass/fail counts."
+
+ def __init__(self, verbose=False):
+ self.num_checks = 0
+ self.num_errors = 0
+ self.verbose = verbose
+
+ def __call__(self, condition, name):
+ if not condition:
+ sys.stderr.write(f"error: Unmet condition: {name}\n")
+ self.num_errors += 1
+ elif self.verbose:
+ sys.stderr.write(f"note: {name}\n")
+
+ self.num_checks += 1
+ return condition
+
+ def print_summary(self):
+ "Print a summary (if verbose) when all checks are finished."
+
+ if self.verbose:
+ if self.num_errors:
+ sys.stderr.write(f"note: Failed {self.num_errors}/")
+ else:
+ sys.stderr.write("note: Passed all ")
+
+ sys.stderr.write(f"{self.num_checks} checks\n")
+
+
+def _check(condition, name):
+ "Check that condition is true, returning 1 on failure."
+
+ if not condition:
+ sys.stderr.write(f"error: Unmet condition: {name}\n")
+ return 1
+
+ return 0
+
+
+def _has_statement(model, pattern):
+ "Return true if model contains a triple matching pattern."
+
+ for _ in model.triples(pattern):
+ return True
+
+ return False
+
+
+def _has_property(model, subject, predicate):
+ "Return true if subject has any value for predicate in model."
+
+ return model.value(subject, predicate, None) is not None
+
+
+def _check_version(checker, model, spec, is_stable):
+ "Check that the version of a specification is present and valid."
+
+ minor = model.value(spec, lv2.minorVersion, None, any=False)
+ checker(minor is not None, f"{spec} has a lv2:minorVersion")
+
+ micro = model.value(spec, lv2.microVersion, None, any=False)
+ checker(micro is not None, f"{spec} has a lv2:microVersion")
+
+    if is_stable and minor is not None and micro is not None:
+        checker(int(minor) > 0, f"{spec} has a non-zero minor version")
+        checker(int(micro) % 2 == 0, f"{spec} has an even micro version")
+
+
+def _check_specification(checker, spec_dir, is_stable=False):
+ "Check all specification data for errors and omissions."
+
+ # Load manifest
+ manifest_path = os.path.join(spec_dir, "manifest.ttl")
+ model = rdflib.Graph()
+ model.parse(manifest_path, format="n3")
+
+ # Get the specification URI from the manifest
+ spec_uri = model.value(None, rdf.type, lv2.Specification, any=False)
+ if not checker(
+ spec_uri is not None,
+ manifest_path + " declares an lv2:Specification",
+ ):
+ return 1
+
+ # Check that the manifest declares a valid version
+ _check_version(checker, model, spec_uri, is_stable)
+
+ # Get the link to the main document from the manifest
+ document = model.value(spec_uri, rdfs.seeAlso, None, any=False)
+ if not checker(
+ document is not None,
+ manifest_path + " has one rdfs:seeAlso link to the definition",
+ ):
+ return 1
+
+ # Load main document into the model
+ model.parse(document, format="n3")
+
+ # Check that the main data files aren't bloated with extended documentation
+ checker(
+ not _has_statement(model, [None, lv2.documentation, None]),
+ f"{document} has no lv2:documentation",
+ )
+
+ # Load all other directly linked data files (for any other subjects)
+ for link in sorted(model.triples([None, rdfs.seeAlso, None])):
+ if link[2] != document and link[2].endswith(".ttl"):
+ model.parse(link[2], format="n3")
+
+ # Check that all properties have a more specific type
+ for typing in sorted(model.triples([None, rdf.type, rdf.Property])):
+ subject = typing[0]
+
+ checker(isinstance(subject, rdflib.term.URIRef), f"{subject} is a URI")
+
+ if str(subject) == "http://lv2plug.in/ns/ext/patch#value":
+ continue # patch:value is just a "promiscuous" rdf:Property
+
+ types = list(model.objects(subject, rdf.type))
+
+ checker(
+ (owl.DatatypeProperty in types)
+ or (owl.ObjectProperty in types)
+ or (owl.AnnotationProperty in types),
+ f"{subject} is a Datatype, Object, or Annotation property",
+ )
+
+ # Get all subjects that have an explicit rdf:type
+ typed_subjects = set()
+ for typing in model.triples([None, rdf.type, None]):
+ typed_subjects.add(typing[0])
+
+ # Check that all named and typed resources have labels and comments
+ for subject in typed_subjects:
+ if isinstance(
+ subject, rdflib.term.BNode
+ ) or foaf.Person in model.objects(subject, rdf.type):
+ continue
+
+ if checker(
+ _has_property(model, subject, rdfs.label),
+ f"{subject} has a rdfs:label",
+ ):
+ label = str(model.value(subject, rdfs.label, None))
+
+ checker(
+ not label.endswith("."),
+ f"{subject} label has no trailing '.'",
+ )
+ checker(
+ label.find("\n") == -1,
+ f"{subject} label is a single line",
+ )
+ checker(
+ label == label.strip(),
+ f"{subject} label has stripped whitespace",
+ )
+
+ if checker(
+ _has_property(model, subject, rdfs.comment),
+ f"{subject} has a rdfs:comment",
+ ):
+ comment = str(model.value(subject, rdfs.comment, None))
+
+ checker(
+ comment.endswith("."),
+ f"{subject} comment has a trailing '.'",
+ )
+ checker(
+                comment.find("\n") == -1 and comment.find("\r") == -1,
+ f"{subject} comment is a single line",
+ )
+ checker(
+ comment == comment.strip(),
+ f"{subject} comment has stripped whitespace",
+ )
+
+ # Check that lv2:documentation, if present, is proper Markdown
+ documentation = model.value(subject, lv2.documentation, None)
+ if documentation is not None:
+ checker(
+ documentation.datatype == lv2.Markdown,
+ f"{subject} documentation is explicitly Markdown",
+ )
+ checker(
+ str(documentation).startswith("\n\n"),
+ f"{subject} documentation starts with blank line",
+ )
+ checker(
+ str(documentation).endswith("\n\n"),
+ f"{subject} documentation ends with blank line",
+ )
+
+ return checker.num_errors
+
+
+if __name__ == "__main__":
+ ap = argparse.ArgumentParser(
+ usage="%(prog)s [OPTION]... BUNDLE",
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+
+ ap.add_argument(
+ "--stable",
+ action="store_true",
+ help="enable checks for stable release versions",
+ )
+
+ ap.add_argument(
+ "-v", "--verbose", action="store_true", help="print successful checks"
+ )
+
+ ap.add_argument(
+ "BUNDLE", help="path to specification bundle or manifest.ttl"
+ )
+
+ args = ap.parse_args(sys.argv[1:])
+
+    if os.path.basename(args.BUNDLE) == "manifest.ttl":
+ args.BUNDLE = os.path.dirname(args.BUNDLE)
+
+ sys.exit(
+ _check_specification(Checker(args.verbose), args.BUNDLE, args.stable)
+ )
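
The Checker class above is a small reusable pattern: each call records one
pass or failure, and print_summary() reports the totals. A minimal sketch of
using it on its own (the conditions and labels are arbitrary examples):

    from lv2_check_specification import Checker

    checker = Checker(verbose=True)
    checker(2 + 2 == 4, "arithmetic works")      # passes, noted when verbose
    checker("x".isupper(), "'x' is upper-case")  # fails, counted as an error
    checker.print_summary()                      # "note: Failed 1/2 checks"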
diff --git a/scripts/lv2_check_syntax.py b/scripts/lv2_check_syntax.py
new file mode 100755
index 0000000..d1b72dc
--- /dev/null
+++ b/scripts/lv2_check_syntax.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: ISC
+
+"""
+Check that a Turtle file has valid syntax and strict formatting.
+
+This is a strict tool that enforces machine formatting with serdi.
+"""
+
+import argparse
+import difflib
+import filecmp
+import os
+import subprocess
+import sys
+import tempfile
+
+
+def _show_diff(from_lines, to_lines, from_path, to_path):
+ "Show a diff between two files, returning non-zero if they differ."
+
+ differences = False
+ for line in difflib.unified_diff(
+ from_lines,
+ to_lines,
+ fromfile=from_path,
+ tofile=to_path,
+ ):
+ sys.stderr.write(line)
+ differences = True
+
+ return int(differences)
+
+
+def _check_file_equals(patha, pathb):
+ "Check that two files are equal, returning non-zero if they differ."
+
+ for path in (patha, pathb):
+ if not os.access(path, os.F_OK):
+            sys.stderr.write(f"error: missing file {path}\n")
+ return 1
+
+ if filecmp.cmp(patha, pathb, shallow=False):
+ return 0
+
+ with open(patha, "r", encoding="utf-8") as in_a:
+ with open(pathb, "r", encoding="utf-8") as in_b:
+ return _show_diff(in_a.readlines(), in_b.readlines(), patha, pathb)
+
+
+def run(serdi, filenames):
+ "Check that every file in filenames has valid formatted syntax."
+
+ status = 0
+
+ for filename in filenames:
+ rel_path = os.path.relpath(filename)
+ with tempfile.NamedTemporaryFile(mode="w") as out:
+ command = [serdi, "-o", "turtle", rel_path]
+ subprocess.check_call(command, stdout=out)
+
+ if _check_file_equals(rel_path, out.name):
+ status = 1
+
+ return status
+
+
+if __name__ == "__main__":
+ ap = argparse.ArgumentParser(
+ usage="%(prog)s [OPTION]... TURTLE_FILE...",
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+
+ ap.add_argument("--serdi", default="serdi", help="path to serdi")
+ ap.add_argument("TURTLE_FILE", nargs="+", help="input file to check")
+
+ args = ap.parse_args(sys.argv[1:])
+
+ sys.exit(run(args.serdi, args.TURTLE_FILE))
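
A file passes this check when it is byte-for-byte identical to serdi's own
Turtle output for it. Roughly the same check done by hand, as a sketch that
assumes serdi is on PATH and uses a placeholder file name:

    import subprocess

    path = "lv2core.ttl"  # placeholder input file
    canonical = subprocess.check_output(
        ["serdi", "-o", "turtle", path], text=True
    )
    with open(path, "r", encoding="utf-8") as original:
        if original.read() != canonical:
            print(f"{path} is not in canonical serdi formatting")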
diff --git a/scripts/lv2_write_news.py b/scripts/lv2_write_news.py
new file mode 100755
index 0000000..6ce935c
--- /dev/null
+++ b/scripts/lv2_write_news.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python3
+
+# Copyright 2020-2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: ISC
+
+"""
+Write a NEWS file from RDF data.
+
+The output is in Debian changelog format, which can be parsed by
+dpkg-parsechangelog, among other things.
+"""
+
+import argparse
+import datetime
+import os
+import re
+import sys
+import textwrap
+import urllib.parse
+
+import rdflib
+
+doap = rdflib.Namespace("http://usefulinc.com/ns/doap#")
+dcs = rdflib.Namespace("http://ontologi.es/doap-changeset#")
+rdfs = rdflib.Namespace("http://www.w3.org/2000/01/rdf-schema#")
+foaf = rdflib.Namespace("http://xmlns.com/foaf/0.1/")
+rdf = rdflib.Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")
+
+
+def _is_release_version(version):
+ "Return true if `version` is a stable version number."
+
+ if len(version) not in [2, 3] or version[0] == 0:
+ return False
+
+ minor = version[len(version) - 2]
+ micro = version[len(version) - 1]
+
+ return micro % 2 == 0 and (len(version) == 2 or minor % 2 == 0)
+
+
+def _parse_datetime(string):
+ "Parse string as either a datetime or a date."
+
+ try:
+ return datetime.datetime.strptime(string, "%Y-%m-%dT%H:%M:%S%z")
+ except ValueError:
+ return datetime.datetime.strptime(string, "%Y-%m-%d")
+
+
+def _release_entry(graph, release):
+ "Return a news entry for a release."
+
+ revision = graph.value(release, doap.revision, None)
+ date = graph.value(release, doap.created, None)
+ blamee = graph.value(release, dcs.blame, None)
+ changeset = graph.value(release, dcs.changeset, None)
+ dist = graph.value(release, doap["file-release"], None)
+
+ if not revision or not date or not blamee or not changeset:
+ return None
+
+ version = tuple(map(int, revision.split(".")))
+
+ entry = {
+ "version": version,
+ "revision": str(revision),
+ "date": _parse_datetime(date),
+ "status": "stable" if _is_release_version(version) else "unstable",
+ "items": [],
+ }
+
+ if dist is not None:
+ entry["dist"] = dist
+
+ for j in graph.triples([changeset, dcs.item, None]):
+ item = str(graph.value(j[2], rdfs.label, None))
+ entry["items"] += [item]
+
+ entry["blamee_name"] = str(graph.value(blamee, foaf.name, None))
+ entry["blamee_mbox"] = str(graph.value(blamee, foaf.mbox, None))
+ return entry
+
+
+def _project_entries(graph, project):
+ "Return a map from version to news entries for a project"
+
+ entries = {}
+ for link in graph.triples([project, doap.release, None]):
+ entry = _release_entry(graph, link[2])
+ if entry is not None:
+ entries[entry["version"]] = entry
+ else:
+ sys.stderr.write(f"warning: Ignored partial {project} release\n")
+
+ return entries
+
+
+def _read_turtle_news(in_files):
+ "Read news entries from Turtle."
+
+ graph = rdflib.Graph()
+
+ # Parse input files
+ for i in in_files:
+ graph.parse(i)
+
+ # Read news for every project in the data
+ projects = {t[0] for t in graph.triples([None, rdf.type, doap.Project])}
+ entries_by_project = {}
+ for project in projects:
+ # Load any associated files
+ for uri in graph.triples([project, rdfs.seeAlso, None]):
+ if uri[2].endswith(".ttl"):
+ graph.parse(uri[2])
+
+ # Use the symbol from the URI as a name, or failing that, the doap:name
+ name = os.path.basename(urllib.parse.urlparse(str(project)).path)
+ if not name:
+ name = graph.value(project, doap.name, None)
+
+ entries = _project_entries(graph, project)
+ for _, entry in entries.items():
+ entry["name"] = name
+
+ entries_by_project[str(project)] = entries
+
+ return entries_by_project
+
+
+def _write_news_item(out, item):
+ "Write a single item (change) in NEWS format."
+
+ out.write("\n * " + "\n ".join(textwrap.wrap(item, width=74)))
+
+
+def _write_news_entry(out, entry):
+ "Write an entry (version) to out in NEWS format."
+
+ # Summary header
+ summary = f'{entry["name"]} ({entry["revision"]}) {entry["status"]}'
+ out.write(f"{summary}; urgency=medium\n")
+
+ # Individual change items
+ for item in sorted(entry["items"]):
+ _write_news_item(out, item)
+
+ # Trailer line
+ mbox = entry["blamee_mbox"].replace("mailto:", "")
+ author = f'{entry["blamee_name"]} <{mbox}>'
+ date = entry["date"]
+ if date.tzinfo is None: # Assume UTC (dpkg-parsechangelog requires it)
+ date = date.strftime("%a, %d %b %Y %H:%M:%S +0000")
+ else:
+ date = date.strftime("%a, %d %b %Y %H:%M:%S %z")
+
+ out.write(f"\n\n -- {author} {date}\n")
+
+
+def _write_single_project_news(out, entries):
+ "Write a NEWS file for entries of a single project to out."
+
+ revisions = sorted(entries.keys(), reverse=True)
+ for revision in revisions:
+ entry = entries[revision]
+ out.write("\n" if revision != revisions[0] else "")
+ _write_news_entry(out, entry)
+
+
+def _write_meta_project_news(out, top_project, entries_by_project):
+ "Write a NEWS file for a meta-project that contains others."
+
+ top_name = os.path.basename(urllib.parse.urlparse(str(top_project)).path)
+ release_pattern = rf".*/{top_name}-([0-9\.]*).tar.bz2"
+
+ # Pop the entries for the top project
+ top_entries = entries_by_project.pop(top_project)
+
+ # Add items from the other projects to the corresponding top entry
+ for _, entries in entries_by_project.items():
+ for version, entry in entries.items():
+ if "dist" in entry:
+ match = re.match(release_pattern, entry["dist"])
+ if match:
+ version = tuple(map(int, match.group(1).split(".")))
+ for item in entry["items"]:
+ top_entries[version]["items"] += [
+ f'{entry["name"]}: {item}'
+ ]
+
+ for version in sorted(top_entries.keys(), reverse=True):
+ out.write("\n" if version != max(top_entries.keys()) else "")
+ _write_news_entry(out, top_entries[version])
+
+
+def _write_text_news(out, entries_by_project, top_project=None):
+ "Write NEWS in standard Debian changelog format."
+
+ if len(entries_by_project) > 1:
+ if top_project is None:
+            sys.stderr.write(
+                "error: --top-project is required for multiple projects\n"
+            )
+ return 1
+
+ _write_meta_project_news(out, top_project, entries_by_project)
+ else:
+ project = next(iter(entries_by_project))
+ _write_single_project_news(out, entries_by_project[project])
+
+ return 0
+
+
+if __name__ == "__main__":
+ ap = argparse.ArgumentParser(
+ usage="%(prog)s [OPTION]... DATA_FILE...",
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+
+ ap.add_argument(
+ "-o",
+ "--output",
+ metavar="OUTPUT_FILE",
+ help="output file path",
+ )
+
+ ap.add_argument(
+ "-t",
+ "--top-project",
+        metavar="PROJECT_URI",
+ help="URI of parent meta-project with file releases",
+ )
+
+ ap.add_argument(
+ "DATA_FILE",
+ nargs="+",
+ help="path to a Turtle file with release data",
+ )
+
+ args = ap.parse_args(sys.argv[1:])
+
+ if not args.output and "MESON_DIST_ROOT" in os.environ:
+ args.output = os.path.join(os.getenv("MESON_DIST_ROOT"), "NEWS")
+
+ if not args.output:
+ sys.exit(
+ _write_text_news(
+ sys.stdout, _read_turtle_news(args.DATA_FILE), args.top_project
+ )
+ )
+ else:
+ with open(args.output, "w", encoding="utf-8") as output_file:
+ sys.exit(
+ _write_text_news(
+ output_file,
+ _read_turtle_news(args.DATA_FILE),
+ args.top_project,
+ )
+ )
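
For reference, each entry that _write_news_entry() emits has the following
shape; the version and date below are illustrative, not actual release data:

    lv2 (1.18.4) stable; urgency=medium

     * Switch to meson build system

     -- David Robillard <d@drobilla.net> Sun, 17 Jul 2022 18:13:53 +0000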
diff --git a/scripts/meson.build b/scripts/meson.build
new file mode 100644
index 0000000..400d583
--- /dev/null
+++ b/scripts/meson.build
@@ -0,0 +1,9 @@
+# Copyright 2021-2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: CC0-1.0 OR ISC
+
+lv2_scripts = files(
+ 'lv2_build_index.py',
+ 'lv2_check_specification.py',
+ 'lv2_check_syntax.py',
+ 'lv2_write_news.py',
+)