build+ci: sync with ndn-cxx
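
Notable changes (all taken from the ndn-cxx build scripts):

 * install gcovr and Sphinx via uv/uvx instead of pip
 * raise the minimum supported compiler versions
   (gcc 10.2, clang 10.0, Xcode 13.0)
 * replace the sphinx_build waf tool with the rewritten sphinx tool
   and list the documentation dependencies in docs/requirements.txt
 * drop Doxygen settings that are obsolete in current Doxygen releases
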
Change-Id: Iff3ad63469eccb47571ba4abd3d42936f0cd8650
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ae09443..4fea6d1 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,11 +1,15 @@
name: CI
on:
push:
+ branches:
+ - '**'
paths-ignore:
- - 'docs/**'
- '*.conf.sample*'
- - '*.md'
+ - 'docs/**'
+ - '.editorconfig'
+ - '.gitignore'
- '.mailmap'
+ - '*.md'
workflow_dispatch:
permissions: {}
diff --git a/.jenkins b/.jenkins
index 92b9dae..1fa2f02 100755
--- a/.jenkins
+++ b/.jenkins
@@ -10,13 +10,17 @@
fi
export ID VERSION_ID
export ID_LIKE="${ID} ${ID_LIKE} linux"
- export PATH="${HOME}/.local/bin${PATH:+:}${PATH}"
+ if [[ -z $GITHUB_ACTIONS ]]; then
+ export PATH="${HOME}/.local/bin${PATH:+:}${PATH}"
+ fi
;;
Darwin)
# Emulate a subset of os-release(5)
export ID=macos
export VERSION_ID=$(sw_vers -productVersion)
- export PATH="/usr/local/bin${PATH:+:}${PATH}"
+ if [[ -z $GITHUB_ACTIONS ]]; then
+ export PATH="/usr/local/bin${PATH:+:}${PATH}"
+ fi
if [[ -x /opt/homebrew/bin/brew ]]; then
eval "$(/opt/homebrew/bin/brew shellenv)"
elif [[ -x /usr/local/bin/brew ]]; then
@@ -27,9 +31,9 @@
export CACHE_DIR=${CACHE_DIR:-/tmp}
-if [[ $JOB_NAME == *"code-coverage" ]]; then
- export DISABLE_ASAN=yes
- export DISABLE_HEADERS_CHECK=yes
+if [[ $JOB_NAME == *code-coverage ]]; then
+ export DISABLE_ASAN=1
+ export DISABLE_HEADERS_CHECK=1
fi
# https://reproducible-builds.org/docs/source-date-epoch/
diff --git a/.jenkins.d/00-deps.sh b/.jenkins.d/00-deps.sh
index 7e03e98..b64145e 100755
--- a/.jenkins.d/00-deps.sh
+++ b/.jenkins.d/00-deps.sh
@@ -2,7 +2,8 @@
set -eo pipefail
APT_PKGS=(
- build-essential
+ dpkg-dev
+ g++
libboost-chrono-dev
libboost-date-time-dev
libboost-dev
@@ -14,26 +15,42 @@
libboost-thread-dev
libsqlite3-dev
libssl-dev
- pkg-config
- python3-minimal
+ pkgconf
+ python3
)
-FORMULAE=(boost openssl pkg-config)
-PIP_PKGS=()
+DNF_PKGS=(
+ boost-devel
+ gcc-c++
+ libasan
+ lld
+ openssl-devel
+ pkgconf
+ python3
+ sqlite-devel
+)
+FORMULAE=(boost openssl pkgconf)
case $JOB_NAME in
*code-coverage)
- APT_PKGS+=(lcov python3-pip)
- PIP_PKGS+=('gcovr~=5.2')
+ APT_PKGS+=(lcov)
;;
*Docs)
- APT_PKGS+=(doxygen graphviz python3-pip)
+ APT_PKGS+=(doxygen graphviz)
FORMULAE+=(doxygen graphviz)
- PIP_PKGS+=(sphinx sphinxcontrib-doxylink)
;;
esac
+install_uv() {
+ if [[ -z $GITHUB_ACTIONS && $ID_LIKE == *debian* ]]; then
+ sudo apt-get install -qy --no-install-recommends pipx
+ pipx upgrade uv || pipx install uv
+ fi
+}
+
set -x
if [[ $ID == macos ]]; then
+ export HOMEBREW_COLOR=1
+ export HOMEBREW_NO_ENV_HINTS=1
if [[ -n $GITHUB_ACTIONS ]]; then
export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1
fi
@@ -43,10 +60,17 @@
sudo apt-get update -qq
sudo apt-get install -qy --no-install-recommends "${APT_PKGS[@]}"
elif [[ $ID_LIKE == *fedora* ]]; then
- sudo dnf install -y gcc-c++ libasan lld pkgconf-pkg-config python3 \
- boost-devel openssl-devel sqlite-devel
+ sudo dnf install -y "${DNF_PKGS[@]}"
fi
-if (( ${#PIP_PKGS[@]} )); then
- pip3 install --user --upgrade --upgrade-strategy=eager "${PIP_PKGS[@]}"
-fi
+case $JOB_NAME in
+ *code-coverage)
+ install_uv
+ ;;
+ *Docs)
+ install_uv
+ export FORCE_COLOR=1
+ export UV_NO_MANAGED_PYTHON=1
+ uv tool install sphinx --upgrade --with-requirements docs/requirements.txt
+ ;;
+esac
diff --git a/.jenkins.d/10-build.sh b/.jenkins.d/10-build.sh
index 199ed20..9cc4c78 100755
--- a/.jenkins.d/10-build.sh
+++ b/.jenkins.d/10-build.sh
@@ -4,13 +4,13 @@
if [[ -z $DISABLE_ASAN ]]; then
ASAN="--with-sanitizer=address"
fi
-if [[ $JOB_NAME == *"code-coverage" ]]; then
+if [[ $JOB_NAME == *code-coverage ]]; then
COVERAGE="--with-coverage"
fi
set -x
-if [[ $JOB_NAME != *"code-coverage" && $JOB_NAME != *"limited-build" ]]; then
+if [[ $JOB_NAME != *code-coverage && $JOB_NAME != *limited-build ]]; then
# Build in release mode with tests
./waf --color=yes configure --with-tests
./waf --color=yes build
@@ -30,7 +30,5 @@
./waf --color=yes configure --debug --with-tests $ASAN $COVERAGE
./waf --color=yes build
-# (tests will be run against the debug version)
-
# Install
sudo ./waf --color=yes install
diff --git a/.jenkins.d/30-coverage.sh b/.jenkins.d/30-coverage.sh
index eb100d3..decff99 100755
--- a/.jenkins.d/30-coverage.sh
+++ b/.jenkins.d/30-coverage.sh
@@ -1,30 +1,36 @@
#!/usr/bin/env bash
-set -exo pipefail
+set -eo pipefail
-if [[ $JOB_NAME == *"code-coverage" ]]; then
- # Generate an XML report (Cobertura format) and a detailed HTML report using gcovr
- # Note: trailing slashes are important in the paths below. Do not remove them!
- gcovr --object-directory build \
- --filter src/ \
- --exclude-throw-branches \
- --exclude-unreachable-branches \
- --cobertura build/coverage.xml \
- --html-details build/gcovr/ \
- --print-summary
+[[ $JOB_NAME == *code-coverage ]] || exit 0
- # Generate a detailed HTML report using lcov
- lcov --quiet \
- --capture \
- --directory . \
- --exclude "$PWD/tests/*" \
- --no-external \
- --rc lcov_branch_coverage=1 \
- --output-file build/coverage.info
+export FORCE_COLOR=1
+export UV_NO_MANAGED_PYTHON=1
- genhtml --branch-coverage \
- --demangle-cpp \
- --legend \
- --output-directory build/lcov \
- --title "NDNS unit tests" \
- build/coverage.info
-fi
+set -x
+
+# Generate an XML report (Cobertura format) and a detailed HTML report using gcovr
+# Note: trailing slashes are important in the paths below. Do not remove them!
+uvx gcovr@5.2 \
+ --object-directory build \
+ --filter src/ \
+ --exclude-throw-branches \
+ --exclude-unreachable-branches \
+ --cobertura build/coverage.xml \
+ --html-details build/gcovr/ \
+ --print-summary
+
+# Generate a detailed HTML report using lcov
+lcov --quiet \
+ --capture \
+ --directory . \
+ --exclude "$PWD/tests/*" \
+ --no-external \
+ --rc lcov_branch_coverage=1 \
+ --output-file build/coverage.info
+
+genhtml --branch-coverage \
+ --demangle-cpp \
+ --legend \
+ --output-directory build/lcov \
+ --title "NDNS unit tests" \
+ build/coverage.info
diff --git a/.waf-tools/default-compiler-flags.py b/.waf-tools/default-compiler-flags.py
index 681913f..c8ab164 100644
--- a/.waf-tools/default-compiler-flags.py
+++ b/.waf-tools/default-compiler-flags.py
@@ -11,29 +11,32 @@
conf.start_msg('Checking C++ compiler version')
cxx = conf.env.CXX_NAME # generic name of the compiler
- ccver = tuple(int(i) for i in conf.env.CC_VERSION)
+ ccver = get_compiler_ver(conf)
ccverstr = '.'.join(conf.env.CC_VERSION)
errmsg = ''
warnmsg = ''
if cxx == 'gcc':
- if ccver < (7, 4, 0):
+ if ccver < (9, 1, 0):
errmsg = ('The version of gcc you are using is too old.\n'
- 'The minimum supported gcc version is 9.3.')
- elif ccver < (9, 3, 0):
- warnmsg = ('Using a version of gcc older than 9.3 is not '
+ 'The minimum supported gcc version is 10.2.')
+ elif ccver < (10, 2, 0):
+ warnmsg = ('Using a version of gcc older than 10.2 is not '
'officially supported and may result in build failures.')
conf.flags = GccFlags()
elif cxx == 'clang':
if Utils.unversioned_sys_platform() == 'darwin':
- if ccver < (10, 0, 0):
+ if ccver < (11, 0, 0):
errmsg = ('The version of Xcode you are using is too old.\n'
- 'The minimum supported Xcode version is 12.4.')
- elif ccver < (12, 0, 0):
- warnmsg = ('Using a version of Xcode older than 12.4 is not '
+ 'The minimum supported Xcode version is 13.0.')
+ elif ccver < (13, 0, 0):
+ warnmsg = ('Using a version of Xcode older than 13.0 is not '
'officially supported and may result in build failures.')
elif ccver < (7, 0, 0):
errmsg = ('The version of clang you are using is too old.\n'
- 'The minimum supported clang version is 7.0.')
+ 'The minimum supported clang version is 10.0.')
+ elif ccver < (10, 0, 0):
+ warnmsg = ('Using a version of clang older than 10.0 is not '
+ 'officially supported and may result in build failures.')
conf.flags = ClangFlags()
else:
warnmsg = f'{cxx} compiler is unsupported'
@@ -57,6 +60,10 @@
conf.env.DEFINES += generalFlags['DEFINES']
+def get_compiler_ver(conf):
+ return tuple(int(i) for i in conf.env.CC_VERSION)
+
+
@Configure.conf
def check_compiler_flags(conf):
# Debug or optimized CXXFLAGS and LINKFLAGS are applied only if the
@@ -121,9 +128,6 @@
class CompilerFlags:
- def getCompilerVersion(self, conf):
- return tuple(int(i) for i in conf.env.CC_VERSION)
-
def getGeneralFlags(self, conf):
"""Get dict of CXXFLAGS, LINKFLAGS, and DEFINES that are always needed"""
return {'CXXFLAGS': [], 'LINKFLAGS': [], 'DEFINES': []}
@@ -223,7 +227,7 @@
elif Utils.unversioned_sys_platform() == 'freebsd':
# Bug #4790
flags['CXXFLAGS'] += [['-isystem', '/usr/local/include']]
- if self.getCompilerVersion(conf) >= (18, 0, 0):
+ if get_compiler_ver(conf) >= (18, 0, 0) and get_compiler_ver(conf) < (20, 1, 0):
# Bug #5300
flags['CXXFLAGS'] += ['-Wno-enum-constexpr-conversion']
return flags
@@ -237,10 +241,10 @@
flags = super().getDebugFlags(conf)
flags['CXXFLAGS'] += self.__cxxFlags
# Enable assertions in libc++
- if self.getCompilerVersion(conf) >= (18, 0, 0):
+ if get_compiler_ver(conf) >= (18, 0, 0):
# https://libcxx.llvm.org/Hardening.html
flags['DEFINES'] += ['_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_EXTENSIVE']
- elif self.getCompilerVersion(conf) >= (15, 0, 0):
+ elif get_compiler_ver(conf) >= (15, 0, 0):
# https://releases.llvm.org/15.0.0/projects/libcxx/docs/UsingLibcxx.html#enabling-the-safe-libc-mode
flags['DEFINES'] += ['_LIBCPP_ENABLE_ASSERTIONS=1']
# Tell libc++ to avoid including transitive headers
diff --git a/.waf-tools/sphinx.py b/.waf-tools/sphinx.py
new file mode 100644
index 0000000..7fccd64
--- /dev/null
+++ b/.waf-tools/sphinx.py
@@ -0,0 +1,77 @@
+# inspired by code by Hans-Martin von Gaudecker, 2012
+
+"""Support for Sphinx documentation"""
+
+import os
+from waflib import Task, TaskGen
+
+
+class sphinx_build(Task.Task):
+ color = 'BLUE'
+ run_str = '${SPHINX_BUILD} -q -b ${BUILDERNAME} -D ${VERSION} -D ${RELEASE} -d ${DOCTREEDIR} ${SRCDIR} ${OUTDIR}'
+
+ def keyword(self):
+ return f'Processing ({self.env.BUILDERNAME})'
+
+
+# from https://docs.python.org/3.12/whatsnew/3.12.html#imp
+def load_source(modname, filename):
+ import importlib.util
+ from importlib.machinery import SourceFileLoader
+ loader = SourceFileLoader(modname, filename)
+ spec = importlib.util.spec_from_file_location(modname, filename, loader=loader)
+ module = importlib.util.module_from_spec(spec)
+ loader.exec_module(module)
+ return module
+
+
+@TaskGen.feature('sphinx')
+@TaskGen.before_method('process_source')
+def process_sphinx(self):
+ """Set up the task generator with a Sphinx instance and create a task."""
+
+ conf = self.path.find_node(self.config)
+ if not conf:
+ self.bld.fatal(f'Sphinx configuration file {repr(self.config)} not found')
+
+ inputs = [conf] + self.to_nodes(self.source)
+ task = self.create_task('sphinx_build', inputs, always_run=getattr(self, 'always', False))
+
+ confdir = conf.parent.abspath()
+ buildername = getattr(self, 'builder', 'html')
+ srcdir = getattr(self, 'srcdir', confdir)
+ outdir = self.path.find_or_declare(getattr(self, 'outdir', buildername)).get_bld()
+ doctreedir = getattr(self, 'doctreedir', os.path.join(outdir.abspath(), '.doctrees'))
+ release = getattr(self, 'release', self.version)
+
+ task.env['BUILDERNAME'] = buildername
+ task.env['SRCDIR'] = srcdir
+ task.env['OUTDIR'] = outdir.abspath()
+ task.env['DOCTREEDIR'] = doctreedir
+ task.env['VERSION'] = f'version={self.version}'
+ task.env['RELEASE'] = f'release={release}'
+
+ if buildername == 'man':
+ confdata = load_source('sphinx_conf', conf.abspath())
+ for i in confdata.man_pages:
+ target = outdir.find_or_declare(f'{i[1]}.{i[4]}')
+ task.outputs.append(target)
+ if self.install_path:
+ self.bld.install_files(f'{self.install_path}/man{i[4]}/', target)
+ else:
+ task.outputs.append(outdir)
+
+ # prevent process_source from complaining that there is no extension mapping for .rst files
+ self.source = []
+
+
+def configure(conf):
+ """Check if sphinx-build program is available."""
+ conf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False)
+
+
+# sphinx command
+from waflib.Build import BuildContext
+class sphinx(BuildContext):
+ cmd = 'sphinx'
+ fun = 'sphinx'
diff --git a/.waf-tools/sphinx_build.py b/.waf-tools/sphinx_build.py
deleted file mode 100644
index 8585352..0000000
--- a/.waf-tools/sphinx_build.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# inspired by code by Hans-Martin von Gaudecker, 2012
-
-import os
-from waflib import Node, Task, TaskGen, Errors, Logs, Build, Utils
-
-class sphinx_build(Task.Task):
- color = 'BLUE'
- run_str = '${SPHINX_BUILD} -D ${VERSION} -D ${RELEASE} -q -b ${BUILDERNAME} -d ${DOCTREEDIR} ${SRCDIR} ${OUTDIR}'
-
- def __str__(self):
- env = self.env
- src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
- tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
- if self.outputs: sep = ' -> '
- else: sep = ''
- return'%s [%s]: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),
- self.env['BUILDERNAME'], src_str, sep, tgt_str)
-
-@TaskGen.extension('.py', '.rst')
-def sig_hook(self, node):
- node.sig=Utils.h_file(node.abspath())
-
-@TaskGen.feature("sphinx")
-@TaskGen.before_method("process_source")
-def apply_sphinx(self):
- """Set up the task generator with a Sphinx instance and create a task."""
-
- inputs = []
- for i in Utils.to_list(self.source):
- if not isinstance(i, Node.Node):
- node = self.path.find_node(node)
- else:
- node = i
- if not node:
- raise ValueError('[%s] file not found' % i)
- inputs.append(node)
-
- task = self.create_task('sphinx_build', inputs)
-
- conf = self.path.find_node(self.config)
- task.inputs.append(conf)
-
- confdir = conf.parent.abspath()
- buildername = getattr(self, 'builder', 'html')
- srcdir = getattr(self, 'srcdir', confdir)
- outdir = self.path.find_or_declare(getattr(self, 'outdir', buildername)).get_bld()
- doctreedir = getattr(self, 'doctreedir', os.path.join(outdir.abspath(), '.doctrees'))
-
- task.env['BUILDERNAME'] = buildername
- task.env['SRCDIR'] = srcdir
- task.env['DOCTREEDIR'] = doctreedir
- task.env['OUTDIR'] = outdir.abspath()
- task.env['VERSION'] = 'version=%s' % self.version
- task.env['RELEASE'] = 'release=%s' % getattr(self, 'release', self.version)
-
- import imp
- confData = imp.load_source('sphinx_conf', conf.abspath())
-
- if buildername == 'man':
- for i in confData.man_pages:
- target = outdir.find_or_declare('%s.%d' % (i[1], i[4]))
- task.outputs.append(target)
-
- if self.install_path:
- self.bld.install_files('%s/man%d/' % (self.install_path, i[4]), target)
- else:
- task.outputs.append(outdir)
-
-def configure(conf):
- conf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False)
-
-# sphinx docs
-from waflib.Build import BuildContext
-class sphinx(BuildContext):
- cmd = "sphinx"
- fun = "sphinx"
diff --git a/README.md b/README.md
index f44c178..c4c053f 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,8 @@
# NDNS: Domain Name Service for Named Data Networking
+
[![CI](https://github.com/named-data/ndns/actions/workflows/ci.yml/badge.svg)](https://github.com/named-data/ndns/actions/workflows/ci.yml)
[![Docs](https://github.com/named-data/ndns/actions/workflows/docs.yml/badge.svg)](https://github.com/named-data/ndns/actions/workflows/docs.yml)
-
**NDNS** is a completely distributed database system that largely mimics the structure of
the DNS system in today's Internet but operates within the NDN architecture. Although the
diff --git a/docs/INSTALL.rst b/docs/INSTALL.rst
index b2a5a09..22e20d4 100644
--- a/docs/INSTALL.rst
+++ b/docs/INSTALL.rst
@@ -12,7 +12,7 @@
- doxygen
- graphviz
-- sphinx >= 4.0
+- sphinx
- sphinxcontrib-doxylink
Build
diff --git a/docs/conf.py b/docs/conf.py
index d1a6386..5fcfc13 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -10,7 +10,7 @@
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
project = 'NDNS: Domain Name Service for Named Data Networking'
-copyright = 'Copyright © 2014-2023 Named Data Networking Project.'
+copyright = 'Copyright © 2014-2025 Named Data Networking Project.'
author = 'Named Data Networking Project'
# The short X.Y version.
diff --git a/docs/doxygen.conf.in b/docs/doxygen.conf.in
index 78e40a2..f8fa0c3 100644
--- a/docs/doxygen.conf.in
+++ b/docs/doxygen.conf.in
@@ -1143,15 +1143,6 @@
HTML_COLORSTYLE_GAMMA = 91
-# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
-# page will contain the date and time when the page was generated. Setting this
-# to YES can help to show when doxygen was last run and thus if the
-# documentation is up to date.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_TIMESTAMP = NO
-
# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
# documentation will contain sections that can be hidden and shown after the
# page has loaded.
@@ -1422,17 +1413,6 @@
FORMULA_FONTSIZE = 10
-# Use the FORMULA_TRANPARENT tag to determine whether or not the images
-# generated for formulas are transparent PNGs. Transparent PNGs are not
-# supported properly for IE 6.0, but are supported on all modern browsers.
-#
-# Note that when changing this option you need to delete any form_*.png files in
-# the HTML output directory before the changes have effect.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_TRANSPARENT = YES
-
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
# http://www.mathjax.org) which uses client side Javascript for the rendering
# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
@@ -1719,14 +1699,6 @@
LATEX_BIB_STYLE = plain
-# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
-# page will contain the date and time when the page was generated. Setting this
-# to NO can help when comparing the output of multiple runs.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_TIMESTAMP = NO
-
#---------------------------------------------------------------------------
# Configuration options related to the RTF output
#---------------------------------------------------------------------------
@@ -2057,15 +2029,6 @@
# Configuration options related to the dot tool
#---------------------------------------------------------------------------
-# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
-# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
-# NO turns the diagrams off. Note that this option also works with HAVE_DOT
-# disabled, but it is recommended to install and use dot, since it yields more
-# powerful graphs.
-# The default value is: YES.
-
-CLASS_DIAGRAMS = YES
-
# You can include diagrams made with dia in doxygen documentation. Doxygen will
# then run dia to produce the diagram and insert it in the documentation. The
# DIA_PATH tag allows you to specify the directory where the dia binary resides.
@@ -2098,23 +2061,6 @@
DOT_NUM_THREADS = 0
-# When you want a differently looking font in the dot files that doxygen
-# generates you can specify the font name using DOT_FONTNAME. You need to make
-# sure dot is able to find the font, which can be done by putting it in a
-# standard location or by setting the DOTFONTPATH environment variable or by
-# setting DOT_FONTPATH to the directory containing the font.
-# The default value is: Helvetica.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTNAME = Helvetica
-
-# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
-# dot graphs.
-# Minimum value: 4, maximum value: 24, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTSIZE = 10
-
# By default doxygen will tell dot to use the default font as specified with
# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
# the path where dot can find it using this tag.
diff --git a/docs/requirements.txt b/docs/requirements.txt
new file mode 100644
index 0000000..1348feb
--- /dev/null
+++ b/docs/requirements.txt
@@ -0,0 +1,3 @@
+docutils>=0.20
+sphinx>=7.0.1,<9
+sphinxcontrib-doxylink~=1.13
diff --git a/wscript b/wscript
index 86973be..56ee0c3 100644
--- a/wscript
+++ b/wscript
@@ -12,7 +12,7 @@
opt.load(['compiler_cxx', 'gnu_dirs'])
opt.load(['default-compiler-flags',
'coverage', 'sanitizers', 'boost', 'sqlite3',
- 'doxygen', 'sphinx_build'],
+ 'doxygen', 'sphinx'],
tooldir=['.waf-tools'])
optgrp = opt.add_option_group('NDNS Options')
@@ -22,7 +22,7 @@
def configure(conf):
conf.load(['compiler_cxx', 'gnu_dirs',
'default-compiler-flags', 'boost', 'sqlite3',
- 'doxygen', 'sphinx_build'])
+ 'doxygen', 'sphinx'])
conf.env.WITH_TESTS = conf.options.with_tests
@@ -52,8 +52,8 @@
conf.load('sanitizers')
conf.define_cond('HAVE_TESTS', conf.env.WITH_TESTS)
- conf.define('CONFDIR', '%s/ndn/ndns' % conf.env.SYSCONFDIR)
- conf.define('DEFAULT_DBFILE', '%s/lib/ndn/ndns/ndns.db' % conf.env.LOCALSTATEDIR)
+ conf.define('CONFDIR', f'{conf.env.SYSCONFDIR}/ndn/ndns')
+ conf.define('DEFAULT_DBFILE', f'{conf.env.LOCALSTATEDIR}/lib/ndn/ndns/ndns.db')
conf.write_config_header('src/config.hpp', define_prefix='NDNS_')
def build(bld):
@@ -81,9 +81,10 @@
includes='src',
export_includes='src')
- bld.recurse('tools')
bld.recurse('tests')
+ bld.recurse('tools')
+ # Install sample configs
bld(features='subst',
name='conf-samples',
source=['validator.conf.sample.in', 'ndns.conf.sample.in'],
@@ -159,43 +160,43 @@
Context.g_module.VERSION_SPLIT = VERSION_BASE.split('.')
# first, try to get a version string from git
- gotVersionFromGit = False
+ version_from_git = ''
try:
- cmd = ['git', 'describe', '--always', '--match', f'{GIT_TAG_PREFIX}*']
- out = subprocess.run(cmd, capture_output=True, check=True, text=True).stdout.strip()
- if out:
- gotVersionFromGit = True
- if out.startswith(GIT_TAG_PREFIX):
- Context.g_module.VERSION = out.lstrip(GIT_TAG_PREFIX)
+ cmd = ['git', 'describe', '--abbrev=8', '--always', '--match', f'{GIT_TAG_PREFIX}*']
+ version_from_git = subprocess.run(cmd, capture_output=True, check=True, text=True).stdout.strip()
+ if version_from_git:
+ if GIT_TAG_PREFIX and version_from_git.startswith(GIT_TAG_PREFIX):
+ Context.g_module.VERSION = version_from_git[len(GIT_TAG_PREFIX):]
+ elif not GIT_TAG_PREFIX and ('.' in version_from_git or '-' in version_from_git):
+ Context.g_module.VERSION = version_from_git
else:
- # no tags matched
- Context.g_module.VERSION = f'{VERSION_BASE}-commit-{out}'
+ # no tags matched (or we are in a shallow clone)
+ Context.g_module.VERSION = f'{VERSION_BASE}+git.{version_from_git}'
except (OSError, subprocess.SubprocessError):
pass
- versionFile = ctx.path.find_node('VERSION.info')
- if not gotVersionFromGit and versionFile is not None:
+ # fallback to the VERSION.info file, if it exists and is not empty
+ version_from_file = ''
+ version_file = ctx.path.find_node('VERSION.info')
+ if version_file is not None:
try:
- Context.g_module.VERSION = versionFile.read()
- return
- except EnvironmentError:
- pass
+ version_from_file = version_file.read().strip()
+ except OSError as e:
+ Logs.warn(f'{e.filename} exists but is not readable ({e.strerror})')
+ if version_from_file and not version_from_git:
+ Context.g_module.VERSION = version_from_file
+ return
- # version was obtained from git, update VERSION file if necessary
- if versionFile is not None:
- try:
- if versionFile.read() == Context.g_module.VERSION:
- # already up-to-date
- return
- except EnvironmentError as e:
- Logs.warn(f'{versionFile} exists but is not readable ({e.strerror})')
- else:
- versionFile = ctx.path.make_node('VERSION.info')
-
+ # update VERSION.info if necessary
+ if version_from_file == Context.g_module.VERSION:
+ # already up-to-date
+ return
+ if version_file is None:
+ version_file = ctx.path.make_node('VERSION.info')
try:
- versionFile.write(Context.g_module.VERSION)
- except EnvironmentError as e:
- Logs.warn(f'{versionFile} is not writable ({e.strerror})')
+ version_file.write(Context.g_module.VERSION)
+ except OSError as e:
+ Logs.warn(f'{e.filename} is not writable ({e.strerror})')
def dist(ctx):
ctx.algo = 'tar.xz'