build: Add build system
Change-Id: I073a9213d13c50451be5938e4e15c4a41a1166aa
diff --git a/.waf-tools/boost.py b/.waf-tools/boost.py
new file mode 100644
index 0000000..305945a
--- /dev/null
+++ b/.waf-tools/boost.py
@@ -0,0 +1,378 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,boost
+ or, if you have waf >= 1.6.2
+$ ./waf update --files=boost
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('compiler_cxx boost')
+
+ def configure(conf):
+ conf.load('compiler_cxx boost')
+ conf.check_boost(lib='system filesystem')
+
+ def build(bld):
+ bld(source='main.cpp', target='app', use='BOOST')
+
+Options are generated to specify the location of the boost includes/libraries.
+The `check_boost` configuration function allows you to specify which boost libraries to use.
+It can also provide default values for the --boost-static and --boost-mt command-line options.
+Everything will be packaged together in a BOOST component that you can use.
+
+When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
+ - you may have to add /EHsc to your CXXFLAGS, or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
+   Errors: C4530
+ - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC,
+   so before calling `conf.check_boost` you might want to disable it by adding:
+       conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+ - boost might also be compiled with /MT, which links the runtime statically.
+   If you have problems with redefined symbols, try:
+       self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+       self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
+Passing `--boost-linkage_autodetect` might help ensure correct linkage in some basic cases.
+
+'''
+
+import sys
+import re
+from waflib import Utils, Logs, Errors
+from waflib.Configure import conf
+
+BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib', '/usr/lib/x86_64-linux-gnu', '/usr/lib/i386-linux-gnu', '/usr/local/ndn/lib']
+BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include', '/usr/local/ndn/include']
+BOOST_VERSION_FILE = 'boost/version.hpp'
+BOOST_VERSION_CODE = '''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
+'''
+BOOST_SYSTEM_CODE = '''
+#include <boost/system/error_code.hpp>
+int main() { boost::system::error_code c; }
+'''
+BOOST_THREAD_CODE = '''
+#include <boost/thread.hpp>
+int main() { boost::thread t; }
+'''
+
+# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
+PLATFORM = Utils.unversioned_sys_platform()
+detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
+detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
+detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
+BOOST_TOOLSETS = {
+ 'borland': 'bcb',
+ 'clang': detect_clang,
+ 'como': 'como',
+ 'cw': 'cw',
+ 'darwin': 'xgcc',
+ 'edg': 'edg',
+ 'g++': detect_mingw,
+ 'gcc': detect_mingw,
+ 'icpc': detect_intel,
+ 'intel': detect_intel,
+ 'kcc': 'kcc',
+ 'kylix': 'bck',
+ 'mipspro': 'mp',
+ 'mingw': 'mgw',
+ 'msvc': 'vc',
+ 'qcc': 'qcc',
+ 'sun': 'sw',
+ 'sunc++': 'sw',
+ 'tru64cxx': 'tru',
+ 'vacpp': 'xlc'
+}
+
+
+def options(opt):
+ opt = opt.add_option_group('Boost Options')
+
+ opt.add_option('--boost-includes', type='string',
+ default='', dest='boost_includes',
+ help='''path to the directory where the boost includes are, e.g., /path/to/boost_1_55_0/stage/include''')
+ opt.add_option('--boost-libs', type='string',
+ default='', dest='boost_libs',
+ help='''path to the directory where the boost libs are, e.g., /path/to/boost_1_55_0/stage/lib''')
+ opt.add_option('--boost-static', action='store_true',
+ default=False, dest='boost_static',
+ help='link with static boost libraries (.lib/.a)')
+ opt.add_option('--boost-mt', action='store_true',
+ default=False, dest='boost_mt',
+ help='select multi-threaded libraries')
+ opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
+                   help='''select libraries with ABI tags (e.g., d for debug); see the Boost Getting Started guide, section 6.1''')
+ opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
+                   help="auto-detect boost linkage options (experimental; may break other configurations)")
+ opt.add_option('--boost-toolset', type='string',
+ default='', dest='boost_toolset',
+ help='force a toolset e.g. msvc, vc90, gcc, mingw, mgw45 (default: auto)')
+ py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
+ opt.add_option('--boost-python', type='string',
+ default=py_version, dest='boost_python',
+ help='select the lib python with this version (default: %s)' % py_version)
+
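+# Example configure invocation (the paths below are only placeholders):
+#   ./waf configure --boost-includes=/path/to/boost/include --boost-libs=/path/to/boost/lib --boost-mt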
+
+@conf
+def __boost_get_version_file(self, d):
+ dnode = self.root.find_dir(d)
+ if dnode:
+ return dnode.find_node(BOOST_VERSION_FILE)
+ return None
+
+@conf
+def boost_get_version(self, d):
+ """silently retrieve the boost version number"""
+ node = self.__boost_get_version_file(d)
+ if node:
+ try:
+ txt = node.read()
+ except (OSError, IOError):
+ Logs.error("Could not read the file %r" % node.abspath())
+ else:
+ re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
+ m1 = re_but1.search(txt)
+
+ re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
+ m2 = re_but2.search(txt)
+
+ if m1 and m2:
+ return (m1.group(1), m2.group(1))
+
+ return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")
+
+@conf
+def boost_get_includes(self, *k, **kw):
+ includes = k and k[0] or kw.get('includes', None)
+ if includes and self.__boost_get_version_file(includes):
+ return includes
+ for d in Utils.to_list(self.environ.get('INCLUDE', '')) + BOOST_INCLUDES:
+ if self.__boost_get_version_file(d):
+ return d
+ if includes:
+ self.end_msg('headers not found in %s' % includes)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
+ self.fatal('The configuration failed')
+
+
+@conf
+def boost_get_toolset(self, cc):
+ toolset = cc
+ if not cc:
+ build_platform = Utils.unversioned_sys_platform()
+ if build_platform in BOOST_TOOLSETS:
+ cc = build_platform
+ else:
+ cc = self.env.CXX_NAME
+ if cc in BOOST_TOOLSETS:
+ toolset = BOOST_TOOLSETS[cc]
+ return isinstance(toolset, str) and toolset or toolset(self.env)
+
+
+@conf
+def __boost_get_libs_path(self, *k, **kw):
+ ''' return the lib path and all the files in it '''
+ if 'files' in kw:
+ return self.root.find_dir('.'), Utils.to_list(kw['files'])
+ libs = k and k[0] or kw.get('libs', None)
+ if libs:
+ path = self.root.find_dir(libs)
+ files = path.ant_glob('*boost_*')
+ if not libs or not files:
+ for d in Utils.to_list(self.environ.get('LIB', [])) + BOOST_LIBS:
+ path = self.root.find_dir(d)
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ path = self.root.find_dir(d + '64')
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ if not path:
+ if libs:
+ self.end_msg('libs not found in %s' % libs)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
+ self.fatal('The configuration failed')
+
+ self.to_log('Found the boost path in %r with the libraries:' % path)
+ for x in files:
+ self.to_log(' %r' % x)
+ return path, files
+
+@conf
+def boost_get_libs(self, *k, **kw):
+ '''
+ return the lib path and the required libs
+ according to the parameters
+ '''
+ path, files = self.__boost_get_libs_path(**kw)
+ t = []
+ if kw.get('mt', False):
+ t.append('mt')
+ if kw.get('abi', None):
+ t.append(kw['abi'])
+ tags = t and '(-%s)+' % '-'.join(t) or ''
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ toolset_pat = '(-%s[0-9]{0,3})+' % toolset
+ version = '(-%s)+' % self.env.BOOST_VERSION
+
+ def find_lib(re_lib, files):
+ for file in files:
+ if re_lib.search(file.name):
+ self.to_log('Found boost lib %s' % file)
+ return file
+ return None
+
+ def format_lib_name(name):
+ if name.startswith('lib') and self.env.CC_NAME != 'msvc':
+ name = name[3:]
+ return name[:name.rfind('.')]
+
+ libs = []
+ for lib in Utils.to_list(k and k[0] or kw.get('lib', None)):
+ py = (lib == 'python') and '(-py%s)+' % kw['python'] or ''
+        # Try each pattern in turn, from the strictest match to the loosest
+ for pattern in ['boost_%s%s%s%s%s' % (lib, toolset_pat, tags, py, version),
+ 'boost_%s%s%s%s' % (lib, tags, py, version),
+ 'boost_%s%s%s' % (lib, tags, version),
+ # Give up trying to find the right version
+ 'boost_%s%s%s%s' % (lib, toolset_pat, tags, py),
+ 'boost_%s%s%s' % (lib, tags, py),
+ 'boost_%s%s' % (lib, tags)]:
+ self.to_log('Trying pattern %s' % pattern)
+ file = find_lib(re.compile(pattern), files)
+ if file:
+ libs.append(format_lib_name(file.name))
+ break
+ else:
+ self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
+ self.fatal('The configuration failed')
+
+ return path.abspath(), libs
+
+
+@conf
+def check_boost(self, *k, **kw):
+ """
+ Initialize boost libraries to be used.
+
+    Keywords: you can pass the same parameters as on the command line (without the "--boost-" prefix).
+    Note that command-line options take priority and should preferably be used.
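+
+    A minimal example (the library names are only an illustration; pass whichever
+    Boost components your project needs via `lib`):
+
+        conf.check_boost(lib='system filesystem', mt=True)
+
+    The resulting uselib (BOOST by default) can then be referenced from build(),
+    e.g. bld(..., use='BOOST').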
+ """
+ if not self.env['CXX']:
+ self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
+
+ params = {'lib': k and k[0] or kw.get('lib', None)}
+ for key, value in self.options.__dict__.items():
+ if not key.startswith('boost_'):
+ continue
+ key = key[len('boost_'):]
+ params[key] = value and value or kw.get(key, '')
+
+ var = kw.get('uselib_store', 'BOOST')
+
+ self.start_msg('Checking boost includes')
+ self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
+ versions = self.boost_get_version(inc)
+ self.env.BOOST_VERSION = versions[0]
+ self.env.BOOST_VERSION_NUMBER = int(versions[1])
+ self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
+ int(versions[1]) / 100 % 1000,
+ int(versions[1]) % 100))
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])
+
+ if not params['lib']:
+ return
+ self.start_msg('Checking boost libs')
+ suffix = params.get('static', None) and 'ST' or ''
+ path, libs = self.boost_get_libs(**params)
+ self.env['%sLIBPATH_%s' % (suffix, var)] = [path]
+ self.env['%sLIB_%s' % (suffix, var)] = libs
+ self.end_msg('ok')
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % path)
+ Logs.pprint('CYAN', ' libs : %s' % libs)
+
+
+ def try_link():
+ if 'system' in params['lib']:
+ self.check_cxx(
+ fragment=BOOST_SYSTEM_CODE,
+ use=var,
+ execute=False,
+ )
+ if 'thread' in params['lib']:
+ self.check_cxx(
+ fragment=BOOST_THREAD_CODE,
+ use=var,
+ execute=False,
+ )
+
+ if params.get('linkage_autodetect', False):
+ self.start_msg("Attempting to detect boost linkage flags")
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ if toolset in ['vc']:
+            # disable MSVC auto-linking, which otherwise causes error LNK1181
+            # because the library names it requests are not the ones present on disk
+ self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+
+ # if no dlls are present, we guess the .lib files are not stubs
+ has_dlls = False
+ for x in Utils.listdir(path):
+ if x.endswith(self.env.cxxshlib_PATTERN % ''):
+ has_dlls = True
+ break
+ if not has_dlls:
+ self.env['STLIBPATH_%s' % var] = [path]
+ self.env['STLIB_%s' % var] = libs
+ del self.env['LIB_%s' % var]
+ del self.env['LIBPATH_%s' % var]
+
+ # we attempt to play with some known-to-work CXXFLAGS combinations
+ for cxxflags in (['/MD', '/EHsc'], []):
+ self.env.stash()
+ self.env["CXXFLAGS_%s" % var] += cxxflags
+ try:
+ try_link()
+ self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
+ e = None
+ break
+ except Errors.ConfigurationError as exc:
+ self.env.revert()
+ e = exc
+
+ if e is not None:
+                self.end_msg("Could not auto-detect boost linking flags combination, you may report it to the boost.py author", ex=e)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
+ self.fatal('The configuration failed')
+ else:
+ self.start_msg('Checking for boost linkage')
+ try:
+ try_link()
+ except Errors.ConfigurationError as e:
+ self.end_msg("Could not link against boost libraries using supplied options")
+ self.fatal('The configuration failed')
+ self.end_msg('ok')
diff --git a/.waf-tools/coverage.py b/.waf-tools/coverage.py
new file mode 100644
index 0000000..0a3db65
--- /dev/null
+++ b/.waf-tools/coverage.py
@@ -0,0 +1,24 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+#
+# Copyright (c) 2014, Regents of the University of California
+#
+# GPL 3.0 license, see the COPYING.md file for more information
+
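+# A minimal usage sketch (assuming the tool is kept in this repository's .waf-tools directory):
+#
+#     def options(opt):
+#         opt.load('coverage', tooldir=['.waf-tools'])
+#
+#     def configure(conf):
+#         conf.load('coverage')
+#
+# The add_coverage() method below appends GCOV to the `use` of every cc/cxx task generator;
+# the GCOV flags themselves are only defined when --with-coverage is given at configure time.
+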
+from waflib import TaskGen
+
+def options(opt):
+ opt.add_option('--with-coverage', action='store_true', default=False, dest='with_coverage',
+ help='''Set compiler flags for gcc to enable code coverage information''')
+
+def configure(conf):
+ if conf.options.with_coverage:
+ conf.check_cxx(cxxflags=['-fprofile-arcs', '-ftest-coverage', '-fPIC'],
+ linkflags=['-fprofile-arcs'], uselib_store='GCOV', mandatory=True)
+
+@TaskGen.feature('cxx','cc')
+@TaskGen.after('process_source')
+def add_coverage(self):
+ if getattr(self, 'use', ''):
+ self.use += ' GCOV'
+ else:
+ self.use = 'GCOV'
diff --git a/.waf-tools/default-compiler-flags.py b/.waf-tools/default-compiler-flags.py
new file mode 100644
index 0000000..97921e2
--- /dev/null
+++ b/.waf-tools/default-compiler-flags.py
@@ -0,0 +1,62 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+#
+# Copyright (c) 2014, Regents of the University of California
+#
+# GPL 3.0 license, see the COPYING.md file for more information
+
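+# A minimal usage sketch (assuming the tool lives in this repository's .waf-tools directory):
+#
+#     def options(opt):
+#         opt.load('default-compiler-flags', tooldir=['.waf-tools'])
+#
+#     def configure(conf):
+#         conf.load('default-compiler-flags')
+#
+# Flags that the compiler rejects are dropped by add_supported_cxxflags() below.
+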
+from waflib import Logs, Configure
+
+def options(opt):
+ opt.add_option('--debug', '--with-debug', action='store_true', default=False, dest='debug',
+ help='''Compile in debugging mode without all optimizations (-O0)''')
+ opt.add_option('--with-c++11', action='store_true', default=False, dest='use_cxx11',
+ help='''Enable C++11 mode (experimental, may not work)''')
+
+def configure(conf):
+ areCustomCxxflagsPresent = (len(conf.env.CXXFLAGS) > 0)
+ defaultFlags = []
+
+ if conf.options.use_cxx11:
+ defaultFlags += ['-std=c++0x', '-std=c++11']
+ else:
+ defaultFlags += ['-std=c++03', '-Wno-variadic-macros', '-Wno-c99-extensions']
+
+ defaultFlags += ['-pedantic', '-Wall', '-Wno-long-long', '-Wno-unneeded-internal-declaration']
+
+ if conf.options.debug:
+ conf.define('_DEBUG', 1)
+ defaultFlags += ['-O0',
+ '-Og', # gcc >= 4.8
+ '-g3',
+ '-fcolor-diagnostics', # clang
+ '-fdiagnostics-color', # gcc >= 4.9
+ '-Werror',
+ '-Wno-error=maybe-uninitialized', # Bug #1560
+ ]
+ if areCustomCxxflagsPresent:
+ missingFlags = [x for x in defaultFlags if x not in conf.env.CXXFLAGS]
+ if len(missingFlags) > 0:
+ Logs.warn("Selected debug mode, but CXXFLAGS is set to a custom value '%s'"
+ % " ".join(conf.env.CXXFLAGS))
+ Logs.warn("Default flags '%s' are not activated" % " ".join(missingFlags))
+ else:
+ conf.add_supported_cxxflags(defaultFlags)
+ else:
+ defaultFlags += ['-O2', '-g']
+ if not areCustomCxxflagsPresent:
+ conf.add_supported_cxxflags(defaultFlags)
+
+@Configure.conf
+def add_supported_cxxflags(self, cxxflags):
+ """
+    Check which of the given cxxflags are supported by the compiler and add them to env.CXXFLAGS
+ """
+ self.start_msg('Checking allowed flags for c++ compiler')
+
+ supportedFlags = []
+ for flag in cxxflags:
+ if self.check_cxx(cxxflags=['-Werror', flag], mandatory=False):
+ supportedFlags += [flag]
+
+ self.end_msg(' '.join(supportedFlags))
+ self.env.CXXFLAGS = supportedFlags + self.env.CXXFLAGS
diff --git a/.waf-tools/dependency-checker.py b/.waf-tools/dependency-checker.py
new file mode 100644
index 0000000..629fbfd
--- /dev/null
+++ b/.waf-tools/dependency-checker.py
@@ -0,0 +1,28 @@
+# encoding: utf-8
+
+from waflib import Options, Logs
+from waflib.Configure import conf
+
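+# A usage sketch; the dependency name 'foo' is purely hypothetical and the calls follow
+# the signatures defined below:
+#
+#     def options(opt):
+#         opt.load('dependency-checker', tooldir=['.waf-tools'])
+#         opt.addDependencyOptions(opt, 'foo')   # adds a --with-foo option
+#
+#     def configure(conf):
+#         conf.load('dependency-checker')
+#         conf.checkDependency(name='foo', lib='foo', mandatory=False)
+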
+def addDependencyOptions(self, opt, name, extraHelp=''):
+ opt.add_option('--with-%s' % name, type='string', default=None,
+ dest='with_%s' % name,
+ help='Path to %s, e.g., /usr/local %s' % (name, extraHelp))
+setattr(Options.OptionsContext, "addDependencyOptions", addDependencyOptions)
+
+@conf
+def checkDependency(self, name, **kw):
+ root = kw.get('path', getattr(Options.options, 'with_%s' % name))
+ kw['msg'] = kw.get('msg', 'Checking for %s library' % name)
+ kw['uselib_store'] = kw.get('uselib_store', name.upper())
+ kw['define_name'] = kw.get('define_name', 'HAVE_%s' % kw['uselib_store'])
+ kw['mandatory'] = kw.get('mandatory', True)
+
+ if root:
+ isOk = self.check_cxx(includes="%s/include" % root,
+ libpath="%s/lib" % root,
+ **kw)
+ else:
+ isOk = self.check_cxx(**kw)
+
+ if isOk:
+ self.env[kw['define_name']] = True
diff --git a/.waf-tools/doxygen.py b/.waf-tools/doxygen.py
new file mode 100644
index 0000000..ac8c70b
--- /dev/null
+++ b/.waf-tools/doxygen.py
@@ -0,0 +1,214 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy 2008-2010 (ita)
+
+"""
+
+Doxygen support
+
+Variables passed to bld():
+* doxyfile -- the Doxyfile to use
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('doxygen')
+
+ def configure(conf):
+ conf.load('doxygen')
+ # check conf.env.DOXYGEN, if it is mandatory
+
+ def build(bld):
+ if bld.env.DOXYGEN:
+ bld(features="doxygen", doxyfile='Doxyfile', ...)
+
+ def doxygen(bld):
+ if bld.env.DOXYGEN:
+ bld(features="doxygen", doxyfile='Doxyfile', ...)
+"""
+
+from fnmatch import fnmatchcase
+import os, os.path, re, stat
+from waflib import Task, Utils, Node, Logs, Errors, Build
+from waflib.TaskGen import feature
+
+DOXY_STR = '"${DOXYGEN}" - '
+DOXY_FMTS = 'html latex man rtf xml'.split()
+DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
+c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
+inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx
+'''.split())
+
+re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
+re_nl = re.compile('\r*\n', re.M)
+def parse_doxy(txt):
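+    # Parse Doxyfile text into a {KEY: value} dict; '+=' assignments are appended to any existing value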
+ tbl = {}
+ txt = re_rl.sub('', txt)
+ lines = re_nl.split(txt)
+ for x in lines:
+ x = x.strip()
+ if not x or x.startswith('#') or x.find('=') < 0:
+ continue
+ if x.find('+=') >= 0:
+ tmp = x.split('+=')
+ key = tmp[0].strip()
+ if key in tbl:
+ tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
+ else:
+ tbl[key] = '+='.join(tmp[1:]).strip()
+ else:
+ tmp = x.split('=')
+ tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
+ return tbl
+
+class doxygen(Task.Task):
+ vars = ['DOXYGEN', 'DOXYFLAGS']
+ color = 'BLUE'
+
+ def runnable_status(self):
+        '''
+        self.pars is populated here in runnable_status, because this function
+        runs *before* both of its "consumers": scan() and run().
+
+        Also sets output_dir (node) for the output.
+        '''
+
+ for x in self.run_after:
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'pars', None):
+ txt = self.inputs[0].read()
+ self.pars = parse_doxy(txt)
+ if not self.pars.get('OUTPUT_DIRECTORY'):
+ self.pars['OUTPUT_DIRECTORY'] = self.inputs[0].parent.get_bld().abspath()
+
+ # Override with any parameters passed to the task generator
+ if getattr(self.generator, 'pars', None):
+                for k, v in self.generator.pars.items():
+ self.pars[k] = v
+
+ self.doxy_inputs = getattr(self, 'doxy_inputs', [])
+ if not self.pars.get('INPUT'):
+ self.doxy_inputs.append(self.inputs[0].parent)
+ else:
+ for i in self.pars.get('INPUT').split():
+ if os.path.isabs(i):
+ node = self.generator.bld.root.find_node(i)
+ else:
+ node = self.generator.path.find_node(i)
+ if not node:
+ self.generator.bld.fatal('Could not find the doxygen input %r' % i)
+ self.doxy_inputs.append(node)
+
+ if not getattr(self, 'output_dir', None):
+ bld = self.generator.bld
+ # First try to find an absolute path, then find or declare a relative path
+ self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
+ if not self.output_dir:
+ self.output_dir = bld.path.find_or_declare(self.pars['OUTPUT_DIRECTORY'])
+
+ self.signature()
+ return Task.Task.runnable_status(self)
+
+ def scan(self):
+ exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
+ file_patterns = self.pars.get('FILE_PATTERNS','').split()
+ if not file_patterns:
+ file_patterns = DOXY_FILE_PATTERNS
+ if self.pars.get('RECURSIVE') == 'YES':
+ file_patterns = ["**/%s" % pattern for pattern in file_patterns]
+ nodes = []
+ names = []
+ for node in self.doxy_inputs:
+ if os.path.isdir(node.abspath()):
+ for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
+ nodes.append(m)
+ else:
+ nodes.append(node)
+ return (nodes, names)
+
+ def run(self):
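+        # Build the Doxyfile configuration in memory from self.pars and pipe it to `doxygen -` on stdin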
+ dct = self.pars.copy()
+ dct['INPUT'] = ' '.join(['"%s"' % x.abspath() for x in self.doxy_inputs])
+ code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
+ code = code.encode() # for python 3
+ #fmt = DOXY_STR % (self.inputs[0].parent.abspath())
+ cmd = Utils.subst_vars(DOXY_STR, self.env)
+ env = self.env.env or None
+ proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.generator.bld.path.get_bld().abspath())
+ proc.communicate(code)
+ return proc.returncode
+
+ def post_run(self):
+ nodes = self.output_dir.ant_glob('**/*', quiet=True)
+ for x in nodes:
+ x.sig = Utils.h_file(x.abspath())
+ self.outputs += nodes
+ return Task.Task.post_run(self)
+
+class tar(Task.Task):
+ "quick tar creation"
+ run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
+ color = 'RED'
+ after = ['doxygen']
+ def runnable_status(self):
+ for x in getattr(self, 'input_tasks', []):
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'tar_done_adding', None):
+ # execute this only once
+ self.tar_done_adding = True
+ for x in getattr(self, 'input_tasks', []):
+ self.set_inputs(x.outputs)
+ if not self.inputs:
+ return Task.SKIP_ME
+ return Task.Task.runnable_status(self)
+
+ def __str__(self):
+ tgt_str = ' '.join([a.nice_path(self.env) for a in self.outputs])
+ return '%s: %s\n' % (self.__class__.__name__, tgt_str)
+
+@feature('doxygen')
+def process_doxy(self):
+ if not getattr(self, 'doxyfile', None):
+        self.bld.fatal('no doxyfile??')
+
+ node = self.doxyfile
+ if not isinstance(node, Node.Node):
+ node = self.path.find_resource(node)
+ if not node:
+ raise ValueError('doxygen file not found')
+
+ # the task instance
+ dsk = self.create_task('doxygen', node)
+
+ if getattr(self, 'doxy_tar', None):
+ tsk = self.create_task('tar')
+ tsk.input_tasks = [dsk]
+ tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
+ if self.doxy_tar.endswith('bz2'):
+ tsk.env['TAROPTS'] = ['cjf']
+ elif self.doxy_tar.endswith('gz'):
+ tsk.env['TAROPTS'] = ['czf']
+ else:
+ tsk.env['TAROPTS'] = ['cf']
+
+def configure(conf):
+ '''
+ Check if doxygen and tar commands are present in the system
+
+ If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
+    variables will be set. Detection can be controlled by setting the DOXYGEN and
+    TAR environment variables.
+ '''
+
+ conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
+ conf.find_program('tar', var='TAR', mandatory=False)
+
+# doxygen docs
+from waflib.Build import BuildContext
+class doxy(BuildContext):
+ cmd = "doxygen"
+ fun = "doxygen"
diff --git a/.waf-tools/pch.py b/.waf-tools/pch.py
new file mode 100644
index 0000000..08cc5de
--- /dev/null
+++ b/.waf-tools/pch.py
@@ -0,0 +1,148 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# Alexander Afanasyev (UCLA), 2014
+
+"""
+Enable precompiled C++ header support (currently only clang++ and g++ are supported)
+
+To use this tool, wscript should look like:
+
+ def options(opt):
+ opt.load('pch')
+        # This will add a `--without-pch` configure option.
+        # Precompiled header support is enabled by default; pass --without-pch at configure time to disable it
+
+ def configure(conf):
+ conf.load('pch')
+        # this will set conf.env.WITH_PCH if a supported compiler is used and --without-pch was not given
+        # Unless conf.env.WITH_PCH is set, precompiled header support is disabled
+
+ def build(bld):
+ bld(features='cxx pch',
+ target='precompiled-headers',
+ name='precompiled-headers',
+ headers='a.h b.h c.h', # headers to pre-compile into `precompiled-headers`
+
+ # Other parameters to compile precompiled headers
+ # includes=...,
+ # export_includes=...,
+ # use=...,
+ # ...
+
+ # Exported parameters will be propagated even if precompiled headers are disabled
+ )
+
+ bld(
+ target='test',
+ features='cxx cxxprogram',
+ source='a.cpp b.cpp d.cpp main.cpp',
+ use='precompiled-headers',
+ )
+
+ # or
+
+ bld(
+ target='test',
+ features='pch cxx cxxprogram',
+ source='a.cpp b.cpp d.cpp main.cpp',
+ headers='a.h b.h c.h',
+ )
+
+Note that the precompiled header must have guards against multiple inclusion. If the guards are missing, any benefit of the precompiled header will be lost and compilation may fail in some cases.
+"""
+
+import os
+from waflib import Task, TaskGen, Logs, Utils
+from waflib.Tools import c_preproc, cxx
+
+
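+# For each supported compiler: [flag used to include the precompiled header when compiling sources,
+#                               precompiled header file extension,
+#                               extra flags used to compile the header itself]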
+PCH_COMPILER_OPTIONS = {
+ 'clang++': [['-include'], '.pch', ['-x', 'c++-header']],
+ 'g++': [['-include'], '.gch', ['-x', 'c++-header']],
+}
+
+
+def options(opt):
+    opt.add_option('--without-pch', action='store_false', default=True, dest='with_pch', help='''Disable precompiled headers (by default, g++ and clang++ use them to speed up compilation)''')
+
+def configure(conf):
+ if (conf.options.with_pch and conf.env['COMPILER_CXX'] in PCH_COMPILER_OPTIONS.keys()):
+ conf.env.WITH_PCH = True
+ flags = PCH_COMPILER_OPTIONS[conf.env['COMPILER_CXX']]
+ conf.env.CXXPCH_F = flags[0]
+ conf.env.CXXPCH_EXT = flags[1]
+ conf.env.CXXPCH_FLAGS = flags[2]
+
+
+@TaskGen.feature('pch')
+@TaskGen.before('process_source')
+def apply_pch(self):
+ if not self.env.WITH_PCH:
+ return
+
+ if getattr(self.bld, 'pch_tasks', None) is None:
+ self.bld.pch_tasks = {}
+
+ if getattr(self, 'headers', None) is None:
+ return
+
+ self.headers = self.to_nodes(self.headers)
+
+ if getattr(self, 'name', None):
+ try:
+ task = self.bld.pch_tasks[self.name]
+ self.bld.fatal("Duplicated 'pch' task with name %r" % self.name)
+ except KeyError:
+ pass
+
+ out = '%s.%d%s' % (self.target, self.idx, self.env['CXXPCH_EXT'])
+ out = self.path.find_or_declare(out)
+ task = self.create_task('gchx', self.headers, out)
+
+    # target should be the absolute path of `out`, but without the precompiled header extension
+ task.target = out.abspath()[:-len(out.suffix())]
+
+ self.pch_task = task
+ if getattr(self, 'name', None):
+ self.bld.pch_tasks[self.name] = task
+
+@TaskGen.feature('cxx')
+@TaskGen.after_method('process_source', 'propagate_uselib_vars')
+def add_pch(self):
+ if not (self.env['WITH_PCH'] and getattr(self, 'use', None) and getattr(self, 'compiled_tasks', None) and getattr(self.bld, 'pch_tasks', None)):
+ return
+
+ pch = None
+ # find pch task, if any
+
+ if getattr(self, 'pch_task', None):
+ pch = self.pch_task
+ else:
+ for use in Utils.to_list(self.use):
+ try:
+ pch = self.bld.pch_tasks[use]
+ except KeyError:
+ pass
+
+ if pch:
+ for x in self.compiled_tasks:
+ x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])
+
+class gchx(Task.Task):
+ run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()}'
+ scan = c_preproc.scan
+ color = 'BLUE'
+ ext_out=['.h']
+
+ def runnable_status(self):
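+        # clang++ refuses to use a precompiled header that is older than the headers it was built
+        # from, so force a rebuild whenever any input or discovered dependency is newer than the output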
+ try:
+ node_deps = self.generator.bld.node_deps[self.uid()]
+ except KeyError:
+ node_deps = []
+ ret = Task.Task.runnable_status(self)
+ if ret == Task.SKIP_ME and self.env.CXX_NAME == 'clang':
+ t = os.stat(self.outputs[0].abspath()).st_mtime
+ for n in self.inputs + node_deps:
+ if os.stat(n.abspath()).st_mtime > t:
+ return Task.RUN_ME
+ return ret
diff --git a/.waf-tools/sphinx_build.py b/.waf-tools/sphinx_build.py
new file mode 100644
index 0000000..e61da6e
--- /dev/null
+++ b/.waf-tools/sphinx_build.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# inspired by code by Hans-Martin von Gaudecker, 2012
+
+import os
+from waflib import Node, Task, TaskGen, Errors, Logs, Build, Utils
+
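+# A usage sketch (attribute names follow apply_sphinx() below; the paths and version are placeholders):
+#
+#     def build(bld):
+#         if bld.env.SPHINX_BUILD:
+#             bld(features='sphinx',
+#                 builder='man',
+#                 config='docs/conf.py',
+#                 source=bld.path.ant_glob('docs/**/*.rst'),
+#                 install_path='${MANDIR}',
+#                 VERSION='0.1.0')
+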
+class sphinx_build(Task.Task):
+ color = 'BLUE'
+ run_str = '${SPHINX_BUILD} -D ${VERSION} -D ${RELEASE} -q -b ${BUILDERNAME} -d ${DOCTREEDIR} ${SRCDIR} ${OUTDIR}'
+
+ def __str__(self):
+ env = self.env
+ src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
+ tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
+ if self.outputs: sep = ' -> '
+ else: sep = ''
+        return '%s [%s]: %s%s%s\n' % (self.__class__.__name__.replace('_task', ''),
+                                      self.env['BUILDERNAME'], src_str, sep, tgt_str)
+
+@TaskGen.extension('.py', '.rst')
+def sig_hook(self, node):
+ node.sig=Utils.h_file(node.abspath())
+
+@TaskGen.feature("sphinx")
+@TaskGen.before_method("process_source")
+def apply_sphinx(self):
+ """Set up the task generator with a Sphinx instance and create a task."""
+
+ inputs = []
+ for i in Utils.to_list(self.source):
+ if not isinstance(i, Node.Node):
+            node = self.path.find_node(i)
+ else:
+ node = i
+ if not node:
+ raise ValueError('[%s] file not found' % i)
+ inputs.append(node)
+
+ task = self.create_task('sphinx_build', inputs)
+
+ conf = self.path.find_node(self.config)
+ task.inputs.append(conf)
+
+ confdir = conf.parent.abspath()
+ buildername = getattr(self, "builder", "html")
+ srcdir = getattr(self, "srcdir", confdir)
+ outdir = self.path.find_or_declare(getattr(self, "outdir", buildername)).get_bld()
+ doctreedir = getattr(self, "doctreedir", os.path.join(outdir.abspath(), ".doctrees"))
+
+ task.env['BUILDERNAME'] = buildername
+ task.env['SRCDIR'] = srcdir
+ task.env['DOCTREEDIR'] = doctreedir
+ task.env['OUTDIR'] = outdir.abspath()
+ task.env['VERSION'] = "version=%s" % self.VERSION
+ task.env['RELEASE'] = "release=%s" % self.VERSION
+
+ import imp
+ confData = imp.load_source('sphinx_conf', conf.abspath())
+
+ if buildername == "man":
+ for i in confData.man_pages:
+ target = outdir.find_or_declare('%s.%d' % (i[1], i[4]))
+ task.outputs.append(target)
+
+ if self.install_path:
+ self.bld.install_files("%s/man%d/" % (self.install_path, i[4]), target)
+ else:
+ task.outputs.append(outdir)
+
+def configure(conf):
+ conf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False)
+
+# sphinx docs
+from waflib.Build import BuildContext
+class sphinx(BuildContext):
+ cmd = "sphinx"
+ fun = "sphinx"
diff --git a/.waf-tools/sqlite3.py b/.waf-tools/sqlite3.py
new file mode 100644
index 0000000..28dd57c
--- /dev/null
+++ b/.waf-tools/sqlite3.py
@@ -0,0 +1,41 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+from waflib import Options
+from waflib.Configure import conf
+
+def options(opt):
+ opt.add_option('--with-sqlite3', type='string', default=None,
+ dest='with_sqlite3', help='''Path to SQLite3, e.g., /usr/local''')
+ opt.add_option('--without-sqlite-locking', action='store_false', default=True,
+ dest='with_sqlite_locking',
+ help='''Disable filesystem locking in sqlite3 database '''
+ '''(use unix-dot locking mechanism instead). '''
+ '''This option may be necessary if home directory is hosted on NFS.''')
+
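+# A minimal usage sketch ('SQLITE3' is the default uselib_store; target/source names are placeholders):
+#
+#     def configure(conf):
+#         conf.load('sqlite3', tooldir=['.waf-tools'])
+#         conf.check_sqlite3(mandatory=True)
+#
+#     def build(bld):
+#         bld.program(target='app', source='main.cpp', use='SQLITE3')
+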
+@conf
+def check_sqlite3(self, *k, **kw):
+ root = k and k[0] or kw.get('path', None) or Options.options.with_sqlite3
+ mandatory = kw.get('mandatory', True)
+ var = kw.get('uselib_store', 'SQLITE3')
+
+ if root:
+ self.check_cxx(lib='sqlite3',
+ msg='Checking for SQLite3 library',
+ define_name='HAVE_%s' % var,
+ uselib_store=var,
+ mandatory=mandatory,
+ includes="%s/include" % root,
+ libpath="%s/lib" % root)
+ else:
+ try:
+ self.check_cfg(package='sqlite3',
+ args=['--cflags', '--libs'],
+ uselib_store='SQLITE3',
+ mandatory=True)
+ except:
+ self.check_cxx(lib='sqlite3',
+ msg='Checking for SQLite3 library',
+ define_name='HAVE_%s' % var,
+ uselib_store=var,
+ mandatory=mandatory)