build: Add waf as a build system
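
This adds a top-level wscript, a bundled waf binary, and helper tools under
.waf-tools/ (boost, cryptopp, openssl, doxygen, sphinx_build). Typical usage
(a sketch; optional flags such as --with-tests, --debug, and --with-log4cxx
are defined in the new wscript):

    ./waf configure [--with-tests] [--debug]
    ./waf
    ./waf install
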
Change-Id: If5074b252bc30c2fdcc28ae94cb7ad2858a25d9f
diff --git a/.gitignore b/.gitignore
index 35c2f84..85f6715 100644
--- a/.gitignore
+++ b/.gitignore
@@ -83,3 +83,9 @@
# pkg-config file
libndn-cpp-dev.pc
+
+# waf build system
+.waf-1*
+.lock*
+build/
+.waf-tools/*.pyc
diff --git a/.waf-tools/boost.py b/.waf-tools/boost.py
new file mode 100644
index 0000000..c714b5b
--- /dev/null
+++ b/.waf-tools/boost.py
@@ -0,0 +1,373 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,boost
+ or, if you have waf >= 1.6.2
+$ ./waf update --files=boost
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('compiler_cxx boost')
+
+ def configure(conf):
+ conf.load('compiler_cxx boost')
+ conf.check_boost(lib='system filesystem')
+
+ def build(bld):
+ bld(source='main.cpp', target='app', use='BOOST')
+
+Options are generated to specify the location of the boost includes and libraries.
+The `check_boost` configuration function lets you specify which boost libraries are used.
+It can also provide default values for the --boost-static and --boost-mt command-line options.
+Everything will be packaged together in a BOOST component that you can use.
+
+When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
+ - you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
+ Errors: C4530
+ - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC
+   So before calling `conf.check_boost` you might want to disable it by adding:
+ conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+ Errors:
+ - boost might also be compiled with /MT, which links the runtime statically.
+   If you have problems with redefined symbols, try adding:
+ self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+ self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
+Passing `--boost-linkage_autodetect` might help ensure correct linkage in some basic cases.
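+
+For example, a minimal configure sketch for a static, multi-threaded Boost built
+with MSVC (the library names are illustrative):
+
+    def configure(conf):
+        conf.load('compiler_cxx boost')
+        conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+        conf.check_boost(lib='system filesystem', static=True, mt=True)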
+
+'''
+
+import sys
+import re
+from waflib import Utils, Logs, Errors
+from waflib.Configure import conf
+
+BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib', '/usr/lib/x86_64-linux-gnu', '/usr/lib/i386-linux-gnu', '/usr/local/ndn/lib']
+BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include', '/usr/local/ndn/include']
+BOOST_VERSION_FILE = 'boost/version.hpp'
+BOOST_VERSION_CODE = '''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_LIB_VERSION << std::endl; }
+'''
+
+# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
+PLATFORM = Utils.unversioned_sys_platform()
+detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
+detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
+detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
+BOOST_TOOLSETS = {
+ 'borland': 'bcb',
+ 'clang': detect_clang,
+ 'como': 'como',
+ 'cw': 'cw',
+ 'darwin': 'xgcc',
+ 'edg': 'edg',
+ 'g++': detect_mingw,
+ 'gcc': detect_mingw,
+ 'icpc': detect_intel,
+ 'intel': detect_intel,
+ 'kcc': 'kcc',
+ 'kylix': 'bck',
+ 'mipspro': 'mp',
+ 'mingw': 'mgw',
+ 'msvc': 'vc',
+ 'qcc': 'qcc',
+ 'sun': 'sw',
+ 'sunc++': 'sw',
+ 'tru64cxx': 'tru',
+ 'vacpp': 'xlc'
+}
+
+
+def options(opt):
+ opt = opt.add_option_group('Boost Options')
+
+ opt.add_option('--boost-includes', type='string',
+ default='', dest='boost_includes',
+ help='''path to the boost includes root (~boost root)
+ e.g. /path/to/boost_1_47_0''')
+ opt.add_option('--boost-libs', type='string',
+ default='', dest='boost_libs',
+ help='''path to the directory where the boost libs are
+ e.g. /path/to/boost_1_47_0/stage/lib''')
+ opt.add_option('--boost-static', action='store_true',
+ default=False, dest='boost_static',
+ help='link with static boost libraries (.lib/.a)')
+ opt.add_option('--boost-mt', action='store_true',
+ default=False, dest='boost_mt',
+ help='select multi-threaded libraries')
+ opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
+ help='''select libraries with tags (dgsyp, d for debug),
+ see doc Boost, Getting Started, chapter 6.1''')
+ opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
+ help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
+ opt.add_option('--boost-toolset', type='string',
+ default='', dest='boost_toolset',
+ help='force a toolset e.g. msvc, vc90, \
+ gcc, mingw, mgw45 (default: auto)')
+ py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
+ opt.add_option('--boost-python', type='string',
+ default=py_version, dest='boost_python',
+ help='select the lib python with this version \
+ (default: %s)' % py_version)
+
+
+@conf
+def __boost_get_version_file(self, d):
+ dnode = self.root.find_dir(d)
+ if dnode:
+ return dnode.find_node(BOOST_VERSION_FILE)
+ return None
+
+@conf
+def boost_get_version(self, d):
+ """silently retrieve the boost version number"""
+ node = self.__boost_get_version_file(d)
+ if node:
+ try:
+ txt = node.read()
+ except (OSError, IOError):
+ Logs.error("Could not read the file %r" % node.abspath())
+ else:
+ re_but = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.*)"', re.M)
+ m = re_but.search(txt)
+ if m:
+ return m.group(1)
+ return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True)
+
+@conf
+def boost_get_includes(self, *k, **kw):
+ includes = k and k[0] or kw.get('includes', None)
+ if includes and self.__boost_get_version_file(includes):
+ return includes
+ for d in Utils.to_list(self.environ.get('INCLUDE', '')) + BOOST_INCLUDES:
+ if self.__boost_get_version_file(d):
+ return d
+ if includes:
+ self.end_msg('headers not found in %s' % includes)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
+ self.fatal('The configuration failed')
+
+
+@conf
+def boost_get_toolset(self, cc):
+ toolset = cc
+ if not cc:
+ build_platform = Utils.unversioned_sys_platform()
+ if build_platform in BOOST_TOOLSETS:
+ cc = build_platform
+ else:
+ cc = self.env.CXX_NAME
+ if cc in BOOST_TOOLSETS:
+ toolset = BOOST_TOOLSETS[cc]
+ return isinstance(toolset, str) and toolset or toolset(self.env)
+
+
+@conf
+def __boost_get_libs_path(self, *k, **kw):
+ ''' return the lib path and all the files in it '''
+ if 'files' in kw:
+ return self.root.find_dir('.'), Utils.to_list(kw['files'])
+ libs = k and k[0] or kw.get('libs', None)
+ if libs:
+ path = self.root.find_dir(libs)
+ files = path.ant_glob('*boost_*')
+ if not libs or not files:
+ for d in Utils.to_list(self.environ.get('LIB', [])) + BOOST_LIBS:
+ path = self.root.find_dir(d)
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ path = self.root.find_dir(d + '64')
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ if not path:
+ if libs:
+ self.end_msg('libs not found in %s' % libs)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
+ self.fatal('The configuration failed')
+
+ self.to_log('Found the boost path in %r with the libraries:' % path)
+ for x in files:
+ self.to_log(' %r' % x)
+ return path, files
+
+@conf
+def boost_get_libs(self, *k, **kw):
+ '''
+ return the lib path and the required libs
+ according to the parameters
+ '''
+ path, files = self.__boost_get_libs_path(**kw)
+ t = []
+ if kw.get('mt', False):
+ t.append('mt')
+ if kw.get('abi', None):
+ t.append(kw['abi'])
+ tags = t and '(-%s)+' % '-'.join(t) or ''
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ toolset_pat = '(-%s[0-9]{0,3})+' % toolset
+ version = '(-%s)+' % self.env.BOOST_VERSION
+
+ def find_lib(re_lib, files):
+ for file in files:
+ if re_lib.search(file.name):
+ self.to_log('Found boost lib %s' % file)
+ return file
+ return None
+
+ def format_lib_name(name):
+ if name.startswith('lib') and self.env.CC_NAME != 'msvc':
+ name = name[3:]
+ return name[:name.rfind('.')]
+
+ libs = []
+ for lib in Utils.to_list(k and k[0] or kw.get('lib', None)):
+ py = (lib == 'python') and '(-py%s)+' % kw['python'] or ''
+		# Try libraries, from the strictest match to the loosest
+ for pattern in ['boost_%s%s%s%s%s' % (lib, toolset_pat, tags, py, version),
+ 'boost_%s%s%s%s' % (lib, tags, py, version),
+ 'boost_%s%s%s' % (lib, tags, version),
+ # Give up trying to find the right version
+ 'boost_%s%s%s%s' % (lib, toolset_pat, tags, py),
+ 'boost_%s%s%s' % (lib, tags, py),
+ 'boost_%s%s' % (lib, tags)]:
+ self.to_log('Trying pattern %s' % pattern)
+ file = find_lib(re.compile(pattern), files)
+ if file:
+ libs.append(format_lib_name(file.name))
+ break
+ else:
+ self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
+ self.fatal('The configuration failed')
+
+ return path.abspath(), libs
+
+
+@conf
+def check_boost(self, *k, **kw):
+ """
+ Initialize boost libraries to be used.
+
+ Keywords: you can pass the same parameters as with the command line (without "--boost-").
+	Note that the command-line options take priority, and should preferably be used.
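+
+	For example (a sketch; `mt` mirrors the --boost-mt option):
+
+		conf.check_boost(lib='system filesystem', mt=True)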
+ """
+ if not self.env['CXX']:
+ self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
+
+ params = {'lib': k and k[0] or kw.get('lib', None)}
+ for key, value in self.options.__dict__.items():
+ if not key.startswith('boost_'):
+ continue
+ key = key[len('boost_'):]
+ params[key] = value and value or kw.get(key, '')
+
+ var = kw.get('uselib_store', 'BOOST')
+
+ self.start_msg('Checking boost includes')
+ self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
+ self.env.BOOST_VERSION = self.boost_get_version(inc)
+ self.end_msg(self.env.BOOST_VERSION)
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])
+
+ if not params['lib']:
+ return
+ self.start_msg('Checking boost libs')
+ suffix = params.get('static', None) and 'ST' or ''
+ path, libs = self.boost_get_libs(**params)
+ self.env['%sLIBPATH_%s' % (suffix, var)] = [path]
+ self.env['%sLIB_%s' % (suffix, var)] = libs
+ self.end_msg('ok')
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % path)
+ Logs.pprint('CYAN', ' libs : %s' % libs)
+
+
+ def try_link():
+ if 'system' in params['lib']:
+ self.check_cxx(
+ fragment="\n".join([
+ '#include <boost/system/error_code.hpp>',
+ 'int main() { boost::system::error_code c; }',
+ ]),
+ use=var,
+ execute=False,
+ )
+ if 'thread' in params['lib']:
+ self.check_cxx(
+ fragment="\n".join([
+ '#include <boost/thread.hpp>',
+ 'int main() { boost::thread t; }',
+ ]),
+ use=var,
+ execute=False,
+ )
+
+ if params.get('linkage_autodetect', False):
+ self.start_msg("Attempting to detect boost linkage flags")
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ if toolset in ['vc']:
+			# disable the auto-linking feature, which otherwise causes error LNK1181
+			# because the linker is asked for library names that are not actually there
+ self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+
+ # if no dlls are present, we guess the .lib files are not stubs
+ has_dlls = False
+ for x in Utils.listdir(path):
+ if x.endswith(self.env.cxxshlib_PATTERN % ''):
+ has_dlls = True
+ break
+ if not has_dlls:
+ self.env['STLIBPATH_%s' % var] = [path]
+ self.env['STLIB_%s' % var] = libs
+ del self.env['LIB_%s' % var]
+ del self.env['LIBPATH_%s' % var]
+
+ # we attempt to play with some known-to-work CXXFLAGS combinations
+ for cxxflags in (['/MD', '/EHsc'], []):
+ self.env.stash()
+ self.env["CXXFLAGS_%s" % var] += cxxflags
+ try:
+ try_link()
+ self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
+ e = None
+ break
+ except Errors.ConfigurationError as exc:
+ self.env.revert()
+ e = exc
+
+ if e is not None:
+ self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=e)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
+ self.fatal('The configuration failed')
+ else:
+ self.start_msg('Checking for boost linkage')
+ try:
+ try_link()
+ except Errors.ConfigurationError as e:
+ self.end_msg("Could not link against boost libraries using supplied options")
+ self.fatal('The configuration failed')
+ self.end_msg('ok')
+
diff --git a/.waf-tools/cryptopp.py b/.waf-tools/cryptopp.py
new file mode 100644
index 0000000..a05326b
--- /dev/null
+++ b/.waf-tools/cryptopp.py
@@ -0,0 +1,77 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+'''
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+        opt.load('cryptopp', tooldir=['.waf-tools'])
+
+    def configure(conf):
+        conf.load('compiler_cxx cryptopp')
+        conf.check_cryptopp()
+
+ def build(bld):
+ bld(source='main.cpp', target='app', use='CRYPTOPP')
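+
+A specific installation prefix can also be passed at configure time (a sketch;
+the path is illustrative):
+
+    conf.check_cryptopp(path='/opt/local')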
+
+An option is generated to specify the location of the cryptopp includes/libraries.
+
+
+'''
+import sys
+import re
+from waflib import Utils,Logs,Errors
+from waflib.Configure import conf
+CRYPTOPP_DIR=['/usr','/usr/local','/opt/local','/sw']
+CRYPTOPP_VERSION_FILE='config.h'
+CRYPTOPP_VERSION_CODE='''
+#include <iostream>
+#include <cryptopp/config.h>
+int main() { std::cout << CRYPTOPP_VERSION; }
+'''
+
+def options(opt):
+ opt.add_option('--cryptopp',type='string',default='',dest='cryptopp_dir',help='''path to where cryptopp is installed, e.g. /opt/local''')
+@conf
+def __cryptopp_get_version_file(self,dir):
+ try:
+ return self.root.find_dir(dir).find_node('%s/%s' % ('include/cryptopp', CRYPTOPP_VERSION_FILE))
+ except:
+ return None
+@conf
+def cryptopp_get_version(self,dir):
+ val=self.check_cxx(fragment=CRYPTOPP_VERSION_CODE,includes=['%s/%s' % (dir, 'include')], execute=True, define_ret = True, mandatory=True)
+ return val
+@conf
+def cryptopp_get_root(self,*k,**kw):
+ root=k and k[0]or kw.get('path',None)
+ # Logs.pprint ('RED', ' %s' %root)
+ if root and self.__cryptopp_get_version_file(root):
+ return root
+ for dir in CRYPTOPP_DIR:
+ if self.__cryptopp_get_version_file(dir):
+ return dir
+ if root:
+ self.fatal('CryptoPP not found in %s'%root)
+ else:
+ self.fatal('CryptoPP not found, please provide a --cryptopp argument (see help)')
+@conf
+def check_cryptopp(self,*k,**kw):
+ if not self.env['CXX']:
+ self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
+
+ var=kw.get('uselib_store','CRYPTOPP')
+ self.start_msg('Checking Crypto++ lib')
+ root = self.cryptopp_get_root(*k,**kw)
+ self.env.CRYPTOPP_VERSION=self.cryptopp_get_version(root)
+
+ self.env['INCLUDES_%s'%var]= '%s/%s' % (root, "include")
+ self.env['LIB_%s'%var] = "cryptopp"
+ self.env['LIBPATH_%s'%var] = '%s/%s' % (root, "lib")
+
+ self.end_msg(self.env.CRYPTOPP_VERSION)
+ if Logs.verbose:
+ Logs.pprint('CYAN',' CRYPTOPP include : %s'%self.env['INCLUDES_%s'%var])
+ Logs.pprint('CYAN',' CRYPTOPP lib : %s'%self.env['LIB_%s'%var])
+ Logs.pprint('CYAN',' CRYPTOPP libpath : %s'%self.env['LIBPATH_%s'%var])
+
diff --git a/.waf-tools/doxygen.py b/.waf-tools/doxygen.py
new file mode 100644
index 0000000..07014ee
--- /dev/null
+++ b/.waf-tools/doxygen.py
@@ -0,0 +1,184 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy 2008-2010 (ita)
+
+"""
+
+Doxygen support
+
+Variables passed to bld():
+* doxyfile -- the Doxyfile to use
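+
+For example (a minimal sketch; 'Doxyfile' is assumed to sit next to the wscript):
+
+    def build(bld):
+        bld(features='doxygen', doxyfile='Doxyfile')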
+
+ported from waf 1.5 (incomplete)
+"""
+
+from fnmatch import fnmatchcase
+import os, os.path, re, stat
+from waflib import Task, Utils, Node, Logs
+from waflib.TaskGen import feature
+
+DOXY_STR = '${DOXYGEN} - '
+DOXY_FMTS = 'html latex man rtf xml'.split()
+DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
+c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
+inc m mm py f90
+'''.split())
+
+re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
+re_nl = re.compile('\r*\n', re.M)
+def parse_doxy(txt):
+ tbl = {}
+ txt = re_rl.sub('', txt)
+ lines = re_nl.split(txt)
+ for x in lines:
+ x = x.strip()
+ if not x or x.startswith('#') or x.find('=') < 0:
+ continue
+ if x.find('+=') >= 0:
+ tmp = x.split('+=')
+ key = tmp[0].strip()
+ if key in tbl:
+ tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
+ else:
+ tbl[key] = '+='.join(tmp[1:]).strip()
+ else:
+ tmp = x.split('=')
+ tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
+ return tbl
+
+class doxygen(Task.Task):
+ vars = ['DOXYGEN', 'DOXYFLAGS']
+ color = 'BLUE'
+
+ def runnable_status(self):
+ '''
+		self.pars is populated here in runnable_status(), because this method runs
+		*before* both of its consumers, scan() and run().
+
+		It also sets output_dir (a node) for the output.
+ '''
+
+ for x in self.run_after:
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'pars', None):
+ txt = self.inputs[0].read()
+ self.pars = parse_doxy(txt)
+ if not self.pars.get('OUTPUT_DIRECTORY'):
+ self.pars['OUTPUT_DIRECTORY'] = self.inputs[0].parent.get_bld().abspath()
+
+ self.doxy_inputs = getattr(self, 'doxy_inputs', [])
+ if not self.pars.get('INPUT'):
+ self.doxy_inputs.append(self.inputs[0].parent)
+ else:
+ for i in self.pars.get('INPUT').split():
+ if os.path.isabs(i):
+ node = self.generator.bld.root.find_node(i)
+ else:
+ node = self.generator.path.find_node(i)
+ if not node:
+ self.generator.bld.fatal('Could not find the doxygen input %r' % i)
+ self.doxy_inputs.append(node)
+
+ if not getattr(self, 'output_dir', None):
+ bld = self.generator.bld
+ # First try to find an absolute path, then find or declare a relative path
+ self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
+ if not self.output_dir:
+ self.output_dir = bld.path.find_or_declare(self.pars['OUTPUT_DIRECTORY'])
+
+ self.signature()
+ return Task.Task.runnable_status(self)
+
+ def scan(self):
+ if self.pars.get('RECURSIVE') == 'YES':
+ Logs.warn("Doxygen RECURSIVE dependencies are not supported")
+
+ exclude_patterns = self.pars.get('EXCLUDE_PATTERNS', '').split()
+ file_patterns = self.pars.get('FILE_PATTERNS', '').split()
+ if not file_patterns:
+ file_patterns = DOXY_FILE_PATTERNS
+
+ nodes = []
+ names = []
+ for node in self.doxy_inputs:
+ if os.path.isdir(node.abspath()):
+ for m in node.ant_glob(file_patterns):
+ nodes.append(m)
+ else:
+ nodes.append(node)
+ return (nodes, names)
+
+ def run(self):
+ dct = self.pars.copy()
+ # TODO will break if paths have spaces
+ dct['INPUT'] = ' '.join([x.abspath() for x in self.doxy_inputs])
+ code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
+ code = code.encode() # for python 3
+ #fmt = DOXY_STR % (self.inputs[0].parent.abspath())
+ cmd = Utils.subst_vars(DOXY_STR, self.env)
+ env = self.env.env or None
+ proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.generator.bld.path.get_bld().abspath())
+ proc.communicate(code)
+ return proc.returncode
+
+ def post_run(self):
+ nodes = self.output_dir.ant_glob('**/*', quiet=True)
+ for x in nodes:
+ x.sig = Utils.h_file(x.abspath())
+ self.outputs += nodes
+ return Task.Task.post_run(self)
+
+class tar(Task.Task):
+ "quick tar creation"
+ run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
+ color = 'RED'
+ after = ['doxygen']
+ def runnable_status(self):
+ for x in getattr(self, 'input_tasks', []):
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'tar_done_adding', None):
+ # execute this only once
+ self.tar_done_adding = True
+ for x in getattr(self, 'input_tasks', []):
+ self.set_inputs(x.outputs)
+ if not self.inputs:
+ return Task.SKIP_ME
+ return Task.Task.runnable_status(self)
+
+ def __str__(self):
+ tgt_str = ' '.join([a.nice_path(self.env) for a in self.outputs])
+ return '%s: %s\n' % (self.__class__.__name__, tgt_str)
+
+@feature('doxygen')
+def process_doxy(self):
+ if not getattr(self, 'doxyfile', None):
+ self.generator.bld.fatal('no doxyfile??')
+
+ node = self.doxyfile
+ if not isinstance(node, Node.Node):
+ node = self.path.find_resource(node)
+ if not node:
+ raise ValueError('doxygen file not found')
+
+ # the task instance
+ dsk = self.create_task('doxygen', node)
+
+ if getattr(self, 'doxy_tar', None):
+ tsk = self.create_task('tar')
+ tsk.input_tasks = [dsk]
+ tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
+ if self.doxy_tar.endswith('bz2'):
+ tsk.env['TAROPTS'] = ['cjf']
+ elif self.doxy_tar.endswith('gz'):
+ tsk.env['TAROPTS'] = ['czf']
+ else:
+ tsk.env['TAROPTS'] = ['cf']
+
+def configure(conf):
+ conf.find_program('doxygen', var='DOXYGEN')
+ conf.find_program('tar', var='TAR')
+
diff --git a/.waf-tools/openssl.py b/.waf-tools/openssl.py
new file mode 100644
index 0000000..7f599a9
--- /dev/null
+++ b/.waf-tools/openssl.py
@@ -0,0 +1,59 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+'''
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+        opt.load('openssl', tooldir=['.waf-tools'])
+
+ def configure(conf):
+ conf.load('compiler_c openssl')
+
+ conf.check_openssl()
+
+ def build(bld):
+ bld(source='main.cpp', target='app', use='OPENSSL')
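+
+An OpenSSL installation prefix can also be passed explicitly (a sketch; the path
+is illustrative):
+
+    conf.check_openssl(path='/usr/local/opt/openssl')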
+
+'''
+
+from waflib import Options
+from waflib.Configure import conf
+
+@conf
+def check_openssl(self,*k,**kw):
+ root = k and k[0] or kw.get('path',None) or Options.options.with_openssl
+ mandatory = kw.get('mandatory', True)
+ var = kw.get('var', 'OPENSSL')
+
+ CODE = """
+#include <openssl/crypto.h>
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+ (void)argc;
+ printf ("%s", argv[0]);
+
+ return 0;
+}
+"""
+ if root:
+ libcrypto = self.check_cc (lib=['ssl', 'crypto'],
+ header_name='openssl/crypto.h',
+ define_name='HAVE_%s' % var,
+ uselib_store=var,
+ mandatory = mandatory,
+ cflags="-I%s/include" % root,
+ linkflags="-L%s/lib" % root,
+ execute = True, fragment = CODE, define_ret = True)
+ else:
+ libcrypto = self.check_cc (lib=['ssl', 'crypto'],
+ header_name='openssl/crypto.h',
+ define_name='HAVE_%s' % var,
+ uselib_store=var,
+ mandatory = mandatory,
+ execute = True, fragment = CODE, define_ret = True)
+
+def options(opt):
+ opt.add_option('--with-openssl',type='string',default='',dest='with_openssl',help='''Path to OpenSSL''')
diff --git a/.waf-tools/sphinx_build.py b/.waf-tools/sphinx_build.py
new file mode 100644
index 0000000..e1155d1
--- /dev/null
+++ b/.waf-tools/sphinx_build.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Hans-Martin von Gaudecker, 2012
+
+"""
+Create Sphinx documentation. Currently only LaTeX and HTML are supported.
+
+The source file **must** be the conf.py file used by Sphinx. Everything else
+has defaults; passing in the parameters is optional.
+
+Usage for getting both html and pdf docs:
+
+ ctx(features='sphinx', source='docs/conf.py')
+ ctx(features='sphinx', source='docs/conf.py', buildername='latex')
+
+Optional parameters and their defaults:
+
+ * buildername: html
+ * srcdir: confdir (the directory where conf.py lives)
+ * outdir: confdir/buildername (in the build directory tree)
+ * doctreedir: outdir/.doctrees
+ * type: pdflatex (only applies to 'latex' builder)
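+
+For example, to build the PDF with xelatex instead of pdflatex (a sketch;
+'docs/conf.py' is an illustrative path):
+
+    ctx(features='sphinx', source='docs/conf.py', buildername='latex', type='xelatex')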
+
+"""
+
+
+import os
+from waflib import Task, TaskGen, Errors, Logs
+
+class RunSphinxBuild(Task.Task):
+ def scan(self):
+ """Use Sphinx' internal environment to find the dependencies."""
+ s = self.sphinx_instance
+ msg, dummy, iterator = s.env.update(s.config, s.srcdir, s.doctreedir, s)
+ s.info(msg)
+ dep_nodes = []
+ for docname in s.builder.status_iterator(iterator, "reading sources... "):
+ filename = docname + s.config.source_suffix
+ dep_nodes.append(self.srcdir.find_node(filename))
+ for dep in s.env.dependencies.values():
+ # Need the 'str' call because Sphinx might return Unicode strings.
+ [dep_nodes.append(self.srcdir.find_node(str(d))) for d in dep]
+ return (dep_nodes, [])
+
+ def run(self):
+ """Run the Sphinx build."""
+ self.sphinx_instance.build(force_all=False, filenames=None)
+ return None
+
+ def post_run(self):
+ """Add everything found in the output directory tree as an output.
+ Not elegant, but pragmatic."""
+ for n in self.outdir.ant_glob("**", quiet=True, remove=False):
+ if n not in self.outputs: self.set_outputs(n)
+ super(RunSphinxBuild, self).post_run()
+
+
+def _get_main_targets(tg, s):
+ """Return some easy targets known from the Sphinx build environment **s.env**."""
+ out_dir = tg.bld.root.find_node(s.outdir)
+ tgt_nodes = []
+ if s.builder.name == "latex":
+ for tgt_info in s.env.config.latex_documents:
+ tgt_nodes.append(out_dir.find_or_declare(tgt_info[1]))
+ elif s.builder.name == "html":
+ suffix = getattr(s.env.config, "html_file_suffix", ".html")
+ tgt_name = s.env.config.master_doc + suffix
+ tgt_nodes.append(out_dir.find_or_declare(tgt_name))
+ else:
+ raise Errors.WafError("Sphinx builder not implemented: %s" % s.builder.name)
+ return tgt_nodes
+
+
+@TaskGen.feature("sphinx")
+@TaskGen.before_method("process_source")
+def apply_sphinx(tg):
+ """Set up the task generator with a Sphinx instance and create a task."""
+
+ from sphinx.application import Sphinx
+
+ # Put together the configuration based on defaults and tg attributes.
+ conf = tg.path.find_node(tg.source)
+ confdir = conf.parent.abspath()
+ buildername = getattr(tg, "buildername", "html")
+ srcdir = getattr(tg, "srcdir", confdir)
+ outdir = tg.path.find_or_declare (getattr(tg, "outdir", os.path.join(conf.parent.get_bld().abspath(), buildername))).abspath ()
+
+ doctreedir = getattr(tg, "doctreedir", os.path.join(outdir, ".doctrees"))
+
+ # Set up the Sphinx instance.
+ s = Sphinx (srcdir, confdir, outdir, doctreedir, buildername, status=None)
+
+ # Get the main targets of the Sphinx build.
+ tgt_nodes = _get_main_targets(tg, s)
+
+ # Create the task and set the required attributes.
+ task = tg.create_task("RunSphinxBuild", src=conf, tgt=tgt_nodes)
+ task.srcdir = tg.bld.root.find_node(s.srcdir)
+ task.outdir = tg.bld.root.find_node(s.outdir)
+ task.sphinx_instance = s
+
+ # Build pdf if we have the LaTeX builder, allow for building with xelatex.
+ if s.builder.name == "latex":
+ compile_type = getattr(tg, "type", "pdflatex")
+ tg.bld(features="tex", type=compile_type, source=tgt_nodes, name="sphinx_pdf", prompt=0)
+
+ # Bypass the execution of process_source by setting the source to an empty list
+ tg.source = []
diff --git a/examples/wscript b/examples/wscript
new file mode 100644
index 0000000..bafda9c
--- /dev/null
+++ b/examples/wscript
@@ -0,0 +1,12 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+
+top = '..'
+
+def build(bld):
+ for app in bld.path.ant_glob('*.cpp'):
+ bld(features=['cxx', 'cxxprogram'],
+ target = '%s' % (str(app.change_ext('','.cpp'))),
+ source = app,
+ use = 'ndn-cpp-dev',
+ includes = "../include",
+ )
diff --git a/tests/wscript b/tests/wscript
new file mode 100644
index 0000000..bafda9c
--- /dev/null
+++ b/tests/wscript
@@ -0,0 +1,12 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+
+top = '..'
+
+def build(bld):
+ for app in bld.path.ant_glob('*.cpp'):
+ bld(features=['cxx', 'cxxprogram'],
+ target = '%s' % (str(app.change_ext('','.cpp'))),
+ source = app,
+ use = 'ndn-cpp-dev',
+ includes = "../include",
+ )
diff --git a/tools/wscript b/tools/wscript
new file mode 100644
index 0000000..bafda9c
--- /dev/null
+++ b/tools/wscript
@@ -0,0 +1,12 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+
+top = '..'
+
+def build(bld):
+ for app in bld.path.ant_glob('*.cpp'):
+ bld(features=['cxx', 'cxxprogram'],
+ target = '%s' % (str(app.change_ext('','.cpp'))),
+ source = app,
+ use = 'ndn-cpp-dev',
+ includes = "../include",
+ )
diff --git a/waf b/waf
new file mode 100755
index 0000000..f916f71
--- /dev/null
+++ b/waf
Binary files differ
diff --git a/wscript b/wscript
new file mode 100644
index 0000000..b381eaa
--- /dev/null
+++ b/wscript
@@ -0,0 +1,187 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+VERSION='0.3~dev0'
+NAME="ndn-cpp-dev"
+
+from waflib import Build, Logs, Utils, Task, TaskGen, Configure
+
+def options(opt):
+ opt.load('compiler_c compiler_cxx gnu_dirs c_osx')
+ opt.load('boost doxygen openssl cryptopp', tooldir=['.waf-tools'])
+
+ opt = opt.add_option_group('NDN-CPP Options')
+
+ opt.add_option('--debug',action='store_true',default=False,dest='debug',help='''debugging mode''')
+
+ opt.add_option('--with-tests', action='store_true',default=False,dest='with_tests',
+ help='''build unit tests''')
+ opt.add_option('--with-log4cxx', action='store_true',default=False,dest='log4cxx',
+ help='''Compile with log4cxx logging support''')
+
+ opt.add_option('--with-c++11', action='store_true', default=False, dest='use_cxx11',
+                   help='''Use C++11 features if available in the compiler''')
+ opt.add_option('--without-system-boost', action='store_false', default=True, dest='use_system_boost',
+                   help='''Do not use system's boost libraries''')
+
+
+def configure(conf):
+ conf.load("compiler_c compiler_cxx boost gnu_dirs c_osx openssl cryptopp")
+ try:
+ conf.load("doxygen")
+ except:
+ pass
+
+ if conf.options.with_tests:
+ conf.env['WITH_TESTS'] = True
+
+ # Optional functions
+ for func in ['memcmp', 'memcpy', 'memset']:
+ conf.check(function_name=func, header_name='string.h', mandatory=False)
+
+ # Mandatory functions
+ for func in ['strchr', 'sscanf']:
+ conf.check(function_name=func, header_name=['string.h', 'stdio.h'])
+
+ # Mandatory headers
+ for header in ['time.h', 'sys/time.h']:
+ conf.check(header_name=header)
+
+ conf.check(function_name='gettimeofday', header_name=['time.h', 'sys/time.h'])
+
+ conf.check_openssl()
+
+ if conf.options.debug:
+ conf.define ('_DEBUG', 1)
+ flags = ['-O0',
+ '-Wall',
+ # '-Werror',
+ '-Wno-unused-variable',
+ '-g3',
+ '-Wno-unused-private-field', # only clang supports
+ '-fcolor-diagnostics', # only clang supports
+ '-Qunused-arguments', # only clang supports
+ '-Wno-tautological-compare', # suppress warnings from CryptoPP
+ '-Wno-unused-function', # another annoying warning from CryptoPP
+
+ '-Wno-deprecated-declarations',
+ ]
+
+ conf.add_supported_cxxflags (cxxflags = flags)
+ else:
+ flags = ['-O3', '-g', '-Wno-tautological-compare', '-Wno-unused-function', '-Wno-deprecated-declarations']
+ conf.add_supported_cxxflags (cxxflags = flags)
+
+ if Utils.unversioned_sys_platform () == "darwin":
+ conf.check_cxx(framework_name='CoreFoundation', uselib_store='OSX_COREFOUNDATION', mandatory=True)
+
+ conf.define ("PACKAGE_BUGREPORT", "ndn-lib@lists.cs.ucla.edu")
+ conf.define ("PACKAGE_NAME", NAME)
+ conf.define ("PACKAGE_VERSION", VERSION)
+ conf.define ("PACKAGE_URL", "https://github.com/named-data/ndn-cpp")
+
+ conf.check_cfg(package='sqlite3', args=['--cflags', '--libs'], uselib_store='SQLITE3', mandatory=True)
+
+ if conf.options.log4cxx:
+ conf.check_cfg(package='liblog4cxx', args=['--cflags', '--libs'], uselib_store='LOG4CXX', mandatory=True)
+ conf.define ("HAVE_LOG4CXX", 1)
+
+ conf.check_cryptopp(path=conf.options.cryptopp_dir, mandatory=True)
+
+ if conf.options.use_cxx11:
+ conf.add_supported_cxxflags(cxxflags = ['-std=c++11', '-std=c++0x'])
+
+ conf.check(msg='Checking for type std::shared_ptr',
+ type_name="std::shared_ptr<int>", header_name="memory", define_name='HAVE_STD_SHARED_PTR')
+ conf.check(msg='Checking for type std::function',
+ type_name="std::function<void()>", header_name="functional", define_name='HAVE_STD_FUNCTION')
+ conf.define('HAVE_CXX11', 1)
+ else:
+ if conf.options.use_system_boost:
+ USED_BOOST_LIBS = 'system filesystem iostreams'
+ if conf.env['WITH_TESTS']:
+ USED_BOOST_LIBS += " unit_test_framework"
+
+ conf.check_boost(lib=USED_BOOST_LIBS)
+
+ boost_version = conf.env.BOOST_VERSION.split('_')
+ if int(boost_version[0]) > 1 or (int(boost_version[0]) == 1 and int(boost_version[1]) >= 46):
+ conf.env['USE_SYSTEM_BOOST'] = True
+ conf.define('USE_SYSTEM_BOOST', 1)
+
+ conf.write_config_header('include/ndn-cpp/ndn-cpp-config.h', define_prefix='NDN_CPP_')
+
+def build (bld):
+ libndn_cpp = bld (
+ target="ndn-cpp-dev",
+ vnum = "0.3.0",
+ features=['cxx', 'cxxshlib', 'cxxstlib'],
+ source = bld.path.ant_glob(['src/**/*.cpp',
+ 'new/**/*.cpp']),
+ use = 'BOOST OPENSSL LOG4CXX CRYPTOPP SQLITE3',
+ includes = ". include",
+ )
+
+ if Utils.unversioned_sys_platform () == "darwin":
+ libndn_cpp.mac_app = True
+ libndn_cpp.use += " OSX_COREFOUNDATION"
+
+ # Unit tests
+ if bld.env['WITH_TESTS']:
+ unittests = bld.program (
+ target="unit-tests",
+ features = "cxx cxxprogram",
+ source = bld.path.ant_glob(['tests_boost/*.cpp']),
+ use = 'ndn-cpp-dev',
+ includes = ".",
+          install_path = None,
+ )
+
+ bld.recurse("tools examples tests")
+
+ headers = bld.path.ant_glob(['src/**/*.hpp',
+ 'src/**/*.h'])
+ bld.install_files("%s/ndn-cpp" % bld.env['INCLUDEDIR'], headers, relative_trick=True, cwd=bld.path.find_node('src'))
+
+ bld.install_files("%s/ndn-cpp" % bld.env['INCLUDEDIR'], bld.path.find_resource('include/ndn-cpp/ndn-cpp-config.h'))
+
+ headers = bld.path.ant_glob(['include/**/*.hpp', 'include/**/*.h'])
+ bld.install_files("%s" % bld.env['INCLUDEDIR'], headers, relative_trick=True, cwd=bld.path.find_node('include'))
+
+@Configure.conf
+def add_supported_cxxflags(self, cxxflags):
+ """
+ Check which cxxflags are supported by compiler and add them to env.CXXFLAGS variable
+ """
+ self.start_msg('Checking allowed flags for c++ compiler')
+
+ supportedFlags = []
+ for flag in cxxflags:
+ if self.check_cxx (cxxflags=[flag], mandatory=False):
+ supportedFlags += [flag]
+
+ self.end_msg (' '.join (supportedFlags))
+ self.env.CXXFLAGS += supportedFlags
+
+# doxygen docs
+from waflib.Build import BuildContext
+class doxy (BuildContext):
+ cmd = "doxygen"
+ fun = "doxygen"
+
+def doxygen (bld):
+ if not bld.env.DOXYGEN:
+ bld.fatal ("ERROR: cannot build documentation (`doxygen' is not found in $PATH)")
+ bld (features="doxygen",
+ doxyfile='Doxyfile')
+
+# sphinx docs
+from waflib.Build import BuildContext
+class sphinx (BuildContext):
+ cmd = "sphinx"
+ fun = "sphinx"
+
+def sphinx (bld):
+    bld.load('sphinx_build', tooldir=['.waf-tools'])
+
+ bld (features="sphinx",
+ outdir = "doc/html",
+ source = "doc/source/conf.py")