build: Upgrade build system
This commit also disables almost all build targets, as they are broken now
diff --git a/.gitignore b/.gitignore
index 176dcee..3e8ce4a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,15 +1,6 @@
-.waf-*
+.waf-1*
+.waf3-1*
*.pyc
build/
.lock*
-chronoshare.db
-Makefile
-*.o
-moc_*
-ui_*
-*.app
-*.DS_Store
-osx/Frameworks/Sparkle.framework
-Sparkle*
-*priv.pem
gui/html.qrc
diff --git a/.waf-tools/boost.py b/.waf-tools/boost.py
new file mode 100644
index 0000000..9b9395e
--- /dev/null
+++ b/.waf-tools/boost.py
@@ -0,0 +1,518 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# partially based on boost.py written by Gernot Vormayr
+# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
+# modified by Bjoern Michaelsen, 2008
+# modified by Luca Fossati, 2008
+# rewritten for waf 1.5.1, Thomas Nagy, 2008
+# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
+
+'''
+
+This is an extra tool, not bundled with the default waf binary.
+To add the boost tool to the waf file:
+$ ./waf-light --tools=compat15,boost
+ or, if you have waf >= 1.6.2
+$ ./waf update --files=boost
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('compiler_cxx boost')
+
+ def configure(conf):
+ conf.load('compiler_cxx boost')
+ conf.check_boost(lib='system filesystem')
+
+ def build(bld):
+ bld(source='main.cpp', target='app', use='BOOST')
+
+Options are generated, in order to specify the location of boost includes/libraries.
+The `check_boost` configuration function allows to specify the used boost libraries.
+It can also provide default values for the --boost-mt command-line arguments.
+Everything will be packaged together in a BOOST component that you can use.
+
+When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
+ - you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
+ Errors: C4530
+ - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC
+ So before calling `conf.check_boost` you might want to disable it by adding
+ conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
+ Errors:
+ - boost might also be compiled with /MT, which links the runtime statically.
+ If you have problems with redefined symbols,
+ self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+ self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
+Passing `--boost-linkage_autodetect` might help ensuring having a correct linkage in some basic cases.
+
+'''
+
+import sys
+import re
+from waflib import Utils, Logs, Errors
+from waflib.Configure import conf
+from waflib.TaskGen import feature, after_method
+
+BOOST_LIBS = ['/usr/lib/x86_64-linux-gnu', '/usr/lib/i386-linux-gnu',
+ '/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
+BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
+BOOST_VERSION_FILE = 'boost/version.hpp'
+BOOST_VERSION_CODE = '''
+#include <iostream>
+#include <boost/version.hpp>
+int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
+'''
+
+BOOST_ERROR_CODE = '''
+#include <boost/system/error_code.hpp>
+int main() { boost::system::error_code c; }
+'''
+
+PTHREAD_CODE = '''
+#include <pthread.h>
+static void* f(void*) { return 0; }
+int main() {
+ pthread_t th;
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_create(&th, &attr, &f, 0);
+ pthread_join(th, 0);
+ pthread_cleanup_push(0, 0);
+ pthread_cleanup_pop(0);
+ pthread_attr_destroy(&attr);
+}
+'''
+
+BOOST_THREAD_CODE = '''
+#include <boost/thread.hpp>
+int main() { boost::thread t; }
+'''
+
+BOOST_LOG_CODE = '''
+#include <boost/log/trivial.hpp>
+#include <boost/log/utility/setup/console.hpp>
+#include <boost/log/utility/setup/common_attributes.hpp>
+int main() {
+ using namespace boost::log;
+ add_common_attributes();
+ add_console_log(std::clog, keywords::format = "%Message%");
+ BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl;
+}
+'''
+
+# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
+PLATFORM = Utils.unversioned_sys_platform()
+detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
+detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
+detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
+BOOST_TOOLSETS = {
+ 'borland': 'bcb',
+ 'clang': detect_clang,
+ 'como': 'como',
+ 'cw': 'cw',
+ 'darwin': 'xgcc',
+ 'edg': 'edg',
+ 'g++': detect_mingw,
+ 'gcc': detect_mingw,
+ 'icpc': detect_intel,
+ 'intel': detect_intel,
+ 'kcc': 'kcc',
+ 'kylix': 'bck',
+ 'mipspro': 'mp',
+ 'mingw': 'mgw',
+ 'msvc': 'vc',
+ 'qcc': 'qcc',
+ 'sun': 'sw',
+ 'sunc++': 'sw',
+ 'tru64cxx': 'tru',
+ 'vacpp': 'xlc'
+}
+
+
+def options(opt):
+ opt = opt.add_option_group('Boost Options')
+ opt.add_option('--boost-includes', type='string',
+ default='', dest='boost_includes',
+ help='''path to the directory where the boost includes are,
+ e.g., /path/to/boost_1_55_0/stage/include''')
+ opt.add_option('--boost-libs', type='string',
+ default='', dest='boost_libs',
+ help='''path to the directory where the boost libs are,
+ e.g., path/to/boost_1_55_0/stage/lib''')
+ opt.add_option('--boost-mt', action='store_true',
+ default=False, dest='boost_mt',
+ help='select multi-threaded libraries')
+ opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
+ help='''select libraries with tags (gd for debug, static is automatically added),
+ see doc Boost, Getting Started, chapter 6.1''')
+ opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
+ help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
+ opt.add_option('--boost-toolset', type='string',
+ default='', dest='boost_toolset',
+ help='force a toolset e.g. msvc, vc90, \
+ gcc, mingw, mgw45 (default: auto)')
+ py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
+ opt.add_option('--boost-python', type='string',
+ default=py_version, dest='boost_python',
+ help='select the lib python with this version \
+ (default: %s)' % py_version)
+
+
+@conf
+def __boost_get_version_file(self, d):
+ if not d:
+ return None
+ dnode = self.root.find_dir(d)
+ if dnode:
+ return dnode.find_node(BOOST_VERSION_FILE)
+ return None
+
+@conf
+def boost_get_version(self, d):
+ """silently retrieve the boost version number"""
+ node = self.__boost_get_version_file(d)
+ if node:
+ try:
+ txt = node.read()
+ except EnvironmentError:
+ Logs.error("Could not read the file %r" % node.abspath())
+ else:
+ re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
+ m1 = re_but1.search(txt)
+ re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
+ m2 = re_but2.search(txt)
+ if m1 and m2:
+ return (m1.group(1), m2.group(1))
+ return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")
+
+@conf
+def boost_get_includes(self, *k, **kw):
+ includes = k and k[0] or kw.get('includes', None)
+ if includes and self.__boost_get_version_file(includes):
+ return includes
+ for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
+ if self.__boost_get_version_file(d):
+ return d
+ if includes:
+ self.end_msg('headers not found in %s' % includes)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
+ self.fatal('The configuration failed')
+
+
+@conf
+def boost_get_toolset(self, cc):
+ toolset = cc
+ if not cc:
+ build_platform = Utils.unversioned_sys_platform()
+ if build_platform in BOOST_TOOLSETS:
+ cc = build_platform
+ else:
+ cc = self.env.CXX_NAME
+ if cc in BOOST_TOOLSETS:
+ toolset = BOOST_TOOLSETS[cc]
+ return isinstance(toolset, str) and toolset or toolset(self.env)
+
+
+@conf
+def __boost_get_libs_path(self, *k, **kw):
+ ''' return the lib path and all the files in it '''
+ if 'files' in kw:
+ return self.root.find_dir('.'), Utils.to_list(kw['files'])
+ libs = k and k[0] or kw.get('libs', None)
+ if libs:
+ path = self.root.find_dir(libs)
+ files = path.ant_glob('*boost_*')
+ if not libs or not files:
+ for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
+ if not d:
+ continue
+ path = self.root.find_dir(d)
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ path = self.root.find_dir(d + '64')
+ if path:
+ files = path.ant_glob('*boost_*')
+ if files:
+ break
+ if not path:
+ if libs:
+ self.end_msg('libs not found in %s' % libs)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
+ self.fatal('The configuration failed')
+
+ self.to_log('Found the boost path in %r with the libraries:' % path)
+ for x in files:
+ self.to_log(' %r' % x)
+ return path, files
+
+@conf
+def boost_get_libs(self, *k, **kw):
+ '''
+ return the lib path and the required libs
+ according to the parameters
+ '''
+ path, files = self.__boost_get_libs_path(**kw)
+ files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ toolset_pat = '(-%s[0-9]{0,3})' % toolset
+ version = '-%s' % self.env.BOOST_VERSION
+
+ def find_lib(re_lib, files):
+ for file in files:
+ if re_lib.search(file.name):
+ self.to_log('Found boost lib %s' % file)
+ return file
+ return None
+
+ def format_lib_name(name):
+ if name.startswith('lib') and self.env.CC_NAME != 'msvc':
+ name = name[3:]
+ return name[:name.rfind('.')]
+
+ def match_libs(lib_names, is_static):
+ libs = []
+ lib_names = Utils.to_list(lib_names)
+ if not lib_names:
+ return libs
+ t = []
+ if kw.get('mt', False):
+ t.append('-mt')
+ if kw.get('abi', None):
+ t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
+ elif is_static:
+ t.append('-s')
+ tags_pat = t and ''.join(t) or ''
+ ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
+ ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN
+
+ for lib in lib_names:
+ if lib == 'python':
+ # for instance, with python='27',
+ # accepts '-py27', '-py2', '27' and '2'
+ # but will reject '-py3', '-py26', '26' and '3'
+ tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'])
+ else:
+ tags = tags_pat
+ # Try the libraries, from the strictest match to the loosest
+ for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
+ 'boost_%s%s%s%s$' % (lib, tags, version, ext),
+ # Give up trying to find the right version
+ 'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
+ 'boost_%s%s%s$' % (lib, tags, ext),
+ 'boost_%s%s$' % (lib, ext),
+ 'boost_%s' % lib]:
+ self.to_log('Trying pattern %s' % pattern)
+ file = find_lib(re.compile(pattern), files)
+ if file:
+ libs.append(format_lib_name(file.name))
+ break
+ else:
+ self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
+ self.fatal('The configuration failed')
+ return libs
+
+ return path.abspath(), match_libs(kw.get('lib', None), False), match_libs(kw.get('stlib', None), True)
+
+@conf
+def _check_pthread_flag(self, *k, **kw):
+ '''
+ Computes which flags should be added to CXXFLAGS and LINKFLAGS to compile in multi-threading mode
+
+ Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3,
+ boost/thread.hpp will trigger a #error if -pthread isn't used:
+ boost/config/requires_threads.hpp:47:5: #error "Compiler threading support
+ is not turned on. Please set the correct command line options for
+ threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)"
+
+ Based on _BOOST_PTHREAD_FLAG(): https://github.com/tsuna/boost.m4/blob/master/build-aux/boost.m4
+ '''
+
+ var = kw.get('uselib_store', 'BOOST')
+
+ self.start_msg('Checking the flags needed to use pthreads')
+
+ # The ordering *is* (sometimes) important. Some notes on the
+ # individual items follow:
+ # (none): in case threads are in libc; should be tried before -Kthread and
+ # other compiler flags to prevent continual compiler warnings
+ # -lpthreads: AIX (must check this before -lpthread)
+ # -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
+ # -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
+ # -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
+ # -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads)
+ # -pthreads: Solaris/GCC
+ # -mthreads: MinGW32/GCC, Lynx/GCC
+ # -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
+ # doesn't hurt to check since this sometimes defines pthreads too;
+ # also defines -D_REENTRANT)
+ # ... -mt is also the pthreads flag for HP/aCC
+ # -lpthread: GNU Linux, etc.
+ # --thread-safe: KAI C++
+ if Utils.unversioned_sys_platform() == "sunos":
+ # On Solaris (at least, for some versions), libc contains stubbed
+ # (non-functional) versions of the pthreads routines, so link-based
+ # tests will erroneously succeed. (We need to link with -pthreads/-mt/
+ # -lpthread.) (The stubs are missing pthread_cleanup_push, or rather
+ # a function called by this macro, so we could check for that, but
+ # who knows whether they'll stub that too in a future libc.) So,
+ # we'll just look for -pthreads and -lpthread first:
+ boost_pthread_flags = ["-pthreads", "-lpthread", "-mt", "-pthread"]
+ else:
+ boost_pthread_flags = ["", "-lpthreads", "-Kthread", "-kthread", "-llthread", "-pthread",
+ "-pthreads", "-mthreads", "-lpthread", "--thread-safe", "-mt"]
+
+ for boost_pthread_flag in boost_pthread_flags:
+ try:
+ self.env.stash()
+ self.env['CXXFLAGS_%s' % var] += [boost_pthread_flag]
+ self.env['LINKFLAGS_%s' % var] += [boost_pthread_flag]
+ self.check_cxx(code=PTHREAD_CODE, msg=None, use=var, execute=False)
+
+ self.end_msg(boost_pthread_flag)
+ return
+ except self.errors.ConfigurationError:
+ self.env.revert()
+ self.end_msg('None')
+
+@conf
+def check_boost(self, *k, **kw):
+ """
+ Initialize boost libraries to be used.
+
+ Keywords: you can pass the same parameters as with the command line (without "--boost-").
+ Note that the command line has the priority, and should preferably be used.
+ """
+ if not self.env['CXX']:
+ self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
+
+ params = {
+ 'lib': k and k[0] or kw.get('lib', None),
+ 'stlib': kw.get('stlib', None)
+ }
+ for key, value in self.options.__dict__.items():
+ if not key.startswith('boost_'):
+ continue
+ key = key[len('boost_'):]
+ params[key] = value and value or kw.get(key, '')
+
+ var = kw.get('uselib_store', 'BOOST')
+
+ self.start_msg('Checking boost includes')
+ self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
+ versions = self.boost_get_version(inc)
+ self.env.BOOST_VERSION = versions[0]
+ self.env.BOOST_VERSION_NUMBER = int(versions[1])
+ self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
+ int(versions[1]) / 100 % 1000,
+ int(versions[1]) % 100))
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])
+
+ if not params['lib'] and not params['stlib']:
+ return
+ if 'static' in kw or 'static' in params:
+ Logs.warn('boost: static parameter is deprecated, use stlib instead.')
+ self.start_msg('Checking boost libs')
+ path, libs, stlibs = self.boost_get_libs(**params)
+ self.env['LIBPATH_%s' % var] = [path]
+ self.env['STLIBPATH_%s' % var] = [path]
+ self.env['LIB_%s' % var] = libs
+ self.env['STLIB_%s' % var] = stlibs
+ self.end_msg('ok')
+ if Logs.verbose:
+ Logs.pprint('CYAN', ' path : %s' % path)
+ Logs.pprint('CYAN', ' shared libs : %s' % libs)
+ Logs.pprint('CYAN', ' static libs : %s' % stlibs)
+
+ def has_shlib(lib):
+ return params['lib'] and lib in params['lib']
+ def has_stlib(lib):
+ return params['stlib'] and lib in params['stlib']
+ def has_lib(lib):
+ return has_shlib(lib) or has_stlib(lib)
+ if has_lib('thread'):
+ # not inside try_link to make check visible in the output
+ self._check_pthread_flag(k, kw)
+
+ def try_link():
+ if has_lib('system'):
+ self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
+ if has_lib('thread'):
+ self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
+ if has_lib('log'):
+ if not has_lib('thread'):
+ self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
+ if has_shlib('log'):
+ self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK']
+ self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
+
+ if params.get('linkage_autodetect', False):
+ self.start_msg("Attempting to detect boost linkage flags")
+ toolset = self.boost_get_toolset(kw.get('toolset', ''))
+ if toolset in ('vc',):
+ # disable auto-linking feature, causing error LNK1181
+ # because the code wants to be linked against
+ self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
+
+ # if no dlls are present, we guess the .lib files are not stubs
+ has_dlls = False
+ for x in Utils.listdir(path):
+ if x.endswith(self.env.cxxshlib_PATTERN % ''):
+ has_dlls = True
+ break
+ if not has_dlls:
+ self.env['STLIBPATH_%s' % var] = [path]
+ self.env['STLIB_%s' % var] = libs
+ del self.env['LIB_%s' % var]
+ del self.env['LIBPATH_%s' % var]
+
+ # we attempt to play with some known-to-work CXXFLAGS combinations
+ for cxxflags in (['/MD', '/EHsc'], []):
+ self.env.stash()
+ self.env["CXXFLAGS_%s" % var] += cxxflags
+ try:
+ try_link()
+ self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
+ exc = None
+ break
+ except Errors.ConfigurationError as e:
+ self.env.revert()
+ exc = e
+
+ if exc is not None:
+ self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
+ self.fatal('The configuration failed')
+ else:
+ self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
+ self.fatal('The configuration failed')
+ else:
+ self.start_msg('Checking for boost linkage')
+ try:
+ try_link()
+ except Errors.ConfigurationError as e:
+ self.end_msg("Could not link against boost libraries using supplied options")
+ self.fatal('The configuration failed')
+ self.end_msg('ok')
+
+
+@feature('cxx')
+@after_method('apply_link')
+def install_boost(self):
+ if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
+ return
+ install_boost.done = True
+ inst_to = getattr(self, 'install_path', '${BINDIR}')
+ for lib in self.env.LIB_BOOST:
+ try:
+ file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
+ self.bld.install_files(inst_to, self.bld.root.find_node(file))
+ except:
+ continue
+install_boost.done = False
diff --git a/.waf-tools/coverage.py b/.waf-tools/coverage.py
new file mode 100644
index 0000000..ce92883
--- /dev/null
+++ b/.waf-tools/coverage.py
@@ -0,0 +1,22 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+
+from waflib import TaskGen, Logs
+
+def options(opt):
+ opt.add_option('--with-coverage', action='store_true', default=False, dest='with_coverage',
+ help='''Set compiler flags for gcc to enable code coverage information''')
+
+def configure(conf):
+ if conf.options.with_coverage:
+ if not conf.options.debug:
+ conf.fatal("Code coverage flags require debug mode compilation (add --debug)")
+ conf.check_cxx(cxxflags=['-fprofile-arcs', '-ftest-coverage', '-fPIC'],
+ linkflags=['-fprofile-arcs'], uselib_store='GCOV', mandatory=True)
+
+@TaskGen.feature('cxx','cc')
+@TaskGen.after('process_source')
+def add_coverage(self):
+ if getattr(self, 'use', ''):
+ self.use += ' GCOV'
+ else:
+ self.use = 'GCOV'
diff --git a/.waf-tools/cryptopp.py b/.waf-tools/cryptopp.py
new file mode 100644
index 0000000..1632ab6
--- /dev/null
+++ b/.waf-tools/cryptopp.py
@@ -0,0 +1,122 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+'''
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('compiler_cxx cryptopp')
+
+ def configure(conf):
+ conf.load('compiler_cxx cryptopp')
+ conf.check_cryptopp()
+
+ def build(bld):
+ bld(source='main.cpp', target='app', use='CRYPTOPP')
+
+Options are generated, in order to specify the location of cryptopp includes/libraries.
+
+
+'''
+import sys
+import re
+from waflib import Utils,Logs,Errors
+from waflib.Configure import conf
+CRYPTOPP_DIR = ['/usr', '/usr/local', '/opt/local', '/sw']
+CRYPTOPP_VERSION_FILE = 'config.h'
+
+CRYPTOPP_CHECK_FRAGMENT = '''
+#include "../../src/security/v1/cryptopp.hpp"
+#include <iostream>
+
+int
+main()
+{
+ using namespace CryptoPP;
+
+ std::string buffer = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
+ SHA256 hash;
+ StringSource(buffer, true, new HashFilter(hash, new FileSink(std::cout)));
+ StringSource(reinterpret_cast<const uint8_t*>(buffer.c_str()), buffer.size(),
+ true, new HashFilter(hash, new FileSink(std::cout)));
+}
+'''
+
+def options(opt):
+ opt.add_option('--with-cryptopp', type='string', default=None, dest='cryptopp_dir',
+ help='''Path to where CryptoPP is installed, e.g., /usr/local''')
+
+@conf
+def __cryptopp_get_version_file(self, dir):
+ try:
+ return self.root.find_dir(dir).find_node('%s/%s' % ('include/cryptopp',
+ CRYPTOPP_VERSION_FILE))
+ except:
+ return None
+
+@conf
+def __cryptopp_find_root_and_version_file(self, *k, **kw):
+ root = k and k[0] or kw.get('path', self.options.cryptopp_dir)
+
+ file = self.__cryptopp_get_version_file(root)
+ if root and file:
+ return (root, file)
+ for dir in CRYPTOPP_DIR:
+ file = self.__cryptopp_get_version_file(dir)
+ if file:
+ return (dir, file)
+
+ if root:
+ self.fatal('CryptoPP not found in %s' % root)
+ else:
+ self.fatal('CryptoPP not found, please provide a --with-cryptopp=PATH argument (see help)')
+
+@conf
+def check_cryptopp(self, *k, **kw):
+ if not self.env['CXX']:
+ self.fatal('Load a c++ compiler first, e.g., conf.load("compiler_cxx")')
+
+ var = kw.get('uselib_store', 'CRYPTOPP')
+ mandatory = kw.get('mandatory', True)
+
+ use = kw.get('use', 'PTHREAD')
+
+ self.start_msg('Checking Crypto++ lib')
+ (root, file) = self.__cryptopp_find_root_and_version_file(*k, **kw)
+
+ try:
+ txt = file.read()
+ re_version = re.compile('^#define\\s+CRYPTOPP_VERSION\\s+([0-9]+)', re.M)
+ match = re_version.search(txt)
+
+ if match:
+ self.env.CRYPTOPP_VERSION = match.group(1)
+ v = int(self.env.CRYPTOPP_VERSION)
+ (major, minor, patch) = (int(v / 100), int(v % 100 / 10), int(v % 10))
+ self.end_msg("%d.%d.%d" % (major, minor, patch))
+ else:
+ self.fatal('CryptoPP files are present, but are not recognizable')
+ except:
+ self.fatal('CryptoPP not found or is not usable')
+
+ isLibWorking = False
+ for defines in [[], ['CRYPTOPP_DISABLE_ASM']]:
+ try:
+ self.check_cxx(msg='Checking if CryptoPP library works',
+ fragment=CRYPTOPP_CHECK_FRAGMENT,
+ lib='cryptopp',
+ includes="%s/include" % root,
+ libpath="%s/lib" % root,
+ mandatory=True,
+ use=use,
+ defines=defines,
+ uselib_store=var)
+ isLibWorking = True
+ break
+ except:
+ # try other flags
+ pass
+
+ if mandatory and not isLibWorking:
+ self.fatal('CryptoPP is present, but is not usable')
diff --git a/.waf-tools/default-compiler-flags.py b/.waf-tools/default-compiler-flags.py
new file mode 100644
index 0000000..2999e8f
--- /dev/null
+++ b/.waf-tools/default-compiler-flags.py
@@ -0,0 +1,181 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+
+from waflib import Logs, Configure, Utils
+
+def options(opt):
+ opt.add_option('--debug', '--with-debug', action='store_true', default=False, dest='debug',
+ help='''Compile in debugging mode without optimizations (-O0 or -Og)''')
+
+def configure(conf):
+ cxx = conf.env['CXX_NAME'] # CXX_NAME represents generic name of the compiler
+ if cxx == 'gcc':
+ flags = GccFlags()
+ elif cxx == 'clang':
+ flags = ClangFlags()
+ else:
+ flags = CompilerFlags()
+ Logs.warn('The code has not yet been tested with %s compiler' % cxx)
+
+ areCustomCxxflagsPresent = (len(conf.env.CXXFLAGS) > 0)
+
+ # General flags are always applied (e.g., selecting C++11 mode)
+ generalFlags = flags.getGeneralFlags(conf)
+ conf.add_supported_cxxflags(generalFlags['CXXFLAGS'])
+ conf.add_supported_linkflags(generalFlags['LINKFLAGS'])
+ conf.env.DEFINES += generalFlags['DEFINES']
+
+ # Debug or optimized CXXFLAGS and LINKFLAGS are applied only if the
+ # corresponding environment variables are not set.
+ # DEFINES are always applied.
+ if conf.options.debug:
+ extraFlags = flags.getDebugFlags(conf)
+ if areCustomCxxflagsPresent:
+ missingFlags = [x for x in extraFlags['CXXFLAGS'] if x not in conf.env.CXXFLAGS]
+ if len(missingFlags) > 0:
+ Logs.warn("Selected debug mode, but CXXFLAGS is set to a custom value '%s'"
+ % " ".join(conf.env.CXXFLAGS))
+ Logs.warn("Default flags '%s' are not activated" % " ".join(missingFlags))
+ else:
+ extraFlags = flags.getOptimizedFlags(conf)
+
+ if not areCustomCxxflagsPresent:
+ conf.add_supported_cxxflags(extraFlags['CXXFLAGS'])
+ conf.add_supported_linkflags(extraFlags['LINKFLAGS'])
+
+ conf.env.DEFINES += extraFlags['DEFINES']
+
+@Configure.conf
+def add_supported_cxxflags(self, cxxflags):
+ """
+ Check which cxxflags are supported by compiler and add them to env.CXXFLAGS variable
+ """
+ if len(cxxflags) == 0:
+ return
+
+ self.start_msg('Checking supported CXXFLAGS')
+
+ supportedFlags = []
+ for flag in cxxflags:
+ if self.check_cxx(cxxflags=['-Werror', flag], mandatory=False):
+ supportedFlags += [flag]
+
+ self.end_msg(' '.join(supportedFlags))
+ self.env.prepend_value('CXXFLAGS', supportedFlags)
+
+@Configure.conf
+def add_supported_linkflags(self, linkflags):
+ """
+ Check which linkflags are supported by compiler and add them to env.LINKFLAGS variable
+ """
+ if len(linkflags) == 0:
+ return
+
+ self.start_msg('Checking supported LINKFLAGS')
+
+ supportedFlags = []
+ for flag in linkflags:
+ if self.check_cxx(linkflags=['-Werror', flag], mandatory=False):
+ supportedFlags += [flag]
+
+ self.end_msg(' '.join(supportedFlags))
+ self.env.prepend_value('LINKFLAGS', supportedFlags)
+
+
+class CompilerFlags(object):
+ def getGeneralFlags(self, conf):
+ """Get dict of CXXFLAGS, LINKFLAGS, and DEFINES that are always needed"""
+ return {'CXXFLAGS': [], 'LINKFLAGS': [], 'DEFINES': []}
+
+ def getDebugFlags(self, conf):
+ """Get dict of CXXFLAGS, LINKFLAGS, and DEFINES that are needed only in debug mode"""
+ return {'CXXFLAGS': [], 'LINKFLAGS': [], 'DEFINES': ['_DEBUG']}
+
+ def getOptimizedFlags(self, conf):
+ """Get dict of CXXFLAGS, LINKFLAGS, and DEFINES that are needed only in optimized mode"""
+ return {'CXXFLAGS': [], 'LINKFLAGS': [], 'DEFINES': ['NDEBUG']}
+
+class GccBasicFlags(CompilerFlags):
+ """
+ This class defines basic flags that work for both gcc and clang compilers
+ """
+ def getDebugFlags(self, conf):
+ flags = super(GccBasicFlags, self).getDebugFlags(conf)
+ flags['CXXFLAGS'] += ['-O0',
+ '-g3',
+ '-pedantic',
+ '-Wall',
+ '-Wextra',
+ '-Werror',
+ '-Wno-unused-parameter',
+ '-Wno-error=maybe-uninitialized', # Bug #1615
+ '-Wno-error=deprecated-declarations', # Bug #3795
+ ]
+ return flags
+
+ def getOptimizedFlags(self, conf):
+ flags = super(GccBasicFlags, self).getOptimizedFlags(conf)
+ flags['CXXFLAGS'] += ['-O2',
+ '-g',
+ '-pedantic',
+ '-Wall',
+ '-Wextra',
+ '-Wno-unused-parameter',
+ ]
+ return flags
+
+class GccFlags(GccBasicFlags):
+ def getGeneralFlags(self, conf):
+ flags = super(GccFlags, self).getGeneralFlags(conf)
+ version = tuple(int(i) for i in conf.env['CC_VERSION'])
+ if version < (4, 8, 2):
+ conf.fatal('The version of gcc you are using (%s) is too old.\n' %
+ '.'.join(conf.env['CC_VERSION']) +
+ 'The minimum supported gcc version is 4.8.2.')
+ else:
+ flags['CXXFLAGS'] += ['-std=c++11']
+ return flags
+
+ def getDebugFlags(self, conf):
+ flags = super(GccFlags, self).getDebugFlags(conf)
+ version = tuple(int(i) for i in conf.env['CC_VERSION'])
+ if version < (5, 1, 0):
+ flags['CXXFLAGS'] += ['-Wno-missing-field-initializers']
+ flags['CXXFLAGS'] += ['-Og', # gcc >= 4.8
+ '-fdiagnostics-color', # gcc >= 4.9
+ ]
+ return flags
+
+ def getOptimizedFlags(self, conf):
+ flags = super(GccFlags, self).getOptimizedFlags(conf)
+ version = tuple(int(i) for i in conf.env['CC_VERSION'])
+ if version < (5, 1, 0):
+ flags['CXXFLAGS'] += ['-Wno-missing-field-initializers']
+ flags['CXXFLAGS'] += ['-fdiagnostics-color'] # gcc >= 4.9
+ return flags
+
+class ClangFlags(GccBasicFlags):
+ def getGeneralFlags(self, conf):
+ flags = super(ClangFlags, self).getGeneralFlags(conf)
+ flags['CXXFLAGS'] += ['-std=c++11']
+ if Utils.unversioned_sys_platform() == 'darwin':
+ flags['CXXFLAGS'] += ['-stdlib=libc++']
+ flags['LINKFLAGS'] += ['-stdlib=libc++']
+ return flags
+
+ def getDebugFlags(self, conf):
+ flags = super(ClangFlags, self).getDebugFlags(conf)
+ flags['CXXFLAGS'] += ['-fcolor-diagnostics',
+ '-Wno-unused-local-typedef', # Bugs #2657 and #3209
+ '-Wno-error=unneeded-internal-declaration', # Bug #1588
+ '-Wno-error=deprecated-register',
+ '-Wno-error=keyword-macro', # Bug #3235
+ '-Wno-error=infinite-recursion', # Bug #3358
+ ]
+ return flags
+
+ def getOptimizedFlags(self, conf):
+ flags = super(ClangFlags, self).getOptimizedFlags(conf)
+ flags['CXXFLAGS'] += ['-fcolor-diagnostics',
+ '-Wno-unused-local-typedef', # Bugs #2657 and #3209
+ ]
+ return flags
diff --git a/.waf-tools/doxygen.py b/.waf-tools/doxygen.py
new file mode 100644
index 0000000..6d8066b
--- /dev/null
+++ b/.waf-tools/doxygen.py
@@ -0,0 +1,214 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy 2008-2010 (ita)
+
+"""
+
+Doxygen support
+
+Variables passed to bld():
+* doxyfile -- the Doxyfile to use
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('doxygen')
+
+ def configure(conf):
+ conf.load('doxygen')
+ # check conf.env.DOXYGEN, if it is mandatory
+
+ def build(bld):
+ if bld.env.DOXYGEN:
+ bld(features="doxygen", doxyfile='Doxyfile', ...)
+
+ def doxygen(bld):
+ if bld.env.DOXYGEN:
+ bld(features="doxygen", doxyfile='Doxyfile', ...)
+"""
+
+from fnmatch import fnmatchcase
+import os, os.path, re, stat
+from waflib import Task, Utils, Node, Logs, Errors, Build
+from waflib.TaskGen import feature
+
+DOXY_STR = '"${DOXYGEN}" - '
+DOXY_FMTS = 'html latex man rft xml'.split()
+DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
+c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
+inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx
+'''.split())
+
+re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
+re_nl = re.compile('\r*\n', re.M)
+def parse_doxy(txt):
+ tbl = {}
+ txt = re_rl.sub('', txt)
+ lines = re_nl.split(txt)
+ for x in lines:
+ x = x.strip()
+ if not x or x.startswith('#') or x.find('=') < 0:
+ continue
+ if x.find('+=') >= 0:
+ tmp = x.split('+=')
+ key = tmp[0].strip()
+ if key in tbl:
+ tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
+ else:
+ tbl[key] = '+='.join(tmp[1:]).strip()
+ else:
+ tmp = x.split('=')
+ tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
+ return tbl
+
+class doxygen(Task.Task):
+ vars = ['DOXYGEN', 'DOXYFLAGS']
+ color = 'BLUE'
+
+ def runnable_status(self):
+        '''
+        self.pars is populated here in runnable_status, because this method
+        runs *before* both of the self.pars "consumers": scan() and run().
+
+        It also sets output_dir (a Node) for the generated output.
+        '''
+
+ for x in self.run_after:
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'pars', None):
+ txt = self.inputs[0].read()
+ self.pars = parse_doxy(txt)
+ if not self.pars.get('OUTPUT_DIRECTORY'):
+ self.pars['OUTPUT_DIRECTORY'] = self.inputs[0].parent.get_bld().abspath()
+
+ # Override with any parameters passed to the task generator
+ if getattr(self.generator, 'pars', None):
+ for k, v in self.generator.pars.items():
+ self.pars[k] = v
+
+ self.doxy_inputs = getattr(self, 'doxy_inputs', [])
+ if not self.pars.get('INPUT'):
+ self.doxy_inputs.append(self.inputs[0].parent)
+ else:
+ for i in self.pars.get('INPUT').split():
+ if os.path.isabs(i):
+ node = self.generator.bld.root.find_node(i)
+ else:
+ node = self.generator.path.find_node(i)
+ if not node:
+ self.generator.bld.fatal('Could not find the doxygen input %r' % i)
+ self.doxy_inputs.append(node)
+
+ if not getattr(self, 'output_dir', None):
+ bld = self.generator.bld
+ # First try to find an absolute path, then find or declare a relative path
+ self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
+ if not self.output_dir:
+ self.output_dir = bld.path.find_or_declare(self.pars['OUTPUT_DIRECTORY'])
+
+ self.signature()
+ return Task.Task.runnable_status(self)
+
+ def scan(self):
+ exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
+ file_patterns = self.pars.get('FILE_PATTERNS','').split()
+ if not file_patterns:
+ file_patterns = DOXY_FILE_PATTERNS
+ if self.pars.get('RECURSIVE') == 'YES':
+ file_patterns = ["**/%s" % pattern for pattern in file_patterns]
+ nodes = []
+ names = []
+ for node in self.doxy_inputs:
+ if os.path.isdir(node.abspath()):
+ for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
+ nodes.append(m)
+ else:
+ nodes.append(node)
+ return (nodes, names)
+
+ def run(self):
+ dct = self.pars.copy()
+ dct['INPUT'] = ' '.join(['"%s"' % x.abspath() for x in self.doxy_inputs])
+ code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
+ code = code.encode() # for python 3
+ #fmt = DOXY_STR % (self.inputs[0].parent.abspath())
+ cmd = Utils.subst_vars(DOXY_STR, self.env)
+ env = self.env.env or None
+ proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.generator.bld.path.get_bld().abspath())
+ proc.communicate(code)
+ return proc.returncode
+
+ def post_run(self):
+ nodes = self.output_dir.ant_glob('**/*', quiet=True)
+ for x in nodes:
+ x.sig = Utils.h_file(x.abspath())
+ self.outputs += nodes
+ return Task.Task.post_run(self)
+
+class tar(Task.Task):
+ "quick tar creation"
+ run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
+ color = 'RED'
+ after = ['doxygen']
+ def runnable_status(self):
+ for x in getattr(self, 'input_tasks', []):
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'tar_done_adding', None):
+ # execute this only once
+ self.tar_done_adding = True
+ for x in getattr(self, 'input_tasks', []):
+ self.set_inputs(x.outputs)
+ if not self.inputs:
+ return Task.SKIP_ME
+ return Task.Task.runnable_status(self)
+
+ def __str__(self):
+ tgt_str = ' '.join([a.nice_path(self.env) for a in self.outputs])
+ return '%s: %s\n' % (self.__class__.__name__, tgt_str)
+
+@feature('doxygen')
+def process_doxy(self):
+ if not getattr(self, 'doxyfile', None):
+ self.generator.bld.fatal('no doxyfile??')
+
+ node = self.doxyfile
+ if not isinstance(node, Node.Node):
+ node = self.path.find_resource(node)
+ if not node:
+ raise ValueError('doxygen file not found')
+
+ # the task instance
+ dsk = self.create_task('doxygen', node)
+
+ if getattr(self, 'doxy_tar', None):
+ tsk = self.create_task('tar')
+ tsk.input_tasks = [dsk]
+ tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
+ if self.doxy_tar.endswith('bz2'):
+ tsk.env['TAROPTS'] = ['cjf']
+ elif self.doxy_tar.endswith('gz'):
+ tsk.env['TAROPTS'] = ['czf']
+ else:
+ tsk.env['TAROPTS'] = ['cf']
+
+def configure(conf):
+ '''
+ Check if doxygen and tar commands are present in the system
+
+    If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
+    variables will be set. Detection can be controlled by setting the DOXYGEN
+    and TAR environment variables.
+ '''
+
+ conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
+ conf.find_program('tar', var='TAR', mandatory=False)
+
+# doxygen docs
+from waflib.Build import BuildContext
+class doxy(BuildContext):
+ cmd = "doxygen"
+ fun = "doxygen"
diff --git a/.waf-tools/osx-frameworks.py b/.waf-tools/osx-frameworks.py
new file mode 100644
index 0000000..7830f8f
--- /dev/null
+++ b/.waf-tools/osx-frameworks.py
@@ -0,0 +1,70 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+from waflib import Logs, Utils
+from waflib.Configure import conf
+
+OSX_SECURITY_CODE='''
+#include <CoreFoundation/CoreFoundation.h>
+#include <Security/Security.h>
+#include <Security/SecRandom.h>
+#include <CoreServices/CoreServices.h>
+#include <Security/SecDigestTransform.h>
+
+int main(int argc, char **argv) {
+ (void)argc; (void)argv;
+ return 0;
+}
+'''
+
+@conf
+def configure(conf):
+ if Utils.unversioned_sys_platform () == "darwin":
+ conf.check_cxx(framework_name='Foundation', uselib_store='OSX_FOUNDATION', mandatory=False, compile_filename='test.mm')
+ conf.check_cxx(framework_name='AppKit', uselib_store='OSX_APPKIT', mandatory=False, compile_filename='test.mm')
+ conf.check_cxx(framework_name='CoreWLAN', uselib_store='OSX_COREWLAN', define_name='HAVE_COREWLAN',
+ use="OSX_FOUNDATION", mandatory=False, compile_filename='test.mm')
+
+ if conf.options.autoupdate:
+ def check_sparkle(**kwargs):
+ conf.check_cxx (framework_name="Sparkle", header_name=["Foundation/Foundation.h", "AppKit/AppKit.h"],
+ uselib_store='OSX_SPARKLE', define_name='HAVE_SPARKLE', mandatory=True,
+ compile_filename='test.mm', use="OSX_FOUNDATION OSX_APPKIT",
+ **kwargs
+ )
+ try:
+ # Try standard paths first
+ check_sparkle()
+ except:
+ try:
+ # Try local path
+ Logs.info ("Check local version of Sparkle framework")
+ check_sparkle(cxxflags="-F%s/osx/Frameworks/" % conf.path.abspath(),
+ linkflags="-F%s/osx/Frameworks/" % conf.path.abspath())
+ conf.env.HAVE_LOCAL_SPARKLE = 1
+ except:
+ import urllib, subprocess, os, shutil
+ if not os.path.exists('osx/Frameworks/Sparkle.framework'):
+ # Download to local path and retry
+ Logs.info ("Sparkle framework not found, trying to download it to 'build/'")
+
+ urllib.urlretrieve ("http://sparkle.andymatuschak.org/files/Sparkle%201.5b6.zip", "build/Sparkle.zip")
+ if os.path.exists('build/Sparkle.zip'):
+ try:
+ subprocess.check_call (['unzip', '-qq', 'build/Sparkle.zip', '-d', 'build/Sparkle'])
+ os.remove ("build/Sparkle.zip")
+ if not os.path.exists("osx/Frameworks"):
+ os.mkdir ("osx/Frameworks")
+ os.rename ("build/Sparkle/Sparkle.framework", "osx/Frameworks/Sparkle.framework")
+ shutil.rmtree("build/Sparkle", ignore_errors=True)
+
+ check_sparkle(cxxflags="-F%s/osx/Frameworks/" % conf.path.abspath(),
+ linkflags="-F%s/osx/Frameworks/" % conf.path.abspath())
+ conf.env.HAVE_LOCAL_SPARKLE = 1
+ except subprocess.CalledProcessError as e:
+ conf.fatal("Cannot find Sparkle framework. Auto download failed: '%s' returned %s" % (' '.join(e.cmd), e.returncode))
+ except:
+ conf.fatal("Unknown Error happened when auto downloading Sparkle framework")
+
+ if conf.is_defined('HAVE_SPARKLE'):
+ conf.env.HAVE_SPARKLE = 1 # small cheat for wscript
diff --git a/.waf-tools/protoc.py b/.waf-tools/protoc.py
new file mode 100644
index 0000000..dc3bed4
--- /dev/null
+++ b/.waf-tools/protoc.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# encoding: utf-8
+# Philipp Bender, 2012
+# Matt Clarkson, 2012
+
+import re
+from waflib.Task import Task
+from waflib.TaskGen import extension
+
+"""
+A simple tool to integrate protocol buffers into your build system.
+
+Example::
+
+ def configure(conf):
+ conf.load('compiler_cxx cxx protoc')
+
+ def build(bld):
+ bld(
+            features = 'cxx cxxprogram',
+ source = 'main.cpp file1.proto proto/file2.proto',
+ include = '. proto',
+ target = 'executable')
+
+Notes when using this tool:
+
+- protoc command line parsing is tricky.
+
+ The generated files can be put in subfolders which depend on
+ the order of the include paths.
+
+ Try to be simple when creating task generators
+ containing protoc stuff.
+
+"""
+
+class protoc(Task):
+ # protoc expects the input proto file to be an absolute path.
+ run_str = '${PROTOC} ${PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${SRC[0].abspath()}'
+ color = 'BLUE'
+ ext_out = ['.h', 'pb.cc']
+ def scan(self):
+ """
+ Scan .proto dependencies
+ """
+ node = self.inputs[0]
+
+ nodes = []
+ names = []
+ seen = []
+
+ if not node: return (nodes, names)
+
+ def parse_node(node):
+ if node in seen:
+ return
+ seen.append(node)
+ code = node.read().splitlines()
+ for line in code:
+ m = re.search(r'^import\s+"(.*)";.*(//)?.*', line)
+ if m:
+ dep = m.groups()[0]
+ for incpath in self.env.INCPATHS:
+ found = incpath.find_resource(dep)
+ if found:
+ nodes.append(found)
+ parse_node(found)
+ else:
+ names.append(dep)
+
+ parse_node(node)
+ return (nodes, names)
+
+@extension('.proto')
+def process_protoc(self, node):
+ cpp_node = node.change_ext('.pb.cc')
+ hpp_node = node.change_ext('.pb.h')
+ self.create_task('protoc', node, [cpp_node, hpp_node])
+ self.source.append(cpp_node)
+
+ if 'cxx' in self.features and not self.env.PROTOC_FLAGS:
+ self.env.PROTOC_FLAGS = ['--cpp_out=%s' % node.parent.get_bld().abspath(),
+ '--proto_path=%s' % node.parent.get_src().abspath()]
+
+ use = getattr(self, 'use', '')
+ if not 'PROTOBUF' in use:
+ self.use = self.to_list(use) + ['PROTOBUF']
+
+def configure(conf):
+ conf.check_cfg(package="protobuf", uselib_store="PROTOBUF", args=['--cflags', '--libs'])
+ conf.find_program('protoc', var='PROTOC')
+ conf.env.PROTOC_ST = '-I%s'
diff --git a/.waf-tools/sanitizers.py b/.waf-tools/sanitizers.py
new file mode 100644
index 0000000..a8fe55d
--- /dev/null
+++ b/.waf-tools/sanitizers.py
@@ -0,0 +1,22 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+
+def options(opt):
+ opt.add_option('--with-sanitizer', action='store', default='', dest='sanitizers',
+ help='Comma-separated list of compiler sanitizers to enable [default=none]')
+
+def configure(conf):
+ for san in conf.options.sanitizers.split(','):
+ if not san:
+ continue
+
+ sanflag = '-fsanitize=%s' % san
+ conf.start_msg('Checking if compiler supports %s' % sanflag)
+
+ if conf.check_cxx(cxxflags=['-Werror', sanflag, '-fno-omit-frame-pointer'],
+ linkflags=[sanflag], mandatory=False):
+ conf.end_msg('yes')
+ conf.env.append_unique('CXXFLAGS', [sanflag, '-fno-omit-frame-pointer'])
+ conf.env.append_unique('LINKFLAGS', [sanflag])
+ else:
+ conf.end_msg('no', color='RED')
+ conf.fatal('%s sanitizer is not supported by the current compiler' % san)
diff --git a/.waf-tools/sphinx_build.py b/.waf-tools/sphinx_build.py
new file mode 100644
index 0000000..e61da6e
--- /dev/null
+++ b/.waf-tools/sphinx_build.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# inspired by code by Hans-Martin von Gaudecker, 2012
+
+import os
+from waflib import Node, Task, TaskGen, Errors, Logs, Build, Utils
+
+class sphinx_build(Task.Task):
+ color = 'BLUE'
+ run_str = '${SPHINX_BUILD} -D ${VERSION} -D ${RELEASE} -q -b ${BUILDERNAME} -d ${DOCTREEDIR} ${SRCDIR} ${OUTDIR}'
+
+ def __str__(self):
+ env = self.env
+ src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
+ tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
+ if self.outputs: sep = ' -> '
+ else: sep = ''
+ return'%s [%s]: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),
+ self.env['BUILDERNAME'], src_str, sep, tgt_str)
+
+@TaskGen.extension('.py', '.rst')
+def sig_hook(self, node):
+ node.sig=Utils.h_file(node.abspath())
+
+@TaskGen.feature("sphinx")
+@TaskGen.before_method("process_source")
+def apply_sphinx(self):
+ """Set up the task generator with a Sphinx instance and create a task."""
+
+ inputs = []
+ for i in Utils.to_list(self.source):
+ if not isinstance(i, Node.Node):
+ node = self.path.find_node(node)
+ else:
+ node = i
+ if not node:
+ raise ValueError('[%s] file not found' % i)
+ inputs.append(node)
+
+ task = self.create_task('sphinx_build', inputs)
+
+ conf = self.path.find_node(self.config)
+ task.inputs.append(conf)
+
+ confdir = conf.parent.abspath()
+ buildername = getattr(self, "builder", "html")
+ srcdir = getattr(self, "srcdir", confdir)
+ outdir = self.path.find_or_declare(getattr(self, "outdir", buildername)).get_bld()
+ doctreedir = getattr(self, "doctreedir", os.path.join(outdir.abspath(), ".doctrees"))
+
+ task.env['BUILDERNAME'] = buildername
+ task.env['SRCDIR'] = srcdir
+ task.env['DOCTREEDIR'] = doctreedir
+ task.env['OUTDIR'] = outdir.abspath()
+ task.env['VERSION'] = "version=%s" % self.VERSION
+ task.env['RELEASE'] = "release=%s" % self.VERSION
+
+ import imp
+ confData = imp.load_source('sphinx_conf', conf.abspath())
+
+ if buildername == "man":
+ for i in confData.man_pages:
+ target = outdir.find_or_declare('%s.%d' % (i[1], i[4]))
+ task.outputs.append(target)
+
+ if self.install_path:
+ self.bld.install_files("%s/man%d/" % (self.install_path, i[4]), target)
+ else:
+ task.outputs.append(outdir)
+
+def configure(conf):
+ conf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False)
+
+# sphinx docs
+from waflib.Build import BuildContext
+class sphinx(BuildContext):
+ cmd = "sphinx"
+ fun = "sphinx"
diff --git a/.waf-tools/sqlite3.py b/.waf-tools/sqlite3.py
new file mode 100644
index 0000000..3d4e46e
--- /dev/null
+++ b/.waf-tools/sqlite3.py
@@ -0,0 +1,38 @@
+#! /usr/bin/env python
+# encoding: utf-8
+
+from waflib import Options, Logs
+from waflib.Configure import conf
+
+def options(opt):
+ opt.add_option('--with-sqlite3', type='string', default=None,
+ dest='with_sqlite3', help='''Path to SQLite3, e.g., /usr/local''')
+
+@conf
+def check_sqlite3(self, *k, **kw):
+ root = k and k[0] or kw.get('path', None) or Options.options.with_sqlite3
+ mandatory = kw.get('mandatory', True)
+ var = kw.get('uselib_store', 'SQLITE3')
+
+ if root:
+ self.check_cxx(lib='sqlite3',
+ msg='Checking for SQLite3 library',
+ define_name='HAVE_%s' % var,
+ uselib_store=var,
+ mandatory=mandatory,
+ includes="%s/include" % root,
+ libpath="%s/lib" % root)
+ else:
+ try:
+ self.check_cfg(package='sqlite3',
+ args=['--cflags', '--libs'],
+ global_define=True,
+ define_name='HAVE_%s' % var,
+ uselib_store='SQLITE3',
+ mandatory=True)
+ except:
+ self.check_cxx(lib='sqlite3',
+ msg='Checking for SQLite3 library',
+ define_name='HAVE_%s' % var,
+ uselib_store=var,
+ mandatory=mandatory)
diff --git a/waf-tools/tinyxml.py b/.waf-tools/tinyxml.py
similarity index 100%
rename from waf-tools/tinyxml.py
rename to .waf-tools/tinyxml.py
diff --git a/Makefile b/Makefile
deleted file mode 100644
index a7496ec..0000000
--- a/Makefile
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/make -f
-# Waf Makefile wrapper
-
-all:
- @./waf build
-
-all-debug:
- @./waf -v build
-
-all-progress:
- @./waf -p build
-
-install:
- ./waf install --yes;
-
-uninstall:
- ./waf uninstall
-
-clean:
- @./waf clean
-
-distclean:
- @./waf distclean
- @-rm -rf build
-
-check:
- @./waf check
-
-dist:
- @./waf dist
-
-.PHONY: clean dist distclean check uninstall install all
-
diff --git a/adhoc/adhoc-osx.mm b/adhoc/adhoc-osx.mm
index 7d2b1df..df2ce1c 100644
--- a/adhoc/adhoc-osx.mm
+++ b/adhoc/adhoc-osx.mm
@@ -21,7 +21,7 @@
*/
#include "adhoc.h"
-#include "config.h"
+#include "core/chronoshare-config.hpp"
#if (__APPLE__ && HAVE_COREWLAN)
diff --git a/adhoc/adhoc.h b/adhoc/adhoc.h
index 6181ecb..a420fc3 100644
--- a/adhoc/adhoc.h
+++ b/adhoc/adhoc.h
@@ -22,7 +22,7 @@
#ifndef CHRONOSHARE_ADHOC_H
#define CHRONOSHARE_ADHOC_H
-#include "config.h"
+#include "core/chronoshare-config.hpp"
#if (__APPLE__ && HAVE_COREWLAN)
#define ADHOC_SUPPORTED 1
diff --git a/gui/chronosharegui.cpp b/gui/chronosharegui.cpp
index 8405928..ef28d5e 100644
--- a/gui/chronosharegui.cpp
+++ b/gui/chronosharegui.cpp
@@ -22,7 +22,7 @@
*/
#include "chronosharegui.h"
-#include "config.h"
+#include "core/chronoshare-config.hpp"
#include "logging.h"
#include "ndnx-wrapper.h"
diff --git a/src/logging.h b/src/logging.h
index 975153d..a75c0f4 100644
--- a/src/logging.h
+++ b/src/logging.h
@@ -22,7 +22,7 @@
#ifndef LOGGING_H
#define LOGGING_H
-#include "config.h"
+#include "core/chronoshare-config.hpp"
#ifdef HAVE_LOG4CXX
diff --git a/test/main.cpp b/test/main.cpp
new file mode 100644
index 0000000..9aeb059
--- /dev/null
+++ b/test/main.cpp
@@ -0,0 +1,24 @@
+/* -*- Mode:C++; c-file-style:"gnu"; indent-tabs-mode:nil; -*- */
+/**
+ * Copyright (c) 2013-2016, Regents of the University of California.
+ *
+ * This file is part of ChronoShare, a decentralized file sharing application over NDN.
+ *
+ * ChronoShare is free software: you can redistribute it and/or modify it under the terms
+ * of the GNU General Public License as published by the Free Software Foundation, either
+ * version 3 of the License, or (at your option) any later version.
+ *
+ * ChronoShare is distributed in the hope that it will be useful, but WITHOUT ANY
+ * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+ * PARTICULAR PURPOSE. See the GNU General Public License for more details.
+ *
+ * You should have received copies of the GNU General Public License along with
+ * ChronoShare, e.g., in COPYING.md file. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * See AUTHORS.md for complete list of ChronoShare authors and contributors.
+ */
+
+#define BOOST_TEST_MAIN 1
+#define BOOST_TEST_DYN_LINK 1
+
+#include <boost/test/unit_test.hpp>
diff --git a/test/test-action-log.cc b/test/unit-tests/test-action-log.cc
similarity index 100%
rename from test/test-action-log.cc
rename to test/unit-tests/test-action-log.cc
diff --git a/test/test-dispatcher.cc b/test/unit-tests/test-dispatcher.cc
similarity index 100%
rename from test/test-dispatcher.cc
rename to test/unit-tests/test-dispatcher.cc
diff --git a/test/test-event-scheduler.cc b/test/unit-tests/test-event-scheduler.cc
similarity index 100%
rename from test/test-event-scheduler.cc
rename to test/unit-tests/test-event-scheduler.cc
diff --git a/test/test-executor.cc b/test/unit-tests/test-executor.cc
similarity index 100%
rename from test/test-executor.cc
rename to test/unit-tests/test-executor.cc
diff --git a/test/test-fetch-manager.cc b/test/unit-tests/test-fetch-manager.cc
similarity index 100%
rename from test/test-fetch-manager.cc
rename to test/unit-tests/test-fetch-manager.cc
diff --git a/test/test-fetch-task-db.cc b/test/unit-tests/test-fetch-task-db.cc
similarity index 100%
rename from test/test-fetch-task-db.cc
rename to test/unit-tests/test-fetch-task-db.cc
diff --git a/test/test-fs-watcher-delay.cc b/test/unit-tests/test-fs-watcher-delay.cc
similarity index 98%
rename from test/test-fs-watcher-delay.cc
rename to test/unit-tests/test-fs-watcher-delay.cc
index 162d1d4..a44bde1 100644
--- a/test/test-fs-watcher-delay.cc
+++ b/test/unit-tests/test-fs-watcher-delay.cc
@@ -41,7 +41,7 @@
void SlowWrite(fs::path & file)
{
fs::ofstream off(file, std::ios::out);
-
+
for (int i = 0; i < 10; i++){
off << i << endl;
usleep(200000);
@@ -62,12 +62,12 @@
FsWatcher::LocalFile_Change_Callback fileDelete = boost::bind(onDelete, _1);
fs::path file = dir / "test.text";
-
+
thread watcherThread(run, dir, fileChange, fileDelete);
thread writeThread(SlowWrite, file);
-
-
+
+
usleep(10000000);
diff --git a/test/test-fs-watcher.cc b/test/unit-tests/test-fs-watcher.cc
similarity index 100%
rename from test/test-fs-watcher.cc
rename to test/unit-tests/test-fs-watcher.cc
diff --git a/test/test-ndnx-name.cc b/test/unit-tests/test-ndnx-name.cc
similarity index 90%
rename from test/test-ndnx-name.cc
rename to test/unit-tests/test-ndnx-name.cc
index 6022ec2..a876ba4 100644
--- a/test/test-ndnx-name.cc
+++ b/test/unit-tests/test-ndnx-name.cc
@@ -1,17 +1,17 @@
-#include "ndnx-name.h"
+#include "ccnx-name.h"
#define BOOST_TEST_MAIN 1
#include <boost/test/unit_test.hpp>
-using namespace Ndnx;
+using namespace Ccnx;
using namespace std;
using namespace boost;
-BOOST_AUTO_TEST_SUITE(NdnxNameTests)
+BOOST_AUTO_TEST_SUITE(CcnxNameTests)
-BOOST_AUTO_TEST_CASE (NdnxNameTest)
+BOOST_AUTO_TEST_CASE (CcnxNameTest)
{
Name empty = Name();
Name root = Name("/");
diff --git a/test/test-ndnx-wrapper.cc b/test/unit-tests/test-ndnx-wrapper.cc
similarity index 91%
rename from test/test-ndnx-wrapper.cc
rename to test/unit-tests/test-ndnx-wrapper.cc
index eaf0ef3..9989cdb 100644
--- a/test/test-ndnx-wrapper.cc
+++ b/test/unit-tests/test-ndnx-wrapper.cc
@@ -19,25 +19,25 @@
* Alexander Afanasyev <alexander.afanasyev@ucla.edu>
*/
-#include "ndnx-wrapper.h"
-#include "ndnx-closure.h"
-#include "ndnx-name.h"
-#include "ndnx-selectors.h"
-#include "ndnx-pco.h"
+#include "ccnx-wrapper.h"
+#include "ccnx-closure.h"
+#include "ccnx-name.h"
+#include "ccnx-selectors.h"
+#include "ccnx-pco.h"
#include <unistd.h>
#include <boost/date_time/posix_time/posix_time.hpp>
#include <boost/test/unit_test.hpp>
#include <boost/make_shared.hpp>
-using namespace Ndnx;
+using namespace Ccnx;
using namespace std;
using namespace boost;
-BOOST_AUTO_TEST_SUITE(TestNdnxWrapper)
+BOOST_AUTO_TEST_SUITE(TestCcnxWrapper)
-NdnxWrapperPtr c1;
-NdnxWrapperPtr c2;
+CcnxWrapperPtr c1;
+CcnxWrapperPtr c2;
int g_timeout_counter = 0;
int g_dataCallback_counter = 0;
@@ -53,7 +53,7 @@
c2->publishData(name, (const unsigned char*)content.c_str(), content.size(), 5);
}
-void dataCallback(const Name &name, Ndnx::PcoPtr pco)
+void dataCallback(const Name &name, Ccnx::PcoPtr pco)
{
cout << " in data callback" << endl;
BytesPtr content = pco->contentPtr ();
@@ -62,7 +62,7 @@
BOOST_CHECK_EQUAL(name, msg);
}
-void encapCallback(const Name &name, Ndnx::PcoPtr pco)
+void encapCallback(const Name &name, Ccnx::PcoPtr pco)
{
cout << " in encap data callback" << endl;
BOOST_CHECK(!c1->verify(pco));
@@ -84,11 +84,11 @@
{
if (!c1)
{
- c1 = make_shared<NdnxWrapper> ();
+ c1 = make_shared<CcnxWrapper> ();
}
if (!c2)
{
- c2 = make_shared<NdnxWrapper> ();
+ c2 = make_shared<CcnxWrapper> ();
}
}
@@ -106,7 +106,7 @@
}
-BOOST_AUTO_TEST_CASE (BlaNdnxWrapperTest)
+BOOST_AUTO_TEST_CASE (BlaCcnxWrapperTest)
{
INIT_LOGGERS ();
@@ -132,7 +132,7 @@
teardown();
}
-BOOST_AUTO_TEST_CASE (NdnxWrapperSelector)
+BOOST_AUTO_TEST_CASE (CcnxWrapperSelector)
{
setup();
@@ -220,7 +220,7 @@
/*
- BOOST_AUTO_TEST_CASE (NdnxWrapperUnsigningTest)
+ BOOST_AUTO_TEST_CASE (CcnxWrapperUnsigningTest)
{
setup();
Bytes data;
diff --git a/test/test-object-manager.cc b/test/unit-tests/test-object-manager.cc
similarity index 100%
rename from test/test-object-manager.cc
rename to test/unit-tests/test-object-manager.cc
diff --git a/test/test-protobuf.cc b/test/unit-tests/test-protobuf.cc
similarity index 100%
rename from test/test-protobuf.cc
rename to test/unit-tests/test-protobuf.cc
diff --git a/test/test-serve-and-fetch.cc b/test/unit-tests/test-serve-and-fetch.cc
similarity index 100%
rename from test/test-serve-and-fetch.cc
rename to test/unit-tests/test-serve-and-fetch.cc
diff --git a/test/test-sync-core.cc b/test/unit-tests/test-sync-core.cc
similarity index 100%
rename from test/test-sync-core.cc
rename to test/unit-tests/test-sync-core.cc
diff --git a/test/test-sync-log.cc b/test/unit-tests/test-sync-log.cc
similarity index 100%
rename from test/test-sync-log.cc
rename to test/unit-tests/test-sync-log.cc
diff --git a/test/wscript b/test/wscript
new file mode 100644
index 0000000..4de283c
--- /dev/null
+++ b/test/wscript
@@ -0,0 +1,25 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+
+top = '..'
+
+from waflib import Logs
+
+def build(bld):
+ Logs.error("Unit tests are temporary disabled")
+ return
+
+ bld(features='cxx',
+ target='unit-tests-main',
+ name='unit-tests-main',
+ source='main.cpp',
+ use='BOOST',
+ defines=['BOOST_TEST_MODULE=CHRONOSHARE Unit Test'])
+
+ unit_tests = bld.program(
+ target='../unit-tests',
+ features='cxx cxxprogram',
+ source=bld.path.ant_glob(['**/*.cpp'], excl=['main.cpp']),
+ use='unit-tests-main',
+ install_path=None,
+ defines='UNIT_TEST_CONFIG_PATH=\"%s/tmp-files/\"' % (bld.bldnode)
+ )
diff --git a/waf b/waf
index b780e61..fa68e1a 100755
--- a/waf
+++ b/waf
Binary files differ
diff --git a/waf-tools/flags.py b/waf-tools/flags.py
deleted file mode 100644
index 416b772..0000000
--- a/waf-tools/flags.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-from waflib import Configure
-
-@Configure.conf
-def add_supported_cflags(self, cflags):
- """
- Check which cflags are supported by compiler and add them to env.CFLAGS variable
- """
- self.start_msg('Checking allowed flags for c compiler')
-
- supportedFlags = []
- for flag in cflags:
- if self.check_cc (cflags=[flag], mandatory=False):
- supportedFlags += [flag]
-
- self.end_msg (' '.join (supportedFlags))
- self.env.CFLAGS += supportedFlags
-
-def configure(conf):
- conf.load ('gnu_dirs')
-
- if conf.options.debug:
- conf.define ('_DEBUG', 1)
- conf.add_supported_cflags (cflags = ['-O0',
- '-Wall',
- '-Wno-unused-variable',
- '-g3',
- '-Wno-unused-private-field', # only clang supports
- '-fcolor-diagnostics', # only clang supports
- '-Qunused-arguments' # only clang supports
- ])
- else:
- conf.add_supported_cflags (cflags = ['-O3', '-g'])
-
-def options(opt):
- opt.load ('gnu_dirs')
- opt.add_option('--debug',action='store_true',default=False,dest='debug',help='''debugging mode''')
diff --git a/waf-tools/ndnx.py b/waf-tools/ndnx.py
deleted file mode 100644
index fccad90..0000000
--- a/waf-tools/ndnx.py
+++ /dev/null
@@ -1,160 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-'''
-
-When using this tool, the wscript will look like:
-
- def options(opt):
- opt.tool_options('ndnx')
-
- def configure(conf):
- conf.load('compiler_c ndnx')
-
- def build(bld):
- bld(source='main.cpp', target='app', use='NDNX')
-
-Options are generated, in order to specify the location of ndnx includes/libraries.
-
-
-'''
-import sys, re
-from waflib import Utils, Logs, Errors, Options, ConfigSet
-from waflib.Configure import conf
-
-NDNX_DIR=['/usr','/usr/local','/opt/local','/sw']
-NDNX_VERSION_FILE='ndn/ndn.h'
-NDNX_VERSION_CODE='''
-#include <ndn/ndn.h>
-#include <stdio.h>
-int main() { printf ("%d.%d.%d", ((NDN_API_VERSION/100000) % 100), ((NDN_API_VERSION/1000) % 100), (NDN_API_VERSION % 1000)); return 0; }
-'''
-
-@conf
-def __ndnx_get_version_file(self,dir):
- # Logs.pprint ('CYAN', ' + %s/%s/%s' % (dir, 'include', NDNX_VERSION_FILE))
- try:
- return self.root.find_dir(dir).find_node('%s/%s' % ('include', NDNX_VERSION_FILE))
- except:
- return None
-@conf
-def ndnx_get_version(self,dir):
- val=self.check_cc(fragment=NDNX_VERSION_CODE,includes=['%s/%s' % (dir, 'include')],execute=True,define_ret = True, mandatory=True)
- return val
-@conf
-def ndnx_get_root(self,*k,**kw):
- root=Options.options.ndnx_dir or (k and k[0]) or kw.get('path',None)
-
- if root:
- if self.__ndnx_get_version_file(root):
- return root
- self.fatal('NDNx not found in %s'%root)
-
- for dir in NDNX_DIR:
- if self.__ndnx_get_version_file(dir):
- return dir
- self.fatal('NDNx not found, please provide a --ndnx argument (see help)')
-
-@conf
-def check_openssl(self,*k,**kw):
- root = k and k[0] or kw.get('path',None) or Options.options.openssl
- mandatory = kw.get('mandatory', True)
- var = kw.get('var', 'SSL')
-
- CODE = """
-#include <openssl/crypto.h>
-#include <stdio.h>
-
-int main(int argc, char **argv) {
- (void)argc;
- printf ("%s", argv[0]);
-
- return 0;
-}
-"""
- if root:
- testApp = self.check_cc (lib=['ssl', 'crypto'],
- header_name='openssl/crypto.h',
- define_name='HAVE_%s' % var,
- uselib_store=var,
- mandatory = mandatory,
- cflags="-I%s/include" % root,
- linkflags="-L%s/lib" % root,
- execute = True, fragment = CODE, define_ret = True)
- else:
- testApp = libcrypto = self.check_cc (lib=['ssl', 'crypto'],
- header_name='openssl/crypto.h',
- define_name='HAVE_%s' % var,
- uselib_store=var,
- mandatory = mandatory,
- execute = True, fragment = CODE, define_ret = True)
-
- if not testApp:
- return
-
- self.start_msg ('Checking if selected openssl matches NDNx')
-
- ndn_var = kw.get('ndn_var', "NDNX")
- if Utils.unversioned_sys_platform () == "darwin":
- def otool (binary):
- p = Utils.subprocess.Popen (['/usr/bin/otool', '-L', binary],
- stdout = Utils.subprocess.PIPE, )
- for line in p.communicate()[0].split ('\n'):
- if re.match ('.*/libcrypto\..*', line):
- return line
-
- selected_crypto = otool (testApp)
- ndnd_crypto = otool ('%s/bin/ndnd' % self.env['%s_ROOT' % ndn_var])
-
- if ndnd_crypto != selected_crypto:
- self.fatal ("Selected openssl does not match used to compile NDNx (%s != %s)" %
- (selected_crypto.strip (), ndnd_crypto.strip ()))
- self.end_msg (True)
-
- elif Utils.unversioned_sys_platform () == "linux" or Utils.unversioned_sys_platform () == "freebsd":
- def ldd (binary):
- p = Utils.subprocess.Popen (['/usr/bin/ldd', binary],
- stdout = Utils.subprocess.PIPE, )
- for line in p.communicate()[0].split ('\n'):
- if re.match ('libcrypto\..*', line):
- return line
-
- selected_crypto = ldd (testApp)
- ndnd_crypto = ldd ('%s/bin/ndnd' % self.env['%s_ROOT' % ndn_var])
-
- if ndnd_crypto != selected_crypto:
- self.fatal ("Selected openssl does not match used to compile NDNx (%s != %s)" %
- (selected_crypto.strip (), ndnd_crypto.strip ()))
- self.end_msg (True)
- else:
- self.end_msg ("Don't know how to check", 'YELLOW')
-
-@conf
-def check_ndnx(self,*k,**kw):
- if not self.env['CC']:
- self.fatal('load a c compiler first, conf.load("compiler_c")')
-
- var=kw.get('uselib_store', 'NDNX')
- self.start_msg('Checking for NDNx')
- root = self.ndnx_get_root(*k,**kw);
- self.env.NDNX_VERSION=self.ndnx_get_version(root)
-
- self.env['INCLUDES_%s' % var]= '%s/%s' % (root, "include");
- self.env['LIB_%s' % var] = "ndn"
- self.env['LIBPATH_%s' % var] = '%s/%s' % (root, "lib")
-
- self.env['%s_ROOT' % var] = root
-
- self.end_msg("%s in %s " % (self.env.NDNX_VERSION, root))
- if Logs.verbose:
- Logs.pprint('CYAN',' NDNx include : %s'%self.env['INCLUDES_%s' % var])
- Logs.pprint('CYAN',' NDNx lib : %s'%self.env['LIB_%s' % var])
- Logs.pprint('CYAN',' NDNx libpath : %s'%self.env['LIBPATH_%s' % var])
-
-def options(opt):
- """
- NDNx options
- """
- ndnopt = opt.add_option_group("NDNx Options")
- ndnopt.add_option('--ndnx',type='string',default=None,dest='ndnx_dir',help='''path to where NDNx is installed, e.g. /usr/local''')
- ndnopt.add_option('--openssl',type='string',default='',dest='openssl',help='''path to openssl, should be the same NDNx is compiled against''')
diff --git a/wscript b/wscript
index 435d075..8db6afa 100644
--- a/wscript
+++ b/wscript
@@ -1,310 +1,258 @@
# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
-VERSION='0.4'
-APPNAME='chronoshare'
+VERSION='1.0'
+APPNAME='ChronoShare'
-from waflib import Build, Logs, Utils, Task, TaskGen, Configure
+from waflib import Logs, Utils, Task, TaskGen
def options(opt):
- opt.add_option('--test', action='store_true',default=False,dest='_test',help='''build unit tests''')
+ opt.add_option('--with-tests', action='store_true', default=False, dest='with_tests',
+ help='''Build unit tests''')
opt.add_option('--yes',action='store_true',default=False) # for autoconf/automake/make compatibility
opt.add_option('--log4cxx', action='store_true',default=False,dest='log4cxx',help='''Compile with log4cxx logging support''')
- if Utils.unversioned_sys_platform () == "darwin":
- opt.add_option('--auto-update', action='store_true',default=False,dest='autoupdate',help='''(OSX) Download sparkle framework and enable autoupdate feature''')
+ opt.add_option('--without-sqlite-locking', action='store_false', default=True,
+ dest='with_sqlite_locking',
+ help='''Disable filesystem locking in sqlite3 database '''
+ '''(use unix-dot locking mechanism instead). '''
+ '''This option may be necessary if home directory is hosted on NFS.''')
- opt.load('compiler_c compiler_cxx boost protoc qt4 gnu_dirs')
- opt.load('ndnx flags tinyxml', tooldir=['waf-tools'])
+ if Utils.unversioned_sys_platform() == "darwin":
+ opt.add_option('--with-auto-update', action='store_true', default=False, dest='autoupdate',
+ help='''(OSX) Download sparkle framework and enable autoupdate feature''')
+
+ opt.load(['compiler_c', 'compiler_cxx', 'gnu_dirs', 'qt5'])
+ opt.load(['default-compiler-flags',
+ 'osx-frameworks', 'boost', 'sqlite3', 'protoc', 'tinyxml',
+ 'coverage', 'sanitizers',
+ 'doxygen', 'sphinx_build'], tooldir=['.waf-tools'])
def configure(conf):
- conf.load("compiler_c compiler_cxx gnu_dirs flags")
+ conf.load(['compiler_c', 'compiler_cxx', 'gnu_dirs',
+ 'default-compiler-flags',
+ 'osx-frameworks', 'boost', 'sqlite3', 'protoc', 'tinyxml',
+ 'doxygen', 'sphinx_build'])
- conf.define ("CHRONOSHARE_VERSION", VERSION)
+ if 'PKG_CONFIG_PATH' not in conf.environ:
+ conf.environ['PKG_CONFIG_PATH'] = Utils.subst_vars('${LIBDIR}/pkgconfig', conf.env)
- conf.check_cfg(package='sqlite3', args=['--cflags', '--libs'], uselib_store='SQLITE3', mandatory=True)
- conf.check_cfg(package='libevent', args=['--cflags', '--libs'], uselib_store='LIBEVENT', mandatory=True)
- conf.check_cfg(package='libevent_pthreads', args=['--cflags', '--libs'], uselib_store='LIBEVENT_PTHREADS', mandatory=True)
- conf.load('tinyxml')
+ conf.check_cfg(package='libndn-cxx', args=['--cflags', '--libs'],
+ uselib_store='NDN_CXX')
+
+ # add homebrew path, as qt5 is no longer linked
+ conf.environ['PKG_CONFIG_PATH'] += ":/usr/local/opt/qt5/lib/pkgconfig:/opt/qt5/5.8/clang_64/lib/pkgconfig"
+ conf.environ['PATH'] += ":/usr/local/opt/qt5/bin:/opt/qt5/5.8/clang_64/bin"
+
+ conf.load('qt5')
+
+ conf.define("CHRONOSHARE_VERSION", VERSION)
+
+ conf.check_sqlite3(mandatory=True)
+ if not conf.options.with_sqlite_locking:
+ conf.define('DISABLE_SQLITE3_FS_LOCKING', 1)
+
conf.check_tinyxml(path=conf.options.tinyxml_dir)
- conf.define ("TRAY_ICON", "chronoshare-big.png")
- if Utils.unversioned_sys_platform () == "linux":
- conf.define ("TRAY_ICON", "chronoshare-ubuntu.png")
-
- if Utils.unversioned_sys_platform () == "darwin":
- conf.check_cxx(framework_name='Foundation', uselib_store='OSX_FOUNDATION', mandatory=False, compile_filename='test.mm')
- conf.check_cxx(framework_name='AppKit', uselib_store='OSX_APPKIT', mandatory=False, compile_filename='test.mm')
- conf.check_cxx(framework_name='CoreWLAN', uselib_store='OSX_COREWLAN', define_name='HAVE_COREWLAN',
- use="OSX_FOUNDATION", mandatory=False, compile_filename='test.mm')
-
- if conf.options.autoupdate:
- def check_sparkle(**kwargs):
- conf.check_cxx (framework_name="Sparkle", header_name=["Foundation/Foundation.h", "AppKit/AppKit.h"],
- uselib_store='OSX_SPARKLE', define_name='HAVE_SPARKLE', mandatory=True,
- compile_filename='test.mm', use="OSX_FOUNDATION OSX_APPKIT",
- **kwargs
- )
- try:
- # Try standard paths first
- check_sparkle()
- except:
- try:
- # Try local path
- Logs.info ("Check local version of Sparkle framework")
- check_sparkle(cxxflags="-F%s/osx/Frameworks/" % conf.path.abspath(),
- linkflags="-F%s/osx/Frameworks/" % conf.path.abspath())
- conf.env.HAVE_LOCAL_SPARKLE = 1
- except:
- import urllib, subprocess, os, shutil
- if not os.path.exists('osx/Frameworks/Sparkle.framework'):
- # Download to local path and retry
- Logs.info ("Sparkle framework not found, trying to download it to 'build/'")
-
- urllib.urlretrieve ("http://sparkle.andymatuschak.org/files/Sparkle%201.5b6.zip", "build/Sparkle.zip")
- if os.path.exists('build/Sparkle.zip'):
- try:
- subprocess.check_call (['unzip', '-qq', 'build/Sparkle.zip', '-d', 'build/Sparkle'])
- os.remove ("build/Sparkle.zip")
- if not os.path.exists("osx/Frameworks"):
- os.mkdir ("osx/Frameworks")
- os.rename ("build/Sparkle/Sparkle.framework", "osx/Frameworks/Sparkle.framework")
- shutil.rmtree("build/Sparkle", ignore_errors=True)
-
- check_sparkle(cxxflags="-F%s/osx/Frameworks/" % conf.path.abspath(),
- linkflags="-F%s/osx/Frameworks/" % conf.path.abspath())
- conf.env.HAVE_LOCAL_SPARKLE = 1
- except subprocess.CalledProcessError as e:
- conf.fatal("Cannot find Sparkle framework. Auto download failed: '%s' returned %s" % (' '.join(e.cmd), e.returncode))
- except:
- conf.fatal("Unknown Error happened when auto downloading Sparkle framework")
-
- if conf.is_defined('HAVE_SPARKLE'):
- conf.env.HAVE_SPARKLE = 1 # small cheat for wscript
+ conf.define("TRAY_ICON", "chronoshare-big.png")
+ if Utils.unversioned_sys_platform() == "linux":
+ conf.define("TRAY_ICON", "chronoshare-ubuntu.png")
if conf.options.log4cxx:
conf.check_cfg(package='liblog4cxx', args=['--cflags', '--libs'], uselib_store='LOG4CXX', mandatory=True)
- conf.define ("HAVE_LOG4CXX", 1)
+ conf.define("HAVE_LOG4CXX", 1)
- conf.load ('ndnx')
+ USED_BOOST_LIBS = ['system', 'filesystem', 'date_time', 'iostreams',
+ 'regex', 'program_options', 'thread']
- conf.load('protoc')
+ conf.env['WITH_TESTS'] = conf.options.with_tests
+ if conf.env['WITH_TESTS']:
+ USED_BOOST_LIBS += ['unit_test_framework']
+ conf.define('HAVE_TESTS', 1)
- conf.load('qt4')
-
- conf.load('boost')
-
- conf.check_boost(lib='system test iostreams filesystem regex thread date_time')
-
- boost_version = conf.env.BOOST_VERSION.split('_')
- if int(boost_version[0]) < 1 or int(boost_version[1]) < 46:
- Logs.error ("Minumum required boost version is 1.46")
+ conf.check_boost(lib=USED_BOOST_LIBS)
+ if conf.env.BOOST_VERSION_NUMBER < 105400:
+ Logs.error("Minimum required boost version is 1.54.0")
+ Logs.error("Please upgrade your distribution or install custom boost libraries" +
+ " (https://redmine.named-data.net/projects/nfd/wiki/Boost_FAQ)")
return
- conf.check_ndnx ()
- conf.check_openssl ()
- conf.define ('NDNX_PATH', conf.env.NDNX_ROOT)
+    # Loading "late" to prevent tests from being compiled with profiling flags
+ conf.load('coverage')
- if conf.options._test:
- conf.define ('_TESTS', 1)
- conf.env.TEST = 1
+ conf.load('sanitizers')
- conf.write_config_header('src/config.h')
+ conf.define('SYSCONFDIR', conf.env['SYSCONFDIR'])
-def build (bld):
- executor = bld.objects (
- target = "executor",
- features = ["cxx"],
- source = bld.path.ant_glob(['executor/**/*.cc']),
- use = 'BOOST BOOST_THREAD LIBEVENT LIBEVENT_PTHREADS LOG4CXX',
- includes = "executor src",
- )
+ conf.write_config_header('core/chronoshare-config.hpp')
- scheduler = bld.objects (
- target = "scheduler",
- features = ["cxx"],
- source = bld.path.ant_glob(['scheduler/**/*.cc']),
- use = 'BOOST BOOST_THREAD LIBEVENT LIBEVENT_PTHREADS LOG4CXX executor',
- includes = "scheduler executor src",
- )
+def build(bld):
+ # if Utils.unversioned_sys_platform() == 'darwin':
+ # bld(
+ # target='adhoc',
+ # mac_app = True,
+ # features=['cxx'],
+    #     source='adhoc/adhoc-osx.mm',
+ # includes='. src',
+ # use='OSX_FOUNDATION OSX_COREWLAN',
+ # )
+    Logs.error("Ad hoc network creation routines are temporarily disabled")
- libndnx = bld (
- target="ndnx",
- features=['cxx'],
- source = bld.path.ant_glob(['ndnx/**/*.cc', 'ndnx/**/*.cpp']),
- use = 'TINYXML BOOST BOOST_THREAD SSL NDNX LOG4CXX scheduler executor',
- includes = "ndnx src scheduler executor",
- )
+    # chronoshare = bld(
+ # target="chronoshare",
+ # features=['cxx'],
+ # source=bld.path.ant_glob(['src/**/*.cpp', 'src/*.proto']),
+ # use='core adhoc BOOST LOG4CXX NDN_CXX TINYXML SQLITE3',
+ # includes="src",
+ # export_includes="src",
+ # )
+    Logs.error("ChronoShare source compilation is temporarily disabled")
- adhoc = bld (
- target = "adhoc",
- features=['cxx'],
- includes = "ndnx src",
- )
- if Utils.unversioned_sys_platform () == "darwin":
- adhoc.mac_app = True
- adhoc.source = 'adhoc/adhoc-osx.mm'
- adhoc.use = "BOOST BOOST_THREAD BOOST_DATE_TIME LOG4CXX OSX_FOUNDATION OSX_COREWLAN"
+ # fs_watcher = bld(
+ # features=['qt5', 'cxx'],
+ # target='fs-watcher',
+ # defines='WAF',
+ # source=bld.path.ant_glob('fs-watcher/*.cpp'),
+ # use='chronoshare QT5CORE',
+ # includes='fs-watcher',
+ # export_includes='fs-watcher',
+ # )
+    Logs.error("fs-watcher compilation is temporarily disabled")
- chornoshare = bld (
- target="chronoshare",
- features=['cxx'],
- source = bld.path.ant_glob(['src/**/*.cc', 'src/**/*.cpp', 'src/**/*.proto']),
- use = "BOOST BOOST_FILESYSTEM BOOST_DATE_TIME SQLITE3 LOG4CXX scheduler ndnx",
- includes = "ndnx scheduler src executor",
- )
+ # http_server = bld(
+ # target = "http_server",
+ # features = "qt5 cxx",
+ # source = bld.path.ant_glob(['server/*.cpp']),
+ # includes = "server src .",
+ # use = 'BOOST QT5CORE',
+ # )
+    Logs.error("http_server compilation is temporarily disabled")
- fs_watcher = bld (
- target = "fs_watcher",
- features = "qt4 cxx",
- defines = "WAF",
- source = bld.path.ant_glob(['fs-watcher/*.cc']),
- use = "SQLITE3 LOG4CXX scheduler executor QTCORE",
- includes = "ndnx fs-watcher scheduler executor src",
- )
+# qt = bld(
+# target = "ChronoShare",
+# features = "qt5 cxx cxxprogram html_resources",
+# defines = "WAF",
+# source = bld.path.ant_glob(['gui/*.cpp', 'gui/images.qrc']),
+# includes = "fs-watcher gui src adhoc server . ",
+# use = "fs_watcher chronoshare http_server QT5CORE QT5GUI QT5WIDGETS",
- # Unit tests
- if bld.env['TEST']:
- unittests = bld.program (
- target="unit-tests",
- features = "qt4 cxx cxxprogram",
- defines = "WAF",
- source = bld.path.ant_glob(['test/*.cc']),
- use = 'BOOST_TEST BOOST_FILESYSTEM BOOST_DATE_TIME LOG4CXX SQLITE3 QTCORE QTGUI ndnx database fs_watcher chronoshare',
- includes = "ndnx scheduler src executor gui fs-watcher",
- install_prefix = None,
- )
+# html_resources = bld.path.find_dir("gui/html").ant_glob([
+# '**/*.js', '**/*.png', '**/*.css',
+# '**/*.html', '**/*.gif', '**/*.ico'
+# ]),
+# )
- http_server = bld (
- target = "http_server",
- features = "qt4 cxx",
- source = bld.path.ant_glob(['server/*.cpp']),
- includes = "server src .",
- use = "BOOST QTCORE"
- )
+# if Utils.unversioned_sys_platform() == "darwin":
+# app_plist = '''<?xml version="1.0" encoding="UTF-8"?>
+# <!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
+# <plist version="0.9">
+# <dict>
+# <key>CFBundlePackageType</key>
+# <string>APPL</string>
+# <key>CFBundleIconFile</key>
+# <string>chronoshare.icns</string>
+# <key>CFBundleGetInfoString</key>
+# <string>Created by Waf</string>
+# <key>CFBundleIdentifier</key>
+# <string>edu.ucla.cs.irl.Chronoshare</string>
+# <key>CFBundleSignature</key>
+# <string>????</string>
+# <key>NOTE</key>
+# <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
+# <key>CFBundleExecutable</key>
+# <string>%s</string>
+# <key>LSUIElement</key>
+# <string>1</string>
+# <key>SUPublicDSAKeyFile</key>
+# <string>dsa_pub.pem</string>
+# <key>CFBundleIconFile</key>
+# <string>chronoshare.icns</string>
+# </dict>
+# </plist>'''
+# qt.mac_app = "ChronoShare.app"
+# qt.mac_plist = app_plist % "ChronoShare"
+# qt.mac_resources = 'chronoshare.icns'
+# qt.use += " OSX_FOUNDATION OSX_COREWLAN adhoc"
- qt = bld (
- target = "ChronoShare",
- features = "qt4 cxx cxxprogram html_resources",
- defines = "WAF",
- source = bld.path.ant_glob(['gui/*.cpp', 'gui/*.cc', 'gui/images.qrc']),
- includes = "ndnx scheduler executor fs-watcher gui src adhoc server . ",
- use = "BOOST BOOST_FILESYSTEM BOOST_DATE_TIME SQLITE3 QTCORE QTGUI LOG4CXX fs_watcher ndnx database chronoshare http_server",
+# if bld.env['HAVE_SPARKLE']:
+# qt.use += " OSX_SPARKLE"
+# qt.source += ["osx/auto-update/sparkle-auto-update.mm"]
+# qt.includes += " osx/auto-update"
+# if bld.env['HAVE_LOCAL_SPARKLE']:
+# qt.mac_frameworks = "osx/Frameworks/Sparkle.framework"
- html_resources = bld.path.find_dir ("gui/html").ant_glob([
- '**/*.js', '**/*.png', '**/*.css',
- '**/*.html', '**/*.gif', '**/*.ico'
- ]),
- )
+# if Utils.unversioned_sys_platform() == "linux":
+# bld(
+# features = "process_in",
+# target = "ChronoShare.desktop",
+# source = "ChronoShare.desktop.in",
+# install_prefix = "${DATADIR}/applications",
+# )
+# bld.install_files("${DATADIR}/applications", "ChronoShare.desktop")
+# bld.install_files("${DATADIR}/ChronoShare", "gui/images/chronoshare-big.png")
+    Logs.error("ChronoShare app compilation is temporarily disabled")
- if Utils.unversioned_sys_platform () == "darwin":
- app_plist = '''<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
-<plist version="0.9">
-<dict>
- <key>CFBundlePackageType</key>
- <string>APPL</string>
- <key>CFBundleIconFile</key>
- <string>chronoshare.icns</string>
- <key>CFBundleGetInfoString</key>
- <string>Created by Waf</string>
- <key>CFBundleIdentifier</key>
- <string>edu.ucla.cs.irl.Chronoshare</string>
- <key>CFBundleSignature</key>
- <string>????</string>
- <key>NOTE</key>
- <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
- <key>CFBundleExecutable</key>
- <string>%s</string>
- <key>LSUIElement</key>
- <string>1</string>
- <key>SUPublicDSAKeyFile</key>
- <string>dsa_pub.pem</string>
- <key>CFBundleIconFile</key>
- <string>chronoshare.icns</string>
-</dict>
-</plist>'''
- qt.mac_app = "ChronoShare.app"
- qt.mac_plist = app_plist % "ChronoShare"
- qt.mac_resources = 'chronoshare.icns'
- qt.use += " OSX_FOUNDATION OSX_COREWLAN adhoc"
+# cmdline = bld(
+# target = "csd",
+# features = "qt5 cxx cxxprogram",
+# defines = "WAF",
+# source = bld.path.ant_glob(['cmd/csd.cpp']),
+# use = "fs_watcher chronoshare http_server QT5CORE",
+# )
+    Logs.error("csd app compilation is temporarily disabled")
- if bld.env['HAVE_SPARKLE']:
- qt.use += " OSX_SPARKLE"
- qt.source += ["osx/auto-update/sparkle-auto-update.mm"]
- qt.includes += " osx/auto-update"
- if bld.env['HAVE_LOCAL_SPARKLE']:
- qt.mac_frameworks = "osx/Frameworks/Sparkle.framework"
+# dump_db = bld(
+# target = "dump-db",
+# features = "cxx cxxprogram",
+# source = bld.path.ant_glob(['cmd/dump-db.cpp']),
+# use = "fs_watcher chronoshare http_server QT5CORE",
+# )
+    Logs.error("dump-db app compilation is temporarily disabled")
- if Utils.unversioned_sys_platform () == "linux":
- bld (
- features = "process_in",
- target = "ChronoShare.desktop",
- source = "ChronoShare.desktop.in",
- install_prefix = "${DATADIR}/applications",
- )
- bld.install_files ("${DATADIR}/applications", "ChronoShare.desktop")
- bld.install_files ("${DATADIR}/ChronoShare", "gui/images/chronoshare-big.png")
-
- cmdline = bld (
- target = "csd",
- features = "qt4 cxx cxxprogram",
- defines = "WAF",
- source = "cmd/csd.cc",
- includes = "ndnx scheduler executor gui fs-watcher src . ",
- use = "BOOST BOOST_FILESYSTEM BOOST_DATE_TIME SQLITE3 QTCORE QTGUI LOG4CXX fs_watcher ndnx database chronoshare"
- )
-
- dump_db = bld (
- target = "dump-db",
- features = "cxx cxxprogram",
- source = "cmd/dump-db.cc",
- includes = "ndnx scheduler executor gui fs-watcher src . ",
- use = "BOOST BOOST_FILESYSTEM BOOST_DATE_TIME SQLITE3 QTCORE LOG4CXX fs_watcher ndnx database chronoshare"
- )
+    bld.recurse('test')
from waflib import TaskGen
@TaskGen.extension('.mm')
def m_hook(self, node):
- """Alias .mm files to be compiled the same as .cc files, gcc/clang will do the right thing."""
+ """Alias .mm files to be compiled the same as .cpp files, gcc/clang will do the right thing."""
return self.create_compiled_task('cxx', node)
@TaskGen.extension('.js', '.png', '.css', '.html', '.gif', '.ico', '.in')
def sig_hook(self, node):
- node.sig=Utils.h_file (node.abspath())
+ node.sig=Utils.h_file(node.abspath())
@TaskGen.feature('process_in')
@TaskGen.after_method('process_source')
def create_process_in(self):
- dst = self.bld.path.find_or_declare (self.target)
- tsk = self.create_task ('process_in', self.source, dst)
+ dst = self.bld.path.find_or_declare(self.target)
+ tsk = self.create_task('process_in', self.source, dst)
class process_in(Task.Task):
color='PINK'
- def run (self):
- self.outputs[0].write (Utils.subst_vars(self.inputs[0].read (), self.env))
+ def run(self):
+ self.outputs[0].write(Utils.subst_vars(self.inputs[0].read(), self.env))
@TaskGen.feature('html_resources')
@TaskGen.before_method('process_source')
def create_qrc_task(self):
- output = self.bld.path.find_or_declare ("gui/html.qrc")
+ output = self.bld.path.find_or_declare("gui/html.qrc")
tsk = self.create_task('html_resources', self.html_resources, output)
- tsk.base_path = output.parent.get_src ()
- self.create_rcc_task (output.get_src ())
+ tsk.base_path = output.parent.get_src()
+ self.create_rcc_task(output.get_src())
class html_resources(Task.Task):
color='PINK'
- def __str__ (self):
- return "%s: Generating %s\n" % (self.__class__.__name__.replace('_task',''), self.outputs[0].nice_path ())
+ def __str__(self):
+ return "%s: Generating %s\n" % (self.__class__.__name__.replace('_task',''), self.outputs[0].path_from(self.outputs[0].ctx.launch_node()))
- def run (self):
+ def run(self):
out = self.outputs[0]
- bld_out = out.get_bld ()
- src_out = out.get_src ()
+ bld_out = out.get_bld()
+ src_out = out.get_src()
bld_out.write('<RCC>\n <qresource prefix="/">\n')
for f in self.inputs:
- bld_out.write (' <file>%s</file>\n' % f.path_from (self.base_path), 'a')
+ bld_out.write(' <file>%s</file>\n' % f.path_from(self.base_path), 'a')
bld_out.write(' </qresource>\n</RCC>', 'a')
- src_out.write (bld_out.read(), 'w')
+ src_out.write(bld_out.read(), 'w')
return 0