build: Fix compilation on Ubuntu platform and add work-around for ndn-cxx regression
Note that on Ubuntu 12.04, either the boost libraries or the compiler needs to be upgraded.
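
For reference, the upgrade path documented in docs/source/getting-started.rst
(a sketch; the PPA package version may change over time):

    sudo apt-get install python-software-properties
    sudo add-apt-repository ppa:boost-latest/ppa
    sudo apt-get update
    sudo apt-get install libboost1.55-all-dev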
Change-Id: I33f1089f961f99abf3d1803bf833e76ff7fb528d
Refs: #2379, #2380, #2382, #2381, #2383
diff --git a/.gitignore b/.gitignore
index 0f1f3e2..8cc655a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,6 @@
*.pyc
-.waf*
+.waf3-*
+.waf-1*
*~
.DS_Store
docs/doxygen.warnings.log
diff --git a/.jenkins b/.jenkins
new file mode 100755
index 0000000..674d751
--- /dev/null
+++ b/.jenkins
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+set -e
+
+DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+
+for file in "$DIR"/.jenkins.d/*; do
+ [[ -f $file && -x $file ]] || continue
+ echo "Run: $file"
+ "$file"
+done
diff --git a/.jenkins.d/00-deps-ndn-cxx.sh b/.jenkins.d/00-deps-ndn-cxx.sh
new file mode 100755
index 0000000..12e3eb2
--- /dev/null
+++ b/.jenkins.d/00-deps-ndn-cxx.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+set -x
+set -e
+
+cd /tmp
+BUILD="no"
+if [ ! -d ndn-cxx ]; then
+ git clone git://github.com/named-data/ndn-cxx
+ cd ndn-cxx
+ # TEMPORARY, the following must be removed after the issue is fixed
+ git checkout 81a6c5dea60cea97c60dab0d78576c0d3b4e29ed
+ BUILD="yes"
+else
+ cd ndn-cxx
+ INSTALLED_VERSION=`git rev-parse HEAD || echo NONE`
+ sudo rm -Rf latest-version
+ git clone git://github.com/named-data/ndn-cxx latest-version
+ cd latest-version
+ # TEMPORARY, the following must be removed after the issue is fixed
+ git checkout 81a6c5dea60cea97c60dab0d78576c0d3b4e29ed
+ LATEST_VERSION=`git rev-parse HEAD || echo UNKNOWN`
+ cd ..
+ rm -Rf latest-version
+ if [ "$INSTALLED_VERSION" != "$LATEST_VERSION" ]; then
+ cd ..
+ sudo rm -Rf ndn-cxx
+ git clone --depth 1 git://github.com/named-data/ndn-cxx
+ cd ndn-cxx
+ BUILD="yes"
+ fi
+fi
+
+sudo rm -Rf /usr/local/include/ndn-cxx
+sudo rm -f /usr/local/lib/libndn-cxx*
+sudo rm -f /usr/local/lib/pkgconfig/libndn-cxx*
+
+if [ "$BUILD" = "yes" ]; then
+ sudo ./waf distclean -j1 --color=yes
+fi
+
+./waf configure -j1 --color=yes --without-osx-keychain
+./waf -j1 --color=yes
+sudo ./waf install -j1 --color=yes
diff --git a/.jenkins.d/10-build.sh b/.jenkins.d/10-build.sh
new file mode 100755
index 0000000..9297c7e
--- /dev/null
+++ b/.jenkins.d/10-build.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+set -x
+set -e
+
+# git submodule init
+# git submodule sync
+# git submodule update
+
+# COVERAGE=$( python -c "print '--with-coverage' if 'code-coverage' in '$JOB_NAME' else ''" )
+
+# Cleanup
+sudo ./waf -j1 distclean
+
+# Configure/build in debug mode
+./waf -j1 configure --enable-examples --enable-tests
+./waf -j1 build
+
+# # Cleanup
+# sudo ./waf -j1 --color=yes distclean
+
+# # Configure/build in optimized mode without tests with precompiled headers
+# ./waf -j1 --color=yes configure
+# ./waf -j1 --color=yes build
+
+# # Cleanup
+# sudo ./waf -j1 --color=yes distclean
+
+# # Configure/build in optimized mode
+# ./waf -j1 --color=yes configure --with-tests --without-pch $COVERAGE
+# ./waf -j1 --color=yes build
+
+# # (tests will be run against optimized version)
+
+# # Install
+# sudo ./waf -j1 --color=yes install
diff --git a/.waf-tools/compiler-features.py b/.waf-tools/compiler-features.py
new file mode 100644
index 0000000..5344939
--- /dev/null
+++ b/.waf-tools/compiler-features.py
@@ -0,0 +1,27 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+
+from waflib.Configure import conf
+
+OVERRIDE = '''
+class Base
+{
+ virtual void
+ f(int a);
+};
+
+class Derived : public Base
+{
+ virtual void
+ f(int a) override;
+};
+'''
+
+@conf
+def check_override(self):
+ if self.check_cxx(msg='Checking for override specifier',
+ fragment=OVERRIDE,
+ features='cxx', mandatory=False):
+ self.define('HAVE_CXX_OVERRIDE', 1)
+
+def configure(conf):
+ conf.check_override()
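+
+# A sketch of how C++ code could consume the resulting define (the NDN_OVERRIDE
+# macro name is hypothetical, not part of this tool):
+#
+#   #ifdef HAVE_CXX_OVERRIDE
+#   #define NDN_OVERRIDE override
+#   #else
+#   #define NDN_OVERRIDE
+#   #endif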
diff --git a/.waf-tools/dependency-checker.py b/.waf-tools/dependency-checker.py
new file mode 100644
index 0000000..629fbfd
--- /dev/null
+++ b/.waf-tools/dependency-checker.py
@@ -0,0 +1,28 @@
+# encoding: utf-8
+
+from waflib import Options, Logs
+from waflib.Configure import conf
+
+def addDependencyOptions(self, opt, name, extraHelp=''):
+ opt.add_option('--with-%s' % name, type='string', default=None,
+ dest='with_%s' % name,
+ help='Path to %s, e.g., /usr/local %s' % (name, extraHelp))
+setattr(Options.OptionsContext, "addDependencyOptions", addDependencyOptions)
+
+@conf
+def checkDependency(self, name, **kw):
+ root = kw.get('path', getattr(Options.options, 'with_%s' % name))
+ kw['msg'] = kw.get('msg', 'Checking for %s library' % name)
+ kw['uselib_store'] = kw.get('uselib_store', name.upper())
+ kw['define_name'] = kw.get('define_name', 'HAVE_%s' % kw['uselib_store'])
+ kw['mandatory'] = kw.get('mandatory', True)
+
+ if root:
+ isOk = self.check_cxx(includes="%s/include" % root,
+ libpath="%s/lib" % root,
+ **kw)
+ else:
+ isOk = self.check_cxx(**kw)
+
+ if isOk:
+ self.env[kw['define_name']] = True
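+
+# Hypothetical wscript usage sketch (the 'sqlite3' names are illustrative only):
+#
+#   def options(opt):
+#       opt.addDependencyOptions(opt, 'sqlite3')
+#
+#   def configure(conf):
+#       conf.checkDependency(name='sqlite3', lib='sqlite3', mandatory=False)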
diff --git a/.waf-tools/doxygen.py b/.waf-tools/doxygen.py
new file mode 100644
index 0000000..ac8c70b
--- /dev/null
+++ b/.waf-tools/doxygen.py
@@ -0,0 +1,214 @@
+#! /usr/bin/env python
+# encoding: UTF-8
+# Thomas Nagy 2008-2010 (ita)
+
+"""
+
+Doxygen support
+
+Variables passed to bld():
+* doxyfile -- the Doxyfile to use
+
+When using this tool, the wscript will look like:
+
+ def options(opt):
+ opt.load('doxygen')
+
+ def configure(conf):
+ conf.load('doxygen')
+ # check conf.env.DOXYGEN, if it is mandatory
+
+ def build(bld):
+ if bld.env.DOXYGEN:
+ bld(features="doxygen", doxyfile='Doxyfile', ...)
+
+ def doxygen(bld):
+ if bld.env.DOXYGEN:
+ bld(features="doxygen", doxyfile='Doxyfile', ...)
+"""
+
+from fnmatch import fnmatchcase
+import os, os.path, re, stat
+from waflib import Task, Utils, Node, Logs, Errors, Build
+from waflib.TaskGen import feature
+
+DOXY_STR = '"${DOXYGEN}" - '
+DOXY_FMTS = 'html latex man rtf xml'.split()
+DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
+c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
+inc m mm py f90
+'''.split())
+
+re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
+re_nl = re.compile('\r*\n', re.M)
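+# parse_doxy() turns Doxyfile text into a dict of settings, merging '+='
+# continuations, e.g. parse_doxy('INPUT = src\nINPUT += docs\n') -> {'INPUT': 'src docs'}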
+def parse_doxy(txt):
+ tbl = {}
+ txt = re_rl.sub('', txt)
+ lines = re_nl.split(txt)
+ for x in lines:
+ x = x.strip()
+ if not x or x.startswith('#') or x.find('=') < 0:
+ continue
+ if x.find('+=') >= 0:
+ tmp = x.split('+=')
+ key = tmp[0].strip()
+ if key in tbl:
+ tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
+ else:
+ tbl[key] = '+='.join(tmp[1:]).strip()
+ else:
+ tmp = x.split('=')
+ tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
+ return tbl
+
+class doxygen(Task.Task):
+ vars = ['DOXYGEN', 'DOXYFLAGS']
+ color = 'BLUE'
+
+ def runnable_status(self):
+ '''
+ self.pars are populated in runnable_status - because this function is being
+ run *before* both self.pars "consumers" - scan() and run()
+
+ set output_dir (node) for the output
+ '''
+
+ for x in self.run_after:
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'pars', None):
+ txt = self.inputs[0].read()
+ self.pars = parse_doxy(txt)
+ if not self.pars.get('OUTPUT_DIRECTORY'):
+ self.pars['OUTPUT_DIRECTORY'] = self.inputs[0].parent.get_bld().abspath()
+
+ # Override with any parameters passed to the task generator
+ if getattr(self.generator, 'pars', None):
+ for k, v in self.generator.pars.items():
+ self.pars[k] = v
+
+ self.doxy_inputs = getattr(self, 'doxy_inputs', [])
+ if not self.pars.get('INPUT'):
+ self.doxy_inputs.append(self.inputs[0].parent)
+ else:
+ for i in self.pars.get('INPUT').split():
+ if os.path.isabs(i):
+ node = self.generator.bld.root.find_node(i)
+ else:
+ node = self.generator.path.find_node(i)
+ if not node:
+ self.generator.bld.fatal('Could not find the doxygen input %r' % i)
+ self.doxy_inputs.append(node)
+
+ if not getattr(self, 'output_dir', None):
+ bld = self.generator.bld
+ # First try to find an absolute path, then find or declare a relative path
+ self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
+ if not self.output_dir:
+ self.output_dir = bld.path.find_or_declare(self.pars['OUTPUT_DIRECTORY'])
+
+ self.signature()
+ return Task.Task.runnable_status(self)
+
+ def scan(self):
+ exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
+ file_patterns = self.pars.get('FILE_PATTERNS','').split()
+ if not file_patterns:
+ file_patterns = DOXY_FILE_PATTERNS
+ if self.pars.get('RECURSIVE') == 'YES':
+ file_patterns = ["**/%s" % pattern for pattern in file_patterns]
+ nodes = []
+ names = []
+ for node in self.doxy_inputs:
+ if os.path.isdir(node.abspath()):
+ for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
+ nodes.append(m)
+ else:
+ nodes.append(node)
+ return (nodes, names)
+
+ def run(self):
+ dct = self.pars.copy()
+ dct['INPUT'] = ' '.join(['"%s"' % x.abspath() for x in self.doxy_inputs])
+ code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
+ code = code.encode() # for python 3
+ #fmt = DOXY_STR % (self.inputs[0].parent.abspath())
+ cmd = Utils.subst_vars(DOXY_STR, self.env)
+ env = self.env.env or None
+ proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.generator.bld.path.get_bld().abspath())
+ proc.communicate(code)
+ return proc.returncode
+
+ def post_run(self):
+ nodes = self.output_dir.ant_glob('**/*', quiet=True)
+ for x in nodes:
+ x.sig = Utils.h_file(x.abspath())
+ self.outputs += nodes
+ return Task.Task.post_run(self)
+
+class tar(Task.Task):
+ "quick tar creation"
+ run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
+ color = 'RED'
+ after = ['doxygen']
+ def runnable_status(self):
+ for x in getattr(self, 'input_tasks', []):
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ if not getattr(self, 'tar_done_adding', None):
+ # execute this only once
+ self.tar_done_adding = True
+ for x in getattr(self, 'input_tasks', []):
+ self.set_inputs(x.outputs)
+ if not self.inputs:
+ return Task.SKIP_ME
+ return Task.Task.runnable_status(self)
+
+ def __str__(self):
+ tgt_str = ' '.join([a.nice_path(self.env) for a in self.outputs])
+ return '%s: %s\n' % (self.__class__.__name__, tgt_str)
+
+@feature('doxygen')
+def process_doxy(self):
+ if not getattr(self, 'doxyfile', None):
+ self.bld.fatal('no doxyfile??')
+
+ node = self.doxyfile
+ if not isinstance(node, Node.Node):
+ node = self.path.find_resource(node)
+ if not node:
+ raise ValueError('doxygen file not found')
+
+ # the task instance
+ dsk = self.create_task('doxygen', node)
+
+ if getattr(self, 'doxy_tar', None):
+ tsk = self.create_task('tar')
+ tsk.input_tasks = [dsk]
+ tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
+ if self.doxy_tar.endswith('bz2'):
+ tsk.env['TAROPTS'] = ['cjf']
+ elif self.doxy_tar.endswith('gz'):
+ tsk.env['TAROPTS'] = ['czf']
+ else:
+ tsk.env['TAROPTS'] = ['cf']
+
+def configure(conf):
+ '''
+ Check if doxygen and tar commands are present in the system
+
+ If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
+ variables will be set. Detection can be controlled by setting DOXYGEN and
+ TAR environmental variables.
+ '''
+
+ conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
+ conf.find_program('tar', var='TAR', mandatory=False)
+
+# doxygen docs
+from waflib.Build import BuildContext
+class doxy(BuildContext):
+ cmd = "doxygen"
+ fun = "doxygen"
diff --git a/.waf-tools/sphinx_build.py b/.waf-tools/sphinx_build.py
new file mode 100644
index 0000000..e61da6e
--- /dev/null
+++ b/.waf-tools/sphinx_build.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# encoding: utf-8
+
+# inspired by code by Hans-Martin von Gaudecker, 2012
+
+import os
+from waflib import Node, Task, TaskGen, Errors, Logs, Build, Utils
+
+class sphinx_build(Task.Task):
+ color = 'BLUE'
+ run_str = '${SPHINX_BUILD} -D ${VERSION} -D ${RELEASE} -q -b ${BUILDERNAME} -d ${DOCTREEDIR} ${SRCDIR} ${OUTDIR}'
+
+ def __str__(self):
+ env = self.env
+ src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
+ tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
+ if self.outputs: sep = ' -> '
+ else: sep = ''
+ return '%s [%s]: %s%s%s\n' % (self.__class__.__name__.replace('_task', ''),
+ self.env['BUILDERNAME'], src_str, sep, tgt_str)
+
+@TaskGen.extension('.py', '.rst')
+def sig_hook(self, node):
+ node.sig=Utils.h_file(node.abspath())
+
+@TaskGen.feature("sphinx")
+@TaskGen.before_method("process_source")
+def apply_sphinx(self):
+ """Set up the task generator with a Sphinx instance and create a task."""
+
+ inputs = []
+ for i in Utils.to_list(self.source):
+ if not isinstance(i, Node.Node):
+ node = self.path.find_node(i)
+ else:
+ node = i
+ if not node:
+ raise ValueError('[%s] file not found' % i)
+ inputs.append(node)
+
+ task = self.create_task('sphinx_build', inputs)
+
+ conf = self.path.find_node(self.config)
+ task.inputs.append(conf)
+
+ confdir = conf.parent.abspath()
+ buildername = getattr(self, "builder", "html")
+ srcdir = getattr(self, "srcdir", confdir)
+ outdir = self.path.find_or_declare(getattr(self, "outdir", buildername)).get_bld()
+ doctreedir = getattr(self, "doctreedir", os.path.join(outdir.abspath(), ".doctrees"))
+
+ task.env['BUILDERNAME'] = buildername
+ task.env['SRCDIR'] = srcdir
+ task.env['DOCTREEDIR'] = doctreedir
+ task.env['OUTDIR'] = outdir.abspath()
+ task.env['VERSION'] = "version=%s" % self.VERSION
+ task.env['RELEASE'] = "release=%s" % self.VERSION
+
+ import imp
+ confData = imp.load_source('sphinx_conf', conf.abspath())
+
+ if buildername == "man":
+ for i in confData.man_pages:
+ target = outdir.find_or_declare('%s.%d' % (i[1], i[4]))
+ task.outputs.append(target)
+
+ if self.install_path:
+ self.bld.install_files("%s/man%d/" % (self.install_path, i[4]), target)
+ else:
+ task.outputs.append(outdir)
+
+def configure(conf):
+ conf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False)
+
+# sphinx docs
+from waflib.Build import BuildContext
+class sphinx(BuildContext):
+ cmd = "sphinx"
+ fun = "sphinx"
diff --git a/.waf-tools/type_traits.py b/.waf-tools/type_traits.py
new file mode 100644
index 0000000..07eb129
--- /dev/null
+++ b/.waf-tools/type_traits.py
@@ -0,0 +1,30 @@
+# -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
+#
+# Copyright (c) 2014, Regents of the University of California
+#
+# GPL 3.0 license, see the COPYING.md file for more information
+
+from waflib import Configure
+
+IS_DEFAULT_CONSTRUCTIBLE_CHECK = '''
+#include <type_traits>
+static_assert(std::is_default_constructible<int>::value, "");
+'''
+
+IS_MOVE_CONSTRUCTIBLE_CHECK = '''
+#include <type_traits>
+static_assert(std::is_move_constructible<int>::value, "");
+'''
+
+def configure(conf):
+ if conf.check_cxx(msg='Checking for std::is_default_constructible',
+ fragment=IS_DEFAULT_CONSTRUCTIBLE_CHECK,
+ features='cxx', mandatory=False):
+ conf.define('HAVE_IS_DEFAULT_CONSTRUCTIBLE', 1)
+ conf.env['HAVE_IS_DEFAULT_CONSTRUCTIBLE'] = True
+
+ if conf.check_cxx(msg='Checking for std::is_move_constructible',
+ fragment=IS_MOVE_CONSTRUCTIBLE_CHECK,
+ features='cxx', mandatory=False):
+ conf.define('HAVE_IS_MOVE_CONSTRUCTIBLE', 1)
+ conf.env['HAVE_IS_MOVE_CONSTRUCTIBLE'] = True
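+
+# A sketch of how C++ code could consume these defines (the Packet type is
+# illustrative, not part of this tool):
+#
+#   #ifdef HAVE_IS_MOVE_CONSTRUCTIBLE
+#   static_assert(std::is_move_constructible<Packet>::value, "");
+#   #endif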
diff --git a/NFD/config.hpp b/NFD/config.hpp
index 7a7281b..2aca644 100644
--- a/NFD/config.hpp
+++ b/NFD/config.hpp
@@ -3,8 +3,4 @@
#ifndef W_CONFIG_HPP_WAF
#define W_CONFIG_HPP_WAF
-#define HAVE_IS_DEFAULT_CONSTRUCTIBLE 1
-#define HAVE_IS_MOVE_CONSTRUCTIBLE 1
-#define HAVE_CXX_OVERRIDE 1
-
#endif /* W_CONFIG_HPP_WAF */
diff --git a/apps/ndn-producer.cpp b/apps/ndn-producer.cpp
index fe8e42f..f381834 100644
--- a/apps/ndn-producer.cpp
+++ b/apps/ndn-producer.cpp
@@ -110,10 +110,10 @@
data->setName(dataName);
data->setFreshnessPeriod(::ndn::time::milliseconds(m_freshness.GetMilliSeconds()));
- data->setContent(make_shared<::ndn::Buffer>(m_virtualPayloadSize));
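+ // Note: the space in "make_shared< ::ndn::Buffer>" is deliberate; without it,
+ // older compilers (e.g., gcc 4.6 on Ubuntu 12.04) lex "<::" as the "<:" digraph
+ // and fail to parse the template-argument list.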
+ data->setContent(make_shared< ::ndn::Buffer>(m_virtualPayloadSize));
Signature signature;
- SignatureInfo signatureInfo(static_cast<::ndn::tlv::SignatureTypeValue>(255));
+ SignatureInfo signatureInfo(static_cast< ::ndn::tlv::SignatureTypeValue>(255));
if (m_keyLocator.size() > 0) {
signatureInfo.setKeyLocator(m_keyLocator);
diff --git a/bindings/modulegen__gcc_ILP32.py b/bindings/modulegen__gcc_ILP32.py
index a684069..940aa6e 100644
--- a/bindings/modulegen__gcc_ILP32.py
+++ b/bindings/modulegen__gcc_ILP32.py
@@ -154,8 +154,8 @@
cls.add_method('AddOrigin', 'void', [param('const std::string&', 'prefix'), param('const std::string&', 'nodeName')])
cls.add_method('AddOrigins', 'void', [param('const std::string&', 'prefix'), param('const ns3::NodeContainer&', 'nodes')])
cls.add_method('AddOriginsForAll', 'void', [])
- cls.add_method('CalculateRoutes', 'void', [param('bool', 'invalidatedRoutes', default_value='true')])
- cls.add_method('CalculateAllPossibleRoutes', 'void', [param('bool', 'invalidatedRoutes', default_value='true')])
+ cls.add_method('CalculateRoutes', 'void', [])
+ cls.add_method('CalculateAllPossibleRoutes', 'void', [])
reg_GlobalRoutingHelper(root_module['ns3::ndn::GlobalRoutingHelper'])
def reg_Name(root_module, cls):
diff --git a/bindings/modulegen__gcc_LP64.py b/bindings/modulegen__gcc_LP64.py
index a684069..940aa6e 100644
--- a/bindings/modulegen__gcc_LP64.py
+++ b/bindings/modulegen__gcc_LP64.py
@@ -154,8 +154,8 @@
cls.add_method('AddOrigin', 'void', [param('const std::string&', 'prefix'), param('const std::string&', 'nodeName')])
cls.add_method('AddOrigins', 'void', [param('const std::string&', 'prefix'), param('const ns3::NodeContainer&', 'nodes')])
cls.add_method('AddOriginsForAll', 'void', [])
- cls.add_method('CalculateRoutes', 'void', [param('bool', 'invalidatedRoutes', default_value='true')])
- cls.add_method('CalculateAllPossibleRoutes', 'void', [param('bool', 'invalidatedRoutes', default_value='true')])
+ cls.add_method('CalculateRoutes', 'void', [])
+ cls.add_method('CalculateAllPossibleRoutes', 'void', [])
reg_GlobalRoutingHelper(root_module['ns3::ndn::GlobalRoutingHelper'])
def reg_Name(root_module, cls):
diff --git a/docs/source/faq.rst b/docs/source/faq.rst
index 068afb0..2a83682 100644
--- a/docs/source/faq.rst
+++ b/docs/source/faq.rst
@@ -5,7 +5,8 @@
---------------
.. note::
- **My ubuntu/redhat/freebsd have an old version of boost libraries. How can I get the latest one?**
+ **My Ubuntu/RedHat/FreeBSD has an old version of boost libraries. How can I get the
+ latest one?**
.. _Installing boost libraries:
@@ -15,25 +16,30 @@
.. role:: red
.. note::
- **The following instructions are for those who want to install latest version of boost libraries** :red:`and has root access`.
+ **The following instructions are for those who want to install the latest version of
+ boost libraries** :red:`and have root access`.
-The following commands would install the latest version of boost libraries (at the time of writing, version 1.53) ot ``/usr/local``, assuming you have a root access to your machine.
-If you don't have root access, please refer to section :ref:`Installing boost libraries to a non-privileged location`.
+The following commands would install the latest version of boost libraries (at the time of
+writing, version 1.57) to ``/usr/local``, assuming you have root access to your machine. If
+you don't have root access, please refer to section :ref:`Installing boost libraries to a
+non-privileged location`.
.. note::
- If you are using Ubuntyu, make sure that you have installed ``libbz2-dev``, otherwise not all libraries required by ndnSIM will be installed (see :ref:`Common pitfalls`)
+ If you are using Ubuntu, make sure that you have installed ``libbz2-dev``, otherwise not
+ all libraries required by ndnSIM will be installed (see :ref:`Common pitfalls`)
.. code-block:: bash
:linenos:
- wget http://downloads.sourceforge.net/project/boost/boost/1.53.0/boost_1_53_0.tar.bz2
- tar jxf boost_1_53_0.tar.bz2
- cd boost_1_53_0
+ wget http://downloads.sourceforge.net/project/boost/boost/1.57.0/boost_1_57_0.tar.bz2
+ tar jxf boost_1_57_0.tar.bz2
+ cd boost_1_57_0
./bootstrap.sh
sudo ./b2 --prefix=/usr/local install
-The following commands should allow compilation and run of NS-3 simulations with custom install of boost libraries:
+The following commands should allow compiling and running NS-3 simulations with a custom
+install of boost libraries:
.. code-block:: bash
:linenos:
@@ -44,7 +50,9 @@
LD_LIBRARY_PATH=/usr/local/lib NS_LOG=ndn.Face:ndn.Consumer ./waf --run=ndn-simple
.. note::
- `LD_LIBRARY_PATH=/usr/local/lib` is necessary on Linux platform in order for the dynamic linker to find libraries installed in a location different from one of the folders specified in /etc/ld.so.conf.
+ ``LD_LIBRARY_PATH=/usr/local/lib`` is necessary on the Linux platform in order for the
+ dynamic linker to find libraries installed in a location different from one of the
+ folders specified in ``/etc/ld.so.conf``.
.. _Installing boost libraries to a non-privileged location:
@@ -52,17 +60,20 @@
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. note::
- **Follow these general instructions if you are trying to installboost libraries to a non-privileged location** :red:`(i.e., you do not have root access),` **but something is going wrong.**
+ **Follow these general instructions if you are trying to install boost libraries to a
+ non-privileged location** :red:`(i.e., you do not have root access),` **but something is
+ going wrong.**
-Normally, to compile and install boost libraries in non-privileged mode, you would need to issue following commands (e.g., for boost version 1.53.0):
+Normally, to compile and install boost libraries in non-privileged mode, you would need to
+issue the following commands (e.g., for boost version 1.57.0):
.. code-block:: bash
:linenos:
export BOOSTDIR=/home/non-privileged-user/boost
- wget http://downloads.sourceforge.net/project/boost/boost/1.53.0/boost_1_53_0.tar.bz2
- tar jxf boost_1_53_0.tar.bz2
- cd boost_1_53_0
+ wget http://downloads.sourceforge.net/project/boost/boost/1.57.0/boost_1_57_0.tar.bz2
+ tar jxf boost_1_57_0.tar.bz2
+ cd boost_1_57_0
./bootstrap.sh
./b2 --prefix=$BOOSTDIR install
@@ -73,8 +84,9 @@
Common pitfalls
^^^^^^^^^^^^^^^
-The common pitfalls is with the **boost iostreams** library, which is required by ndnSIM, but failed to build because of the missing bzip2 library.
-This problem can be easily fixed by downloading and installing bzip2 library, e.g., using the following steps:
+A common pitfall is with the **boost iostreams** library, which is required by ndnSIM but
+fails to build because of a missing bzip2 library. This problem can be easily fixed by
+downloading and installing the bzip2 library, e.g., using the following steps:
.. code-block:: bash
:linenos:
@@ -84,11 +96,14 @@
cd bzip2-1.0.6
make PREFIX=$BOOSTDIR CFLAGS="-fPIC -O2 -g" install
-Afte bzip2 library is installed, you may recompile and reinstall boost libraries using custom compilation flags::
+After the bzip2 library is installed, you may recompile and reinstall boost libraries using
+custom compilation flags::
./b2 --prefix=$BOOSTDIR cxxflags=-I$BOOSTDIR/include linkflags=-L$BOOSTDIR/lib install
-Alternatively, you can solve this particular problem by installing development package for bzip2 library (:red:`if you have root access`). For example, on Ubuntu 12.04 it would be the following command::
+Alternatively, you can solve this particular problem by installing the development package
+for the bzip2 library (:red:`if you have root access`). For example, on Ubuntu 12.04 it
+would be the following command::
sudo apt-get install libbz2-dev
@@ -97,7 +112,8 @@
./b2 --prefix=$BOOSTDIR
-The following commands should allow compilation and run of NS-3 simulations with custom install of boost libraries::
+The following commands should allow compiling and running NS-3 simulations with a custom
+install of boost libraries::
cd <ns-3>
./waf configure --boost-includes=$BOOSTDIR/include --boost-libs=$BOOSTDIR/lib --enable-examples --enable-ndn-plugins=topology,mobility
@@ -105,7 +121,9 @@
LD_LIBRARY_PATH=$BOOSTDIR/lib NS_LOG=ndn.Face:ndn.Consumer ./waf --run=ndn-simple
.. note::
- `LD_LIBRARY_PATH=$BOOSTDIR/lib` is necessary on Linux platform in order for the dynamic linker to find libraries installed in a location different from one of the folders specified in /etc/ld.so.conf.
+ ``LD_LIBRARY_PATH=$BOOSTDIR/lib`` is necessary on the Linux platform in order for the
+ dynamic linker to find libraries installed in a location different from one of the
+ folders specified in ``/etc/ld.so.conf``.
@@ -119,8 +137,8 @@
Waf: Entering directory `/ndnSIM/ns-3/build'
Could not find a task generator for the name 'ns3-visualizer'..
-Something is wrong with your python bindings and python bindings dependencies.
-Please follow the :ref:`requirements` section that lists what should be installed in order to run visualizer.
+Something is wrong with your python bindings or their dependencies. Please follow the
+:ref:`requirements` section, which lists what should be installed in order to run the
+visualizer.
Code questions
--------------
diff --git a/docs/source/getting-started.rst b/docs/source/getting-started.rst
index 23ff20d..0f0ee5a 100644
--- a/docs/source/getting-started.rst
+++ b/docs/source/getting-started.rst
@@ -4,9 +4,30 @@
Portability
------------
-ndnSIM has been successfully compiled and used under Ubuntu Linux 12.04 (only with boost
-libraries **1.48**), 14.04 (default version of boost), OS X 10.10 (Xcode 6.1.1, macports boost
-1.56-1.57).
+ndnSIM 2.0 has been successfully compiled and used on the following platforms:
+
+- Ubuntu Linux 12.04 (see the note)
+- Ubuntu Linux 14.04
+- OS X 10.10
+
+.. note::
+ ndnSIM currently cannot be compiled on Ubuntu Linux 12.04 with the packaged boost
+ libraries (there is an `issue with boost 1.48 and gcc 4.6
+ <https://svn.boost.org/trac/boost/ticket/6153>`_). It is still possible to compile
+ ndnSIM on this platform, but either the compiler or the boost libraries (or both) need
+ to be upgraded.
+
+ A more recent version of boost can be installed from the "Boost C++ Libraries" team PPA::
+
+ sudo apt-get install python-software-properties
+ sudo add-apt-repository ppa:boost-latest/ppa
+ sudo apt-get update
+ sudo apt-get install libboost1.55-all-dev
+
+ # add --boost-libs=/usr/lib/x86_64-linux-gnu to ./waf configure for ndn-cxx and ns3
+ # ./waf configure --boost-libs=/usr/lib/x86_64-linux-gnu
+
+ Make sure that all other versions of boost libraries (``-dev`` packages) are removed,
+ otherwise compilation will fail.
.. _requirements:
@@ -19,19 +40,12 @@
.. role:: red
.. note::
- :red:`!!! ndn-cxx and ndnSIM requires boost version at least 1.48.` Many linux distribution
+ :red:`!!! ndnSIM requires boost version at least 1.49.` Many Linux distributions
(Fedora 16, 17 at the time of this writing) ship an old version of boost, making it
impossible to compile ndnSIM out-of-the-box. Please install the latest version, following
:ref:`these simple instructions <Installing boost libraries>`.
.. note::
- :red:`For Ubuntu 12.04` Ubuntu 12.04 ships with two versions of boost libraries and it is
- known that if both are installed, then compilation of ndnSIM will most likely fail. Please
- install ``libboost1.48-dev-all`` package and uninstall ``libboost-dev-all``. If you want to
- install the latest version of boost libraries, then uninstall both ``libboost1.48-dev-all``
- and ``libboost-dev-all``, so the libraries do not interfere with each other.
-
-.. note::
!!! If you do not have root permissions to install boost, you can install it in your home
folder. However, you need to make sure that `libboost_iostreams` library is successfully
compiled and installed. Please refer to :ref:`the following example <Installing boost
@@ -43,7 +57,7 @@
dependencies should be installed. For example, in order to run `visualizer`_ module, the
following should be installed:
- * For Ubuntu (tested on Ubuntu 14.04, should work on later versions as well):
+ * For Ubuntu:
.. code-block:: bash
@@ -51,7 +65,7 @@
sudo apt-get install python-pygoocanvas python-gnome2
sudo apt-get install python-gnomedesktop python-rsvg ipython
- * For Fedora (tested on Fedora 16):
+ * For Fedora:
.. code-block:: bash
@@ -62,7 +76,7 @@
sudo yum install python-pip
sudo easy_install pygraphviz
- * For MacOS (macports):
+ * For OS X with MacPorts:
.. code-block:: bash
@@ -93,6 +107,9 @@
mkdir ndnSIM
cd ndnSIM
git clone https://github.com/named-data/ndn-cxx.git ndn-cxx
+ cd ndn-cxx
+ git checkout 81a6c5dea60cea97c60dab0d78576c0d3b4e29ed
+ cd ..
git clone https://github.com/cawka/ns-3-dev-ndnSIM.git ns-3
git clone https://github.com/cawka/pybindgen.git pybindgen
git clone https://github.com/named-data/ndnSIM.git ns-3/src/ndnSIM
diff --git a/examples/ndn-custom-apps/custom-app.cpp b/examples/ndn-custom-apps/custom-app.cpp
index 414efa2..9aad485 100644
--- a/examples/ndn-custom-apps/custom-app.cpp
+++ b/examples/ndn-custom-apps/custom-app.cpp
@@ -100,7 +100,7 @@
auto data = std::make_shared<ndn::Data>(interest->getName());
data->setFreshnessPeriod(ndn::time::milliseconds(1000));
- data->setContent(std::make_shared<::ndn::Buffer>(1024));
+ data->setContent(std::make_shared< ::ndn::Buffer>(1024));
ndn::StackHelper::getKeyChain().sign(*data);
NS_LOG_DEBUG("Sending Data packet for " << data->getName());
diff --git a/helper/boost-graph-ndn-global-routing-helper.hpp b/helper/boost-graph-ndn-global-routing-helper.hpp
index 18a1a44..fe160f3 100644
--- a/helper/boost-graph-ndn-global-routing-helper.hpp
+++ b/helper/boost-graph-ndn-global-routing-helper.hpp
@@ -182,9 +182,9 @@
typedef readable_property_map_tag category;
};
-const property_traits<EdgeWeights>::value_type WeightZero(0, 0, 0.0);
+const property_traits<EdgeWeights>::value_type WeightZero(nullptr, 0, 0.0);
const property_traits<EdgeWeights>::value_type
- WeightInf(0, std::numeric_limits<uint16_t>::max(), 0.0);
+ WeightInf(nullptr, std::numeric_limits<uint16_t>::max(), 0.0);
struct WeightCompare : public std::binary_function<property_traits<EdgeWeights>::reference,
property_traits<EdgeWeights>::reference, bool> {
@@ -220,7 +220,7 @@
operator()(std::tuple<std::shared_ptr<nfd::Face>, uint32_t, double> a,
property_traits<EdgeWeights>::reference b) const
{
- if (std::get<0>(a) == 0)
+ if (std::get<0>(a) == nullptr)
return std::make_tuple(std::get<0>(b), std::get<1>(a) + std::get<1>(b),
std::get<2>(a) + std::get<2>(b));
else
@@ -270,7 +270,7 @@
get(const boost::EdgeWeights&, ns3::ndn::GlobalRouter::Incidency& edge)
{
if (std::get<1>(edge) == 0)
- return property_traits<EdgeWeights>::reference(0, 0, 0.0);
+ return property_traits<EdgeWeights>::reference(nullptr, 0, 0.0);
else {
return property_traits<EdgeWeights>::reference(std::get<1>(edge),
static_cast<uint16_t>(
@@ -305,13 +305,31 @@
typedef read_write_property_map_tag category;
};
+} // boost
+
+namespace std {
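+// This specialization gives boost's shortest-path algorithms a usable
+// "infinite distance" value for the edge-weight tuple; the algorithms
+// obtain it via std::numeric_limits<D>::max().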
+template<>
+class numeric_limits<std::tuple<std::shared_ptr<nfd::Face>, uint32_t, double>>
+{
+public:
+ typedef std::tuple<std::shared_ptr<nfd::Face>, uint32_t, double> value;
+ static value
+ max()
+ {
+ return boost::WeightInf;
+ }
+};
+}
+
+namespace boost {
+
inline std::tuple<std::shared_ptr<nfd::Face>, uint32_t, double>
get(DistancesMap& map, ns3::Ptr<ns3::ndn::GlobalRouter> key)
{
boost::DistancesMap::iterator i = map.find(key);
if (i == map.end())
return std::tuple<std::shared_ptr<nfd::Face>, uint32_t,
- double>(0, std::numeric_limits<uint32_t>::max(), 0.0);
+ double>(nullptr, std::numeric_limits<uint32_t>::max(), 0.0);
else
return i->second;
}
diff --git a/helper/ndn-global-routing-helper.cpp b/helper/ndn-global-routing-helper.cpp
index e53c441..cf52c6f 100644
--- a/helper/ndn-global-routing-helper.cpp
+++ b/helper/ndn-global-routing-helper.cpp
@@ -213,7 +213,7 @@
}
void
-GlobalRoutingHelper::CalculateRoutes(bool invalidatedRoutes /* = true*/)
+GlobalRoutingHelper::CalculateRoutes()
{
/**
* Implementation of route calculation is heavily based on Boost Graph Library
@@ -253,19 +253,6 @@
Ptr<L3Protocol> L3protocol = (*node)->GetObject<L3Protocol>();
shared_ptr<nfd::Forwarder> forwarder = L3protocol->getForwarder();
- if (invalidatedRoutes) {
- std::vector<::nfd::fib::NextHop> NextHopList;
- for (nfd::Fib::const_iterator fibIt = forwarder->getFib().begin();
- fibIt != forwarder->getFib().end();) {
- NextHopList.clear();
- NextHopList = fibIt->getNextHops();
- ++fibIt;
- for (int i = 0; i < NextHopList.size(); i++) {
- NextHopList[i].setCost(std::numeric_limits<uint64_t>::max());
- }
- }
- }
-
NS_LOG_DEBUG("Reachability from Node: " << source->GetObject<Node>()->GetId());
for (const auto& dist : distances) {
if (dist.first == source)
@@ -291,7 +278,7 @@
}
void
-GlobalRoutingHelper::CalculateAllPossibleRoutes(bool invalidatedRoutes /* = true*/)
+GlobalRoutingHelper::CalculateAllPossibleRoutes()
{
/**
* Implementation of route calculation is heavily based on Boost Graph Library
@@ -318,19 +305,6 @@
Ptr<L3Protocol> L3protocol = (*node)->GetObject<L3Protocol>();
shared_ptr<nfd::Forwarder> forwarder = L3protocol->getForwarder();
- if (invalidatedRoutes) {
- std::vector<::nfd::fib::NextHop> NextHopList;
- for (nfd::Fib::const_iterator fibIt = forwarder->getFib().begin();
- fibIt != forwarder->getFib().end();) {
- NextHopList.clear();
- NextHopList = fibIt->getNextHops();
- ++fibIt;
- for (int i = 0; i < NextHopList.size(); i++) {
- NextHopList[i].setCost(std::numeric_limits<uint64_t>::max());
- }
- }
- }
-
NS_LOG_DEBUG("Reachability from Node: " << source->GetObject<Node>()->GetId() << " ("
<< Names::FindName(source->GetObject<Node>()) << ")");
diff --git a/helper/ndn-global-routing-helper.hpp b/helper/ndn-global-routing-helper.hpp
index 0e65810..b405838 100644
--- a/helper/ndn-global-routing-helper.hpp
+++ b/helper/ndn-global-routing-helper.hpp
@@ -96,26 +96,20 @@
/**
* @brief Calculate for every node shortest path trees and install routes to all prefix origins
- *
- * @param invalidatedRoutes flag indicating whether existing routes should be invalidated or keps
- *as is
*/
static void
- CalculateRoutes(bool invalidatedRoutes = true);
+ CalculateRoutes();
/**
* @brief Calculate all possible next-hop independent alternative routes
*
- * @param invalidatedRoutes flag indicating whether existing routes should be invalidated or keps
- *as is
- *
* Refer to the implementation for more details.
*
* Note that this method is highly experimental and should be used with caution (very time
*consuming).
*/
static void
- CalculateAllPossibleRoutes(bool invalidatedRoutes = true);
+ CalculateAllPossibleRoutes();
private:
void
diff --git a/model/ndn-header.cpp b/model/ndn-header.cpp
index 2807c29..929b001 100644
--- a/model/ndn-header.cpp
+++ b/model/ndn-header.cpp
@@ -82,14 +82,14 @@
template<class Pkt>
void
-PacketHeader<Pkt>::Serialize(Buffer::Iterator start) const
+PacketHeader<Pkt>::Serialize(ns3::Buffer::Iterator start) const
{
start.Write(m_packet->wireEncode().wire(), m_packet->wireEncode().size());
}
class Ns3BufferIteratorSource : public io::source {
public:
- Ns3BufferIteratorSource(Buffer::Iterator& is)
+ Ns3BufferIteratorSource(ns3::Buffer::Iterator& is)
: m_is(is)
{
}
@@ -110,12 +110,12 @@
}
private:
- Buffer::Iterator& m_is;
+ ns3::Buffer::Iterator& m_is;
};
template<class Pkt>
uint32_t
-PacketHeader<Pkt>::Deserialize(Buffer::Iterator start)
+PacketHeader<Pkt>::Deserialize(ns3::Buffer::Iterator start)
{
auto packet = make_shared<Pkt>();
io::stream<Ns3BufferIteratorSource> is(start);
diff --git a/model/ndn-ns3.hpp b/model/ndn-ns3.hpp
index dab01f4..b292fca 100644
--- a/model/ndn-ns3.hpp
+++ b/model/ndn-ns3.hpp
@@ -22,6 +22,7 @@
#include "ns3/packet.h"
#include "ns3/ptr.h"
+#include <memory>
namespace ns3 {
namespace ndn {
diff --git a/utils/trie/trie.hpp b/utils/trie/trie.hpp
index 28c075e..6d11ecb 100644
--- a/utils/trie/trie.hpp
+++ b/utils/trie/trie.hpp
@@ -152,7 +152,7 @@
// container
, children_(bucket_traits(buckets_.get(), bucketSize_))
, payload_(PayloadTraits::empty_payload)
- , parent_(0)
+ , parent_(nullptr)
{
}
diff --git a/wscript b/wscript
index 95b16a1..e9ef718 100644
--- a/wscript
+++ b/wscript
@@ -11,8 +11,16 @@
def required_boost_libs(conf):
conf.env.REQUIRED_BOOST_LIBS += REQUIRED_BOOST_LIBS
+def options(opt):
+ opt.load(['dependency-checker',
+ 'doxygen', 'sphinx_build', 'type_traits', 'compiler-features'],
+ tooldir=['%s/.waf-tools' % opt.path.abspath()])
+
def configure(conf):
- conf.env['ENABLE_NDNSIM']=False;
+ conf.load(['dependency-checker',
+ 'doxygen', 'sphinx_build', 'type_traits', 'compiler-features'])
+
+ conf.env['ENABLE_NDNSIM']=False
conf.check_cfg(package='libndn-cxx', args=['--cflags', '--libs'],
uselib_store='NDN_CXX', mandatory=True)