Make decodeName and decodeInterest take const unsigned char *input, unsigned int inputLength instead of std::vector
diff --git a/ndn-cpp/encoding/BinaryXMLWireFormat.hpp b/ndn-cpp/encoding/BinaryXMLWireFormat.hpp
index f8cf1eb..c757103 100644
--- a/ndn-cpp/encoding/BinaryXMLWireFormat.hpp
+++ b/ndn-cpp/encoding/BinaryXMLWireFormat.hpp
@@ -15,10 +15,10 @@
class BinaryXMLWireFormat : public WireFormat {
public:
virtual void encodeName(Name &name, std::vector<unsigned char> &output);
- virtual void decodeName(Name &name, std::vector<unsigned char> &input);
+ virtual void decodeName(Name &name, const unsigned char *input, unsigned int inputLength);
virtual void encodeInterest(Interest &interest, std::vector<unsigned char> &output);
- virtual void decodeInterest(Interest &interest, std::vector<unsigned char> &input);
+ virtual void decodeInterest(Interest &interest, const unsigned char *input, unsigned int inputLength);
static BinaryXMLWireFormat &instance() { return instance_; }