Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Projects: scs/ipaaca, ramin.yaghoubzadeh/ipaaca

Showing with 11215 additions and 2959 deletions
@@ -30,6 +30,10 @@
//
///////////////////////////////////////////////////////////////////////////////
// The above software in this distribution may have been modified by
// THL A29 Limited ("Tencent Modifications").
// All Tencent Modifications are Copyright (C) 2015 THL A29 Limited.
#ifndef _MSC_VER // [
#error "Use this header only with Microsoft Visual C++ compilers!"
#endif // _MSC_VER ]
@@ -85,14 +89,14 @@
#include <limits.h>
// For Visual Studio 6 in C++ mode and for many Visual Studio versions when
// compiling for ARM we have to wrap <wchar.h> include with 'extern "C++" {}'
// or compiler would give many errors like this:
//     error C2733: second C linkage of overloaded function 'wmemchr' not allowed
#if defined(__cplusplus) && !defined(_M_ARM)
extern "C" {
#endif
#   include <wchar.h>
#if defined(__cplusplus) && !defined(_M_ARM)
}
#endif
...
// Tencent is pleased to support the open source community by making RapidJSON available.
//
// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
//
// Licensed under the MIT License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef RAPIDJSON_OSTREAMWRAPPER_H_
#define RAPIDJSON_OSTREAMWRAPPER_H_
#include "stream.h"
#include <iosfwd>
#ifdef __clang__
RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_OFF(padded)
#endif
RAPIDJSON_NAMESPACE_BEGIN
//! Wrapper of \c std::basic_ostream into RapidJSON's Stream concept.
/*!
The classes that can be wrapped include, but are not limited to:
- \c std::ostringstream
- \c std::stringstream
- \c std::wostringstream
- \c std::wstringstream
- \c std::ofstream
- \c std::fstream
- \c std::wofstream
- \c std::wfstream
\tparam StreamType Class derived from \c std::basic_ostream.
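\b Example
A minimal usage sketch (illustrative only; assumes <fstream> and "rapidjson/writer.h" are included and \c d is an existing \c Document):
\code
std::ofstream ofs("output.json");
OStreamWrapper osw(ofs);
Writer<OStreamWrapper> writer(osw);
d.Accept(writer);
\endcode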
*/
template <typename StreamType>
class BasicOStreamWrapper {
public:
typedef typename StreamType::char_type Ch;
BasicOStreamWrapper(StreamType& stream) : stream_(stream) {}
void Put(Ch c) {
stream_.put(c);
}
void Flush() {
stream_.flush();
}
// Not implemented
char Peek() const { RAPIDJSON_ASSERT(false); return 0; }
char Take() { RAPIDJSON_ASSERT(false); return 0; }
size_t Tell() const { RAPIDJSON_ASSERT(false); return 0; }
char* PutBegin() { RAPIDJSON_ASSERT(false); return 0; }
size_t PutEnd(char*) { RAPIDJSON_ASSERT(false); return 0; }
private:
BasicOStreamWrapper(const BasicOStreamWrapper&);
BasicOStreamWrapper& operator=(const BasicOStreamWrapper&);
StreamType& stream_;
};
typedef BasicOStreamWrapper<std::ostream> OStreamWrapper;
typedef BasicOStreamWrapper<std::wostream> WOStreamWrapper;
#ifdef __clang__
RAPIDJSON_DIAG_POP
#endif
RAPIDJSON_NAMESPACE_END
#endif // RAPIDJSON_OSTREAMWRAPPER_H_
// Tencent is pleased to support the open source community by making RapidJSON available.
//
// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
//
// Licensed under the MIT License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef RAPIDJSON_POINTER_H_
#define RAPIDJSON_POINTER_H_
#include "document.h"
#include "uri.h"
#include "internal/itoa.h"
#ifdef __clang__
RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_OFF(switch-enum)
#elif defined(_MSC_VER)
RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_OFF(4512) // assignment operator could not be generated
#endif
RAPIDJSON_NAMESPACE_BEGIN
static const SizeType kPointerInvalidIndex = ~SizeType(0); //!< Represents an invalid index in GenericPointer::Token
//! Error code of parsing.
/*! \ingroup RAPIDJSON_ERRORS
\see GenericPointer::GenericPointer, GenericPointer::GetParseErrorCode
*/
enum PointerParseErrorCode {
kPointerParseErrorNone = 0, //!< The parse is successful
kPointerParseErrorTokenMustBeginWithSolidus, //!< A token must begin with a '/'
kPointerParseErrorInvalidEscape, //!< Invalid escape
kPointerParseErrorInvalidPercentEncoding, //!< Invalid percent encoding in URI fragment
kPointerParseErrorCharacterMustPercentEncode //!< A character must be percent-encoded in a URI fragment
};
///////////////////////////////////////////////////////////////////////////////
// GenericPointer
//! Represents a JSON Pointer. Use Pointer for UTF8 encoding and default allocator.
/*!
This class implements RFC 6901 "JavaScript Object Notation (JSON) Pointer"
(https://tools.ietf.org/html/rfc6901).
A JSON pointer is for identifying a specific value in a JSON document
(GenericDocument). It can simplify coding of DOM tree manipulation, because it
can access values at multiple levels of the DOM tree with a single API call.
After it parses a string representation (e.g. "/foo/0") or URI fragment
representation (e.g. "#/foo/0") into its internal representation (tokens),
it can be used to resolve a specific value in multiple documents, or in
sub-trees of documents.
Contrary to GenericValue, Pointer can be copy constructed and copy assigned.
Apart from assignment, a Pointer cannot be modified after construction.
Although Pointer is very convenient, please be aware that constructing a Pointer
involves parsing and dynamic memory allocation. A special constructor with user-
supplied tokens eliminates these costs.
GenericPointer depends on GenericDocument and GenericValue.
\tparam ValueType The value type of the DOM tree. E.g. GenericValue<UTF8<> >
\tparam Allocator The allocator type for allocating memory for internal representation.
\note GenericPointer uses the same encoding as ValueType.
However, the Allocator of GenericPointer is independent of the Allocator of the Value.
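\b Example
A minimal usage sketch (illustrative only; the JSON content is hypothetical):
\code
Document d;
d.Parse("{\"foo\":[123, 456]}");
Pointer p("/foo/0");
if (Value* v = p.Get(d))         // resolves to the first element of "foo"
    v->SetInt(v->GetInt() + 1);  // modify the resolved value in place
\endcode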
*/
template <typename ValueType, typename Allocator = CrtAllocator>
class GenericPointer {
public:
typedef typename ValueType::EncodingType EncodingType; //!< Encoding type from Value
typedef typename ValueType::Ch Ch; //!< Character type from Value
typedef GenericUri<ValueType, Allocator> UriType;
//! A token is the basic unit of the internal representation.
/*!
A JSON pointer string representation "/foo/123" is parsed to two tokens:
"foo" and 123. 123 will be represented in both numeric form and string form.
They are resolved according to the actual value type (object or array).
For tokens that are not numbers, or whose numeric value is out of bounds
(greater than the limit of SizeType), only the string form is used
(i.e. the token's index will be equal to kPointerInvalidIndex).
This struct is public so that users can create a Pointer without parsing and
allocation, using a special constructor.
*/
struct Token {
const Ch* name; //!< Name of the token. It is null-terminated but may also contain null characters within.
SizeType length; //!< Length of the name.
SizeType index; //!< A valid array index, if it is not equal to kPointerInvalidIndex.
};
//!@name Constructors and destructor.
//@{
//! Default constructor.
GenericPointer(Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) {}
//! Constructor that parses a string or URI fragment representation.
/*!
\param source A null-terminated string or URI fragment representation of a JSON pointer.
\param allocator User supplied allocator for this pointer. If no allocator is provided, it creates a self-owned one.
*/
explicit GenericPointer(const Ch* source, Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) {
Parse(source, internal::StrLen(source));
}
#if RAPIDJSON_HAS_STDSTRING
//! Constructor that parses a string or URI fragment representation.
/*!
\param source A string or URI fragment representation of JSON pointer.
\param allocator User supplied allocator for this pointer. If no allocator is provided, it creates a self-owned one.
\note Requires the definition of the preprocessor symbol \ref RAPIDJSON_HAS_STDSTRING.
*/
explicit GenericPointer(const std::basic_string<Ch>& source, Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) {
Parse(source.c_str(), source.size());
}
#endif
//! Constructor that parses a string or URI fragment representation, with length of the source string.
/*!
\param source A string or URI fragment representation of JSON pointer.
\param length Length of source.
\param allocator User supplied allocator for this pointer. If no allocator is provided, it creates a self-owned one.
\note Slightly faster than the overload without length.
*/
GenericPointer(const Ch* source, size_t length, Allocator* allocator = 0) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) {
Parse(source, length);
}
//! Constructor with user-supplied tokens.
/*!
This constructor lets the user supply a constant array of tokens.
It avoids the parsing process and eliminates allocation.
This is preferred for memory-constrained environments.
\param tokens A constant array of tokens representing the JSON pointer.
\param tokenCount Number of tokens.
\b Example
\code
#define NAME(s) { s, sizeof(s) / sizeof(s[0]) - 1, kPointerInvalidIndex }
#define INDEX(i) { #i, sizeof(#i) - 1, i }
static const Pointer::Token kTokens[] = { NAME("foo"), INDEX(123) };
static const Pointer p(kTokens, sizeof(kTokens) / sizeof(kTokens[0]));
// Equivalent to static const Pointer p("/foo/123");
#undef NAME
#undef INDEX
\endcode
*/
GenericPointer(const Token* tokens, size_t tokenCount) : allocator_(), ownAllocator_(), nameBuffer_(), tokens_(const_cast<Token*>(tokens)), tokenCount_(tokenCount), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) {}
//! Copy constructor.
GenericPointer(const GenericPointer& rhs) : allocator_(), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) {
*this = rhs;
}
//! Copy constructor.
GenericPointer(const GenericPointer& rhs, Allocator* allocator) : allocator_(allocator), ownAllocator_(), nameBuffer_(), tokens_(), tokenCount_(), parseErrorOffset_(), parseErrorCode_(kPointerParseErrorNone) {
*this = rhs;
}
//! Destructor.
~GenericPointer() {
if (nameBuffer_) // If user-supplied tokens constructor is used, nameBuffer_ is nullptr and tokens_ are not deallocated.
Allocator::Free(tokens_);
RAPIDJSON_DELETE(ownAllocator_);
}
//! Assignment operator.
GenericPointer& operator=(const GenericPointer& rhs) {
if (this != &rhs) {
// Do not delete ownAllocator_
if (nameBuffer_)
Allocator::Free(tokens_);
tokenCount_ = rhs.tokenCount_;
parseErrorOffset_ = rhs.parseErrorOffset_;
parseErrorCode_ = rhs.parseErrorCode_;
if (rhs.nameBuffer_)
CopyFromRaw(rhs); // Normally parsed tokens.
else {
tokens_ = rhs.tokens_; // User supplied const tokens.
nameBuffer_ = 0;
}
}
return *this;
}
//! Swap the content of this pointer with another.
/*!
\param other The pointer to swap with.
\note Constant complexity.
*/
GenericPointer& Swap(GenericPointer& other) RAPIDJSON_NOEXCEPT {
internal::Swap(allocator_, other.allocator_);
internal::Swap(ownAllocator_, other.ownAllocator_);
internal::Swap(nameBuffer_, other.nameBuffer_);
internal::Swap(tokens_, other.tokens_);
internal::Swap(tokenCount_, other.tokenCount_);
internal::Swap(parseErrorOffset_, other.parseErrorOffset_);
internal::Swap(parseErrorCode_, other.parseErrorCode_);
return *this;
}
//! free-standing swap function helper
/*!
Helper function to enable support for common swap implementation pattern based on \c std::swap:
\code
void swap(MyClass& a, MyClass& b) {
using std::swap;
swap(a.pointer, b.pointer);
// ...
}
\endcode
\see Swap()
*/
friend inline void swap(GenericPointer& a, GenericPointer& b) RAPIDJSON_NOEXCEPT { a.Swap(b); }
//@}
//!@name Append token
//@{
//! Append a token and return a new Pointer
/*!
\param token Token to be appended.
\param allocator Allocator for the newly returned Pointer.
\return A new Pointer with appended token.
*/
GenericPointer Append(const Token& token, Allocator* allocator = 0) const {
GenericPointer r;
r.allocator_ = allocator;
Ch *p = r.CopyFromRaw(*this, 1, token.length + 1);
std::memcpy(p, token.name, (token.length + 1) * sizeof(Ch));
r.tokens_[tokenCount_].name = p;
r.tokens_[tokenCount_].length = token.length;
r.tokens_[tokenCount_].index = token.index;
return r;
}
//! Append a name token with length, and return a new Pointer
/*!
\param name Name to be appended.
\param length Length of name.
\param allocator Allocator for the newly returned Pointer.
\return A new Pointer with appended token.
*/
GenericPointer Append(const Ch* name, SizeType length, Allocator* allocator = 0) const {
Token token = { name, length, kPointerInvalidIndex };
return Append(token, allocator);
}
//! Append a name token without length, and return a new Pointer
/*!
\param name Name (const Ch*) to be appended.
\param allocator Allocator for the newly returned Pointer.
\return A new Pointer with appended token.
*/
template <typename T>
RAPIDJSON_DISABLEIF_RETURN((internal::NotExpr<internal::IsSame<typename internal::RemoveConst<T>::Type, Ch> >), (GenericPointer))
Append(T* name, Allocator* allocator = 0) const {
return Append(name, internal::StrLen(name), allocator);
}
#if RAPIDJSON_HAS_STDSTRING
//! Append a name token, and return a new Pointer
/*!
\param name Name to be appended.
\param allocator Allocator for the newly returned Pointer.
\return A new Pointer with appended token.
*/
GenericPointer Append(const std::basic_string<Ch>& name, Allocator* allocator = 0) const {
return Append(name.c_str(), static_cast<SizeType>(name.size()), allocator);
}
#endif
//! Append an index token, and return a new Pointer
/*!
\param index Index to be appended.
\param allocator Allocator for the newly returned Pointer.
\return A new Pointer with appended token.
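\b Example
An illustrative sketch of chaining appends (equivalent to \c Pointer("/foo/123")):
\code
Pointer p = Pointer("/foo").Append(123u);  // 123u selects the SizeType (index) overload
\endcode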
*/
GenericPointer Append(SizeType index, Allocator* allocator = 0) const {
char buffer[21];
char* end = sizeof(SizeType) == 4 ? internal::u32toa(index, buffer) : internal::u64toa(index, buffer);
SizeType length = static_cast<SizeType>(end - buffer);
buffer[length] = '\0';
if (sizeof(Ch) == 1) {
Token token = { reinterpret_cast<Ch*>(buffer), length, index };
return Append(token, allocator);
}
else {
Ch name[21];
for (size_t i = 0; i <= length; i++)
name[i] = static_cast<Ch>(buffer[i]);
Token token = { name, length, index };
return Append(token, allocator);
}
}
//! Append a token by value, and return a new Pointer
/*!
\param token Token to be appended.
\param allocator Allocator for the newly returned Pointer.
\return A new Pointer with appended token.
*/
GenericPointer Append(const ValueType& token, Allocator* allocator = 0) const {
if (token.IsString())
return Append(token.GetString(), token.GetStringLength(), allocator);
else {
RAPIDJSON_ASSERT(token.IsUint64());
RAPIDJSON_ASSERT(token.GetUint64() <= SizeType(~0));
return Append(static_cast<SizeType>(token.GetUint64()), allocator);
}
}
//!@name Handling Parse Error
//@{
//! Check whether this is a valid pointer.
bool IsValid() const { return parseErrorCode_ == kPointerParseErrorNone; }
//! Get the parsing error offset in code units.
size_t GetParseErrorOffset() const { return parseErrorOffset_; }
//! Get the parsing error code.
PointerParseErrorCode GetParseErrorCode() const { return parseErrorCode_; }
//@}
//! Get the allocator of this pointer.
Allocator& GetAllocator() { return *allocator_; }
//!@name Tokens
//@{
//! Get the token array (const version only).
const Token* GetTokens() const { return tokens_; }
//! Get the number of tokens.
size_t GetTokenCount() const { return tokenCount_; }
//@}
//!@name Equality/inequality operators
//@{
//! Equality operator.
/*!
\note When either pointer is invalid, this always returns false.
*/
bool operator==(const GenericPointer& rhs) const {
if (!IsValid() || !rhs.IsValid() || tokenCount_ != rhs.tokenCount_)
return false;
for (size_t i = 0; i < tokenCount_; i++) {
if (tokens_[i].index != rhs.tokens_[i].index ||
tokens_[i].length != rhs.tokens_[i].length ||
(tokens_[i].length != 0 && std::memcmp(tokens_[i].name, rhs.tokens_[i].name, sizeof(Ch)* tokens_[i].length) != 0))
{
return false;
}
}
return true;
}
//! Inequality operator.
/*!
\note When either pointer is invalid, this always returns true.
*/
bool operator!=(const GenericPointer& rhs) const { return !(*this == rhs); }
//! Less than operator.
/*!
\note Invalid pointers are always greater than valid ones.
*/
bool operator<(const GenericPointer& rhs) const {
if (!IsValid())
return false;
if (!rhs.IsValid())
return true;
if (tokenCount_ != rhs.tokenCount_)
return tokenCount_ < rhs.tokenCount_;
for (size_t i = 0; i < tokenCount_; i++) {
if (tokens_[i].index != rhs.tokens_[i].index)
return tokens_[i].index < rhs.tokens_[i].index;
if (tokens_[i].length != rhs.tokens_[i].length)
return tokens_[i].length < rhs.tokens_[i].length;
if (int cmp = std::memcmp(tokens_[i].name, rhs.tokens_[i].name, sizeof(Ch) * tokens_[i].length))
return cmp < 0;
}
return false;
}
//@}
//!@name Stringify
//@{
//! Stringify the pointer into string representation.
/*!
\tparam OutputStream Type of output stream.
\param os The output stream.
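\b Example
A sketch using \c StringBuffer as the output stream (illustrative; requires "rapidjson/stringbuffer.h"):
\code
Pointer p("/foo/0");
StringBuffer sb;
p.Stringify(sb);    // sb.GetString() is "/foo/0"
\endcode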
*/
template<typename OutputStream>
bool Stringify(OutputStream& os) const {
return Stringify<false, OutputStream>(os);
}
//! Stringify the pointer into URI fragment representation.
/*!
\tparam OutputStream Type of output stream.
\param os The output stream.
*/
template<typename OutputStream>
bool StringifyUriFragment(OutputStream& os) const {
return Stringify<true, OutputStream>(os);
}
//@}
//!@name Create value
//@{
//! Create a value in a subtree.
/*!
If the value does not exist, it creates all parent values and a JSON Null value.
So it always succeeds and returns the newly created or existing value.
Bear in mind that it may change the types of parents according to the tokens, so it
potentially removes previously stored values. For example, if a document
was an array, and "/foo" is used to create a value, then the document
will be changed to an object, and all existing array elements are lost.
\param root Root value of a DOM subtree to be resolved. It can be any value other than the document root.
\param allocator Allocator for creating the values if the specified value or its parents do not exist.
\param alreadyExist If non-null, it stores whether the resolved value already existed.
\return The resolved value: newly created (a JSON Null value) or already existing.
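\b Example
A sketch of the creation side effects (illustrative; assumes an initially null \c Document d):
\code
Document d;
Pointer("/a/0").Create(d, d.GetAllocator());  // d becomes {"a":[null]}
\endcode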
*/
ValueType& Create(ValueType& root, typename ValueType::AllocatorType& allocator, bool* alreadyExist = 0) const {
RAPIDJSON_ASSERT(IsValid());
ValueType* v = &root;
bool exist = true;
for (const Token *t = tokens_; t != tokens_ + tokenCount_; ++t) {
if (v->IsArray() && t->name[0] == '-' && t->length == 1) {
v->PushBack(ValueType().Move(), allocator);
v = &((*v)[v->Size() - 1]);
exist = false;
}
else {
if (t->index == kPointerInvalidIndex) { // must be object name
if (!v->IsObject())
v->SetObject(); // Change to Object
}
else { // object name or array index
if (!v->IsArray() && !v->IsObject())
v->SetArray(); // Change to Array
}
if (v->IsArray()) {
if (t->index >= v->Size()) {
v->Reserve(t->index + 1, allocator);
while (t->index >= v->Size())
v->PushBack(ValueType().Move(), allocator);
exist = false;
}
v = &((*v)[t->index]);
}
else {
typename ValueType::MemberIterator m = v->FindMember(GenericValue<EncodingType>(GenericStringRef<Ch>(t->name, t->length)));
if (m == v->MemberEnd()) {
v->AddMember(ValueType(t->name, t->length, allocator).Move(), ValueType().Move(), allocator);
m = v->MemberEnd();
v = &(--m)->value; // Assumes AddMember() appends at the end
exist = false;
}
else
v = &m->value;
}
}
}
if (alreadyExist)
*alreadyExist = exist;
return *v;
}
//! Creates a value in a document.
/*!
\param document A document to be resolved.
\param alreadyExist If non-null, it stores whether the resolved value already existed.
\return The resolved value: newly created or already existing.
*/
template <typename stackAllocator>
ValueType& Create(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, bool* alreadyExist = 0) const {
return Create(document, document.GetAllocator(), alreadyExist);
}
//@}
//!@name Compute URI
//@{
//! Compute the in-scope URI for a subtree.
// For use with JSON pointers into JSON schema documents.
/*!
\param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root.
\param rootUri Root URI
\param unresolvedTokenIndex If the pointer cannot resolve a token in the pointer, this parameter receives the index of the unresolved token.
\param allocator Allocator for Uris
\return Uri if it can be resolved. Otherwise null.
\note
There are only 3 situations when a URI cannot be resolved:
1. A value in the path is neither an array nor an object.
2. An object value does not contain the token.
3. A token is out of range of an array value.
Use unresolvedTokenIndex to retrieve the token index.
*/
UriType GetUri(ValueType& root, const UriType& rootUri, size_t* unresolvedTokenIndex = 0, Allocator* allocator = 0) const {
static const Ch kIdString[] = { 'i', 'd', '\0' };
static const ValueType kIdValue(kIdString, 2);
UriType base = UriType(rootUri, allocator);
RAPIDJSON_ASSERT(IsValid());
ValueType* v = &root;
for (const Token *t = tokens_; t != tokens_ + tokenCount_; ++t) {
switch (v->GetType()) {
case kObjectType:
{
// See if we have an id, and if so resolve with the current base
typename ValueType::MemberIterator m = v->FindMember(kIdValue);
if (m != v->MemberEnd() && (m->value).IsString()) {
UriType here = UriType(m->value, allocator).Resolve(base, allocator);
base = here;
}
m = v->FindMember(GenericValue<EncodingType>(GenericStringRef<Ch>(t->name, t->length)));
if (m == v->MemberEnd())
break;
v = &m->value;
}
continue;
case kArrayType:
if (t->index == kPointerInvalidIndex || t->index >= v->Size())
break;
v = &((*v)[t->index]);
continue;
default:
break;
}
// Error: unresolved token
if (unresolvedTokenIndex)
*unresolvedTokenIndex = static_cast<size_t>(t - tokens_);
return UriType(allocator);
}
return base;
}
UriType GetUri(const ValueType& root, const UriType& rootUri, size_t* unresolvedTokenIndex = 0, Allocator* allocator = 0) const {
return GetUri(const_cast<ValueType&>(root), rootUri, unresolvedTokenIndex, allocator);
}
//!@name Query value
//@{
//! Query a value in a subtree.
/*!
\param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root.
\param unresolvedTokenIndex If the pointer cannot resolve a token in the pointer, this parameter receives the index of the unresolved token.
\return Pointer to the value if it can be resolved. Otherwise null.
\note
There are only 3 situations when a value cannot be resolved:
1. A value in the path is neither an array nor an object.
2. An object value does not contain the token.
3. A token is out of range of an array value.
Use unresolvedTokenIndex to retrieve the token index.
*/
ValueType* Get(ValueType& root, size_t* unresolvedTokenIndex = 0) const {
RAPIDJSON_ASSERT(IsValid());
ValueType* v = &root;
for (const Token *t = tokens_; t != tokens_ + tokenCount_; ++t) {
switch (v->GetType()) {
case kObjectType:
{
typename ValueType::MemberIterator m = v->FindMember(GenericValue<EncodingType>(GenericStringRef<Ch>(t->name, t->length)));
if (m == v->MemberEnd())
break;
v = &m->value;
}
continue;
case kArrayType:
if (t->index == kPointerInvalidIndex || t->index >= v->Size())
break;
v = &((*v)[t->index]);
continue;
default:
break;
}
// Error: unresolved token
if (unresolvedTokenIndex)
*unresolvedTokenIndex = static_cast<size_t>(t - tokens_);
return 0;
}
return v;
}
//! Query a const value in a const subtree.
/*!
\param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root.
\return Pointer to the value if it can be resolved. Otherwise null.
*/
const ValueType* Get(const ValueType& root, size_t* unresolvedTokenIndex = 0) const {
return Get(const_cast<ValueType&>(root), unresolvedTokenIndex);
}
//@}
//!@name Query a value with default
//@{
//! Query a value in a subtree with default value.
/*!
Similar to Get(), but if the specified value does not exist, it creates all parents and clones the default value.
As a result this function always succeeds.
\param root Root value of a DOM sub-tree to be resolved. It can be any value other than the document root.
\param defaultValue Default value to be cloned if the value does not exist.
\param allocator Allocator for creating the values if the specified value or its parents do not exist.
\see Create()
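\b Example
A sketch of the default-value behaviour (illustrative; "/retries" and the default 3 are hypothetical):
\code
Document d;
d.Parse("{}");
// The first call creates /retries with the default; later calls return the existing value.
Value& retries = Pointer("/retries").GetWithDefault(d, 3, d.GetAllocator());
\endcode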
*/
ValueType& GetWithDefault(ValueType& root, const ValueType& defaultValue, typename ValueType::AllocatorType& allocator) const {
bool alreadyExist;
ValueType& v = Create(root, allocator, &alreadyExist);
return alreadyExist ? v : v.CopyFrom(defaultValue, allocator);
}
//! Query a value in a subtree with default null-terminated string.
ValueType& GetWithDefault(ValueType& root, const Ch* defaultValue, typename ValueType::AllocatorType& allocator) const {
bool alreadyExist;
ValueType& v = Create(root, allocator, &alreadyExist);
return alreadyExist ? v : v.SetString(defaultValue, allocator);
}
#if RAPIDJSON_HAS_STDSTRING
//! Query a value in a subtree with default std::basic_string.
ValueType& GetWithDefault(ValueType& root, const std::basic_string<Ch>& defaultValue, typename ValueType::AllocatorType& allocator) const {
bool alreadyExist;
ValueType& v = Create(root, allocator, &alreadyExist);
return alreadyExist ? v : v.SetString(defaultValue, allocator);
}
#endif
//! Query a value in a subtree with default primitive value.
/*!
\tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t, \c bool
*/
template <typename T>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T>, internal::IsGenericValue<T> >), (ValueType&))
GetWithDefault(ValueType& root, T defaultValue, typename ValueType::AllocatorType& allocator) const {
return GetWithDefault(root, ValueType(defaultValue).Move(), allocator);
}
//! Query a value in a document with default value.
template <typename stackAllocator>
ValueType& GetWithDefault(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, const ValueType& defaultValue) const {
return GetWithDefault(document, defaultValue, document.GetAllocator());
}
//! Query a value in a document with default null-terminated string.
template <typename stackAllocator>
ValueType& GetWithDefault(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, const Ch* defaultValue) const {
return GetWithDefault(document, defaultValue, document.GetAllocator());
}
#if RAPIDJSON_HAS_STDSTRING
//! Query a value in a document with default std::basic_string.
template <typename stackAllocator>
ValueType& GetWithDefault(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, const std::basic_string<Ch>& defaultValue) const {
return GetWithDefault(document, defaultValue, document.GetAllocator());
}
#endif
//! Query a value in a document with default primitive value.
/*!
\tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t, \c bool
*/
template <typename T, typename stackAllocator>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T>, internal::IsGenericValue<T> >), (ValueType&))
GetWithDefault(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, T defaultValue) const {
return GetWithDefault(document, defaultValue, document.GetAllocator());
}
//@}
//!@name Set a value
//@{
//! Set a value in a subtree, with move semantics.
/*!
It creates all parents if they do not exist or if their types differ from the tokens.
So this function always succeeds but potentially removes existing values.
\param root Root value of a DOM sub-tree to be resolved. It can be any value other than the document root.
\param value Value to be set.
\param allocator Allocator for creating the values if the specified value or its parents do not exist.
\see Create()
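\b Example
A sketch of move-semantics Set (illustrative; note that \c v is emptied by the move):
\code
Document d;
Value v("hello", d.GetAllocator());
Pointer("/greeting").Set(d, v, d.GetAllocator());  // d becomes {"greeting":"hello"}
\endcode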
*/
ValueType& Set(ValueType& root, ValueType& value, typename ValueType::AllocatorType& allocator) const {
return Create(root, allocator) = value;
}
//! Set a value in a subtree, with copy semantics.
ValueType& Set(ValueType& root, const ValueType& value, typename ValueType::AllocatorType& allocator) const {
return Create(root, allocator).CopyFrom(value, allocator);
}
//! Set a null-terminated string in a subtree.
ValueType& Set(ValueType& root, const Ch* value, typename ValueType::AllocatorType& allocator) const {
return Create(root, allocator) = ValueType(value, allocator).Move();
}
#if RAPIDJSON_HAS_STDSTRING
//! Set a std::basic_string in a subtree.
ValueType& Set(ValueType& root, const std::basic_string<Ch>& value, typename ValueType::AllocatorType& allocator) const {
return Create(root, allocator) = ValueType(value, allocator).Move();
}
#endif
//! Set a primitive value in a subtree.
/*!
\tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t, \c bool
*/
template <typename T>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T>, internal::IsGenericValue<T> >), (ValueType&))
Set(ValueType& root, T value, typename ValueType::AllocatorType& allocator) const {
return Create(root, allocator) = ValueType(value).Move();
}
//! Set a value in a document, with move semantics.
template <typename stackAllocator>
ValueType& Set(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, ValueType& value) const {
return Create(document) = value;
}
//! Set a value in a document, with copy semantics.
template <typename stackAllocator>
ValueType& Set(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, const ValueType& value) const {
return Create(document).CopyFrom(value, document.GetAllocator());
}
//! Set a null-terminated string in a document.
template <typename stackAllocator>
ValueType& Set(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, const Ch* value) const {
return Create(document) = ValueType(value, document.GetAllocator()).Move();
}
#if RAPIDJSON_HAS_STDSTRING
//! Sets a std::basic_string in a document.
template <typename stackAllocator>
ValueType& Set(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, const std::basic_string<Ch>& value) const {
return Create(document) = ValueType(value, document.GetAllocator()).Move();
}
#endif
//! Set a primitive value in a document.
/*!
\tparam T Either \ref Type, \c int, \c unsigned, \c int64_t, \c uint64_t, \c bool
*/
template <typename T, typename stackAllocator>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T>, internal::IsGenericValue<T> >), (ValueType&))
Set(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, T value) const {
return Create(document) = value;
}
//@}
//!@name Swap a value
//@{
//! Swap a value with a value in a subtree.
/*!
It creates all parents if they do not exist or if their types differ from the tokens.
So this function always succeeds but potentially removes existing values.
\param root Root value of a DOM sub-tree to be resolved. It can be any value other than the document root.
\param value Value to be swapped.
\param allocator Allocator for creating the values if the specified value or its parents do not exist.
\see Create()
*/
ValueType& Swap(ValueType& root, ValueType& value, typename ValueType::AllocatorType& allocator) const {
return Create(root, allocator).Swap(value);
}
//! Swap a value with a value in a document.
template <typename stackAllocator>
ValueType& Swap(GenericDocument<EncodingType, typename ValueType::AllocatorType, stackAllocator>& document, ValueType& value) const {
return Create(document).Swap(value);
}
//@}
//! Erase a value in a subtree.
/*!
\param root Root value of a DOM sub-tree to be resolved. It can be any value other than document root.
\return Whether the resolved value was found and erased.
\note Erasing with an empty pointer \c Pointer(""), i.e. the root, always fails and returns false.
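\b Example
An illustrative sketch (the JSON content is hypothetical):
\code
Document d;
d.Parse("{\"a\":[1,2,3]}");
Pointer("/a/1").Erase(d);  // d becomes {"a":[1,3]}
\endcode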
*/
bool Erase(ValueType& root) const {
RAPIDJSON_ASSERT(IsValid());
if (tokenCount_ == 0) // Cannot erase the root
return false;
ValueType* v = &root;
const Token* last = tokens_ + (tokenCount_ - 1);
for (const Token *t = tokens_; t != last; ++t) {
switch (v->GetType()) {
case kObjectType:
{
typename ValueType::MemberIterator m = v->FindMember(GenericValue<EncodingType>(GenericStringRef<Ch>(t->name, t->length)));
if (m == v->MemberEnd())
return false;
v = &m->value;
}
break;
case kArrayType:
if (t->index == kPointerInvalidIndex || t->index >= v->Size())
return false;
v = &((*v)[t->index]);
break;
default:
return false;
}
}
switch (v->GetType()) {
case kObjectType:
return v->EraseMember(GenericStringRef<Ch>(last->name, last->length));
case kArrayType:
if (last->index == kPointerInvalidIndex || last->index >= v->Size())
return false;
v->Erase(v->Begin() + last->index);
return true;
default:
return false;
}
}
private:
//! Clone the content from rhs to this.
/*!
\param rhs Source pointer.
\param extraToken Extra tokens to be allocated.
\param extraNameBufferSize Extra name buffer size (in number of Ch) to be allocated.
\return Start of non-occupied name buffer, for storing extra names.
*/
Ch* CopyFromRaw(const GenericPointer& rhs, size_t extraToken = 0, size_t extraNameBufferSize = 0) {
if (!allocator_) // Create own allocator if user did not supply one.
ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator)();
size_t nameBufferSize = rhs.tokenCount_; // null terminators for tokens
for (Token *t = rhs.tokens_; t != rhs.tokens_ + rhs.tokenCount_; ++t)
nameBufferSize += t->length;
tokenCount_ = rhs.tokenCount_ + extraToken;
tokens_ = static_cast<Token *>(allocator_->Malloc(tokenCount_ * sizeof(Token) + (nameBufferSize + extraNameBufferSize) * sizeof(Ch)));
nameBuffer_ = reinterpret_cast<Ch *>(tokens_ + tokenCount_);
if (rhs.tokenCount_ > 0) {
std::memcpy(tokens_, rhs.tokens_, rhs.tokenCount_ * sizeof(Token));
}
if (nameBufferSize > 0) {
std::memcpy(nameBuffer_, rhs.nameBuffer_, nameBufferSize * sizeof(Ch));
}
// Adjust pointers to name buffer
std::ptrdiff_t diff = nameBuffer_ - rhs.nameBuffer_;
for (Token *t = tokens_; t != tokens_ + rhs.tokenCount_; ++t)
t->name += diff;
return nameBuffer_ + nameBufferSize;
}
//! Check whether a character should be percent-encoded.
/*!
According to RFC 3986 2.3 Unreserved Characters.
\param c The character (code unit) to be tested.
*/
bool NeedPercentEncode(Ch c) const {
return !((c >= '0' && c <= '9') || (c >= 'A' && c <='Z') || (c >= 'a' && c <= 'z') || c == '-' || c == '.' || c == '_' || c =='~');
}
//! Parse a JSON String or its URI fragment representation into tokens.
#ifndef __clang__ // -Wdocumentation
/*!
\param source Either a JSON Pointer string, or its URI fragment representation. Need not be null terminated.
\param length Length of the source string.
\note Source cannot be a JSON String Representation of a JSON Pointer, e.g. in "/\u0000", \u0000 will not be unescaped.
*/
#endif
void Parse(const Ch* source, size_t length) {
RAPIDJSON_ASSERT(source != NULL);
RAPIDJSON_ASSERT(nameBuffer_ == 0);
RAPIDJSON_ASSERT(tokens_ == 0);
// Create own allocator if user did not supply.
if (!allocator_)
ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator)();
// Count number of '/' as tokenCount
tokenCount_ = 0;
for (const Ch* s = source; s != source + length; s++)
if (*s == '/')
tokenCount_++;
Token* token = tokens_ = static_cast<Token *>(allocator_->Malloc(tokenCount_ * sizeof(Token) + length * sizeof(Ch)));
Ch* name = nameBuffer_ = reinterpret_cast<Ch *>(tokens_ + tokenCount_);
size_t i = 0;
// Detect if it is a URI fragment
bool uriFragment = false;
if (source[i] == '#') {
uriFragment = true;
i++;
}
if (i != length && source[i] != '/') {
parseErrorCode_ = kPointerParseErrorTokenMustBeginWithSolidus;
goto error;
}
while (i < length) {
RAPIDJSON_ASSERT(source[i] == '/');
i++; // consumes '/'
token->name = name;
bool isNumber = true;
while (i < length && source[i] != '/') {
Ch c = source[i];
if (uriFragment) {
// Decoding percent-encoding for URI fragment
if (c == '%') {
PercentDecodeStream is(&source[i], source + length);
GenericInsituStringStream<EncodingType> os(name);
Ch* begin = os.PutBegin();
if (!Transcoder<UTF8<>, EncodingType>().Validate(is, os) || !is.IsValid()) {
parseErrorCode_ = kPointerParseErrorInvalidPercentEncoding;
goto error;
}
size_t len = os.PutEnd(begin);
i += is.Tell() - 1;
if (len == 1)
c = *name;
else {
name += len;
isNumber = false;
i++;
continue;
}
}
else if (NeedPercentEncode(c)) {
parseErrorCode_ = kPointerParseErrorCharacterMustPercentEncode;
goto error;
}
}
i++;
// Unescape "~0" -> '~', "~1" -> '/'
if (c == '~') {
if (i < length) {
c = source[i];
if (c == '0') c = '~';
else if (c == '1') c = '/';
else {
parseErrorCode_ = kPointerParseErrorInvalidEscape;
goto error;
}
i++;
}
else {
parseErrorCode_ = kPointerParseErrorInvalidEscape;
goto error;
}
}
// First check for index: all characters are digits
if (c < '0' || c > '9')
isNumber = false;
*name++ = c;
}
token->length = static_cast<SizeType>(name - token->name);
if (token->length == 0)
isNumber = false;
*name++ = '\0'; // Null terminator
// Second check for index: a number with more than one digit cannot have a leading zero
if (isNumber && token->length > 1 && token->name[0] == '0')
isNumber = false;
// String to SizeType conversion
SizeType n = 0;
if (isNumber) {
for (size_t j = 0; j < token->length; j++) {
SizeType m = n * 10 + static_cast<SizeType>(token->name[j] - '0');
if (m < n) { // overflow detection
isNumber = false;
break;
}
n = m;
}
}
token->index = isNumber ? n : kPointerInvalidIndex;
token++;
}
RAPIDJSON_ASSERT(name <= nameBuffer_ + length); // Should not overflow buffer
parseErrorCode_ = kPointerParseErrorNone;
return;
error:
Allocator::Free(tokens_);
nameBuffer_ = 0;
tokens_ = 0;
tokenCount_ = 0;
parseErrorOffset_ = i;
return;
}
//! Stringify to string or URI fragment representation.
/*!
\tparam uriFragment True for stringifying to URI fragment representation. False for string representation.
\tparam OutputStream type of output stream.
\param os The output stream.
*/
template<bool uriFragment, typename OutputStream>
bool Stringify(OutputStream& os) const {
RAPIDJSON_ASSERT(IsValid());
if (uriFragment)
os.Put('#');
for (Token *t = tokens_; t != tokens_ + tokenCount_; ++t) {
os.Put('/');
for (size_t j = 0; j < t->length; j++) {
Ch c = t->name[j];
if (c == '~') {
os.Put('~');
os.Put('0');
}
else if (c == '/') {
os.Put('~');
os.Put('1');
}
else if (uriFragment && NeedPercentEncode(c)) {
// Transcode to UTF8 sequence
GenericStringStream<typename ValueType::EncodingType> source(&t->name[j]);
PercentEncodeStream<OutputStream> target(os);
if (!Transcoder<EncodingType, UTF8<> >().Validate(source, target))
return false;
j += source.Tell() - 1;
}
else
os.Put(c);
}
}
return true;
}
//! A helper stream for decoding a percent-encoded sequence into a code unit.
/*!
This stream decodes a %XY triplet into a code unit (0-255).
If it encounters invalid characters, it sets the output code unit to 0 and
marks itself invalid, which can be checked with IsValid().
*/
class PercentDecodeStream {
public:
typedef typename ValueType::Ch Ch;
//! Constructor
/*!
\param source Start of the stream
\param end Past-the-end of the stream.
*/
PercentDecodeStream(const Ch* source, const Ch* end) : src_(source), head_(source), end_(end), valid_(true) {}
Ch Take() {
if (*src_ != '%' || src_ + 3 > end_) { // %XY triplet
valid_ = false;
return 0;
}
src_++;
Ch c = 0;
for (int j = 0; j < 2; j++) {
c = static_cast<Ch>(c << 4);
Ch h = *src_;
if (h >= '0' && h <= '9') c = static_cast<Ch>(c + h - '0');
else if (h >= 'A' && h <= 'F') c = static_cast<Ch>(c + h - 'A' + 10);
else if (h >= 'a' && h <= 'f') c = static_cast<Ch>(c + h - 'a' + 10);
else {
valid_ = false;
return 0;
}
src_++;
}
return c;
}
size_t Tell() const { return static_cast<size_t>(src_ - head_); }
bool IsValid() const { return valid_; }
private:
const Ch* src_; //!< Current read position.
const Ch* head_; //!< Original head of the string.
const Ch* end_; //!< Past-the-end position.
bool valid_; //!< Whether the parsing is valid.
};
//! A helper stream to encode a character (UTF-8 code unit) into a percent-encoded sequence.
template <typename OutputStream>
class PercentEncodeStream {
public:
PercentEncodeStream(OutputStream& os) : os_(os) {}
void Put(char c) { // UTF-8 must be byte
unsigned char u = static_cast<unsigned char>(c);
static const char hexDigits[16] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
os_.Put('%');
os_.Put(static_cast<typename OutputStream::Ch>(hexDigits[u >> 4]));
os_.Put(static_cast<typename OutputStream::Ch>(hexDigits[u & 15]));
}
private:
OutputStream& os_;
};
Allocator* allocator_; //!< The current allocator. It is either user-supplied or equal to ownAllocator_.
Allocator* ownAllocator_; //!< Allocator owned by this Pointer.
Ch* nameBuffer_; //!< A buffer containing all names in tokens.
Token* tokens_; //!< A list of tokens.
size_t tokenCount_; //!< Number of tokens in tokens_.
size_t parseErrorOffset_; //!< Offset in code units where parsing failed.
PointerParseErrorCode parseErrorCode_; //!< Parsing error code.
};
//! GenericPointer for Value (UTF-8, default allocator).
typedef GenericPointer<Value> Pointer;
//!@name Helper functions for GenericPointer
//@{
//////////////////////////////////////////////////////////////////////////////
template <typename T>
typename T::ValueType& CreateValueByPointer(T& root, const GenericPointer<typename T::ValueType>& pointer, typename T::AllocatorType& a) {
return pointer.Create(root, a);
}
template <typename T, typename CharType, size_t N>
typename T::ValueType& CreateValueByPointer(T& root, const CharType(&source)[N], typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).Create(root, a);
}
// No allocator parameter
template <typename DocumentType>
typename DocumentType::ValueType& CreateValueByPointer(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer) {
return pointer.Create(document);
}
template <typename DocumentType, typename CharType, size_t N>
typename DocumentType::ValueType& CreateValueByPointer(DocumentType& document, const CharType(&source)[N]) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).Create(document);
}
//////////////////////////////////////////////////////////////////////////////
template <typename T>
typename T::ValueType* GetValueByPointer(T& root, const GenericPointer<typename T::ValueType>& pointer, size_t* unresolvedTokenIndex = 0) {
return pointer.Get(root, unresolvedTokenIndex);
}
template <typename T>
const typename T::ValueType* GetValueByPointer(const T& root, const GenericPointer<typename T::ValueType>& pointer, size_t* unresolvedTokenIndex = 0) {
return pointer.Get(root, unresolvedTokenIndex);
}
template <typename T, typename CharType, size_t N>
typename T::ValueType* GetValueByPointer(T& root, const CharType (&source)[N], size_t* unresolvedTokenIndex = 0) {
return GenericPointer<typename T::ValueType>(source, N - 1).Get(root, unresolvedTokenIndex);
}
template <typename T, typename CharType, size_t N>
const typename T::ValueType* GetValueByPointer(const T& root, const CharType(&source)[N], size_t* unresolvedTokenIndex = 0) {
return GenericPointer<typename T::ValueType>(source, N - 1).Get(root, unresolvedTokenIndex);
}
//////////////////////////////////////////////////////////////////////////////
template <typename T>
typename T::ValueType& GetValueByPointerWithDefault(T& root, const GenericPointer<typename T::ValueType>& pointer, const typename T::ValueType& defaultValue, typename T::AllocatorType& a) {
return pointer.GetWithDefault(root, defaultValue, a);
}
template <typename T>
typename T::ValueType& GetValueByPointerWithDefault(T& root, const GenericPointer<typename T::ValueType>& pointer, const typename T::Ch* defaultValue, typename T::AllocatorType& a) {
return pointer.GetWithDefault(root, defaultValue, a);
}
#if RAPIDJSON_HAS_STDSTRING
template <typename T>
typename T::ValueType& GetValueByPointerWithDefault(T& root, const GenericPointer<typename T::ValueType>& pointer, const std::basic_string<typename T::Ch>& defaultValue, typename T::AllocatorType& a) {
return pointer.GetWithDefault(root, defaultValue, a);
}
#endif
template <typename T, typename T2>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T2>, internal::IsGenericValue<T2> >), (typename T::ValueType&))
GetValueByPointerWithDefault(T& root, const GenericPointer<typename T::ValueType>& pointer, T2 defaultValue, typename T::AllocatorType& a) {
return pointer.GetWithDefault(root, defaultValue, a);
}
template <typename T, typename CharType, size_t N>
typename T::ValueType& GetValueByPointerWithDefault(T& root, const CharType(&source)[N], const typename T::ValueType& defaultValue, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).GetWithDefault(root, defaultValue, a);
}
template <typename T, typename CharType, size_t N>
typename T::ValueType& GetValueByPointerWithDefault(T& root, const CharType(&source)[N], const typename T::Ch* defaultValue, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).GetWithDefault(root, defaultValue, a);
}
#if RAPIDJSON_HAS_STDSTRING
template <typename T, typename CharType, size_t N>
typename T::ValueType& GetValueByPointerWithDefault(T& root, const CharType(&source)[N], const std::basic_string<typename T::Ch>& defaultValue, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).GetWithDefault(root, defaultValue, a);
}
#endif
template <typename T, typename CharType, size_t N, typename T2>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T2>, internal::IsGenericValue<T2> >), (typename T::ValueType&))
GetValueByPointerWithDefault(T& root, const CharType(&source)[N], T2 defaultValue, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).GetWithDefault(root, defaultValue, a);
}
// No allocator parameter
template <typename DocumentType>
typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, const typename DocumentType::ValueType& defaultValue) {
return pointer.GetWithDefault(document, defaultValue);
}
template <typename DocumentType>
typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, const typename DocumentType::Ch* defaultValue) {
return pointer.GetWithDefault(document, defaultValue);
}
#if RAPIDJSON_HAS_STDSTRING
template <typename DocumentType>
typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, const std::basic_string<typename DocumentType::Ch>& defaultValue) {
return pointer.GetWithDefault(document, defaultValue);
}
#endif
template <typename DocumentType, typename T2>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T2>, internal::IsGenericValue<T2> >), (typename DocumentType::ValueType&))
GetValueByPointerWithDefault(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, T2 defaultValue) {
return pointer.GetWithDefault(document, defaultValue);
}
template <typename DocumentType, typename CharType, size_t N>
typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const CharType(&source)[N], const typename DocumentType::ValueType& defaultValue) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).GetWithDefault(document, defaultValue);
}
template <typename DocumentType, typename CharType, size_t N>
typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const CharType(&source)[N], const typename DocumentType::Ch* defaultValue) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).GetWithDefault(document, defaultValue);
}
#if RAPIDJSON_HAS_STDSTRING
template <typename DocumentType, typename CharType, size_t N>
typename DocumentType::ValueType& GetValueByPointerWithDefault(DocumentType& document, const CharType(&source)[N], const std::basic_string<typename DocumentType::Ch>& defaultValue) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).GetWithDefault(document, defaultValue);
}
#endif
template <typename DocumentType, typename CharType, size_t N, typename T2>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T2>, internal::IsGenericValue<T2> >), (typename DocumentType::ValueType&))
GetValueByPointerWithDefault(DocumentType& document, const CharType(&source)[N], T2 defaultValue) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).GetWithDefault(document, defaultValue);
}
//////////////////////////////////////////////////////////////////////////////
template <typename T>
typename T::ValueType& SetValueByPointer(T& root, const GenericPointer<typename T::ValueType>& pointer, typename T::ValueType& value, typename T::AllocatorType& a) {
return pointer.Set(root, value, a);
}
template <typename T>
typename T::ValueType& SetValueByPointer(T& root, const GenericPointer<typename T::ValueType>& pointer, const typename T::ValueType& value, typename T::AllocatorType& a) {
return pointer.Set(root, value, a);
}
template <typename T>
typename T::ValueType& SetValueByPointer(T& root, const GenericPointer<typename T::ValueType>& pointer, const typename T::Ch* value, typename T::AllocatorType& a) {
return pointer.Set(root, value, a);
}
#if RAPIDJSON_HAS_STDSTRING
template <typename T>
typename T::ValueType& SetValueByPointer(T& root, const GenericPointer<typename T::ValueType>& pointer, const std::basic_string<typename T::Ch>& value, typename T::AllocatorType& a) {
return pointer.Set(root, value, a);
}
#endif
template <typename T, typename T2>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T2>, internal::IsGenericValue<T2> >), (typename T::ValueType&))
SetValueByPointer(T& root, const GenericPointer<typename T::ValueType>& pointer, T2 value, typename T::AllocatorType& a) {
return pointer.Set(root, value, a);
}
template <typename T, typename CharType, size_t N>
typename T::ValueType& SetValueByPointer(T& root, const CharType(&source)[N], typename T::ValueType& value, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).Set(root, value, a);
}
template <typename T, typename CharType, size_t N>
typename T::ValueType& SetValueByPointer(T& root, const CharType(&source)[N], const typename T::ValueType& value, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).Set(root, value, a);
}
template <typename T, typename CharType, size_t N>
typename T::ValueType& SetValueByPointer(T& root, const CharType(&source)[N], const typename T::Ch* value, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).Set(root, value, a);
}
#if RAPIDJSON_HAS_STDSTRING
template <typename T, typename CharType, size_t N>
typename T::ValueType& SetValueByPointer(T& root, const CharType(&source)[N], const std::basic_string<typename T::Ch>& value, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).Set(root, value, a);
}
#endif
template <typename T, typename CharType, size_t N, typename T2>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T2>, internal::IsGenericValue<T2> >), (typename T::ValueType&))
SetValueByPointer(T& root, const CharType(&source)[N], T2 value, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).Set(root, value, a);
}
// No allocator parameter
template <typename DocumentType>
typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, typename DocumentType::ValueType& value) {
return pointer.Set(document, value);
}
template <typename DocumentType>
typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, const typename DocumentType::ValueType& value) {
return pointer.Set(document, value);
}
template <typename DocumentType>
typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, const typename DocumentType::Ch* value) {
return pointer.Set(document, value);
}
#if RAPIDJSON_HAS_STDSTRING
template <typename DocumentType>
typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, const std::basic_string<typename DocumentType::Ch>& value) {
return pointer.Set(document, value);
}
#endif
template <typename DocumentType, typename T2>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T2>, internal::IsGenericValue<T2> >), (typename DocumentType::ValueType&))
SetValueByPointer(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, T2 value) {
return pointer.Set(document, value);
}
template <typename DocumentType, typename CharType, size_t N>
typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const CharType(&source)[N], typename DocumentType::ValueType& value) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).Set(document, value);
}
template <typename DocumentType, typename CharType, size_t N>
typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const CharType(&source)[N], const typename DocumentType::ValueType& value) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).Set(document, value);
}
template <typename DocumentType, typename CharType, size_t N>
typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const CharType(&source)[N], const typename DocumentType::Ch* value) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).Set(document, value);
}
#if RAPIDJSON_HAS_STDSTRING
template <typename DocumentType, typename CharType, size_t N>
typename DocumentType::ValueType& SetValueByPointer(DocumentType& document, const CharType(&source)[N], const std::basic_string<typename DocumentType::Ch>& value) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).Set(document, value);
}
#endif
template <typename DocumentType, typename CharType, size_t N, typename T2>
RAPIDJSON_DISABLEIF_RETURN((internal::OrExpr<internal::IsPointer<T2>, internal::IsGenericValue<T2> >), (typename DocumentType::ValueType&))
SetValueByPointer(DocumentType& document, const CharType(&source)[N], T2 value) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).Set(document, value);
}
//////////////////////////////////////////////////////////////////////////////
template <typename T>
typename T::ValueType& SwapValueByPointer(T& root, const GenericPointer<typename T::ValueType>& pointer, typename T::ValueType& value, typename T::AllocatorType& a) {
return pointer.Swap(root, value, a);
}
template <typename T, typename CharType, size_t N>
typename T::ValueType& SwapValueByPointer(T& root, const CharType(&source)[N], typename T::ValueType& value, typename T::AllocatorType& a) {
return GenericPointer<typename T::ValueType>(source, N - 1).Swap(root, value, a);
}
template <typename DocumentType>
typename DocumentType::ValueType& SwapValueByPointer(DocumentType& document, const GenericPointer<typename DocumentType::ValueType>& pointer, typename DocumentType::ValueType& value) {
return pointer.Swap(document, value);
}
template <typename DocumentType, typename CharType, size_t N>
typename DocumentType::ValueType& SwapValueByPointer(DocumentType& document, const CharType(&source)[N], typename DocumentType::ValueType& value) {
return GenericPointer<typename DocumentType::ValueType>(source, N - 1).Swap(document, value);
}
//////////////////////////////////////////////////////////////////////////////
template <typename T>
bool EraseValueByPointer(T& root, const GenericPointer<typename T::ValueType>& pointer) {
return pointer.Erase(root);
}
template <typename T, typename CharType, size_t N>
bool EraseValueByPointer(T& root, const CharType(&source)[N]) {
return GenericPointer<typename T::ValueType>(source, N - 1).Erase(root);
}
//@}
RAPIDJSON_NAMESPACE_END
#if defined(__clang__) || defined(_MSC_VER)
RAPIDJSON_DIAG_POP
#endif
#endif // RAPIDJSON_POINTER_H_
// Copyright (C) 2011 Milo Yip // Tencent is pleased to support the open source community by making RapidJSON available.
// //
// Permission is hereby granted, free of charge, to any person obtaining a copy // Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
// of this software and associated documentation files (the "Software"), to deal //
// in the Software without restriction, including without limitation the rights // Licensed under the MIT License (the "License"); you may not use this file except
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // in compliance with the License. You may obtain a copy of the License at
// copies of the Software, and to permit persons to whom the Software is //
// furnished to do so, subject to the following conditions: // http://opensource.org/licenses/MIT
// //
// The above copyright notice and this permission notice shall be included in // Unless required by applicable law or agreed to in writing, software distributed
// all copies or substantial portions of the Software. // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// // CONDITIONS OF ANY KIND, either express or implied. See the License for the
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // specific language governing permissions and limitations under the License.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #ifndef RAPIDJSON_PRETTYWRITER_H_
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #define RAPIDJSON_PRETTYWRITER_H_
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN #include "writer.h"
// THE SOFTWARE.
#ifdef __GNUC__
#ifndef RAPIDJSON_PRETTYWRITER_H_ RAPIDJSON_DIAG_PUSH
#define RAPIDJSON_PRETTYWRITER_H_ RAPIDJSON_DIAG_OFF(effc++)
#endif
#include "writer.h"
#if defined(__clang__)
#ifdef __GNUC__ RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_OFF(c++98-compat)
RAPIDJSON_DIAG_OFF(effc++) #endif
#endif
RAPIDJSON_NAMESPACE_BEGIN
RAPIDJSON_NAMESPACE_BEGIN
//! Combination of PrettyWriter format flags.
//! Writer with indentation and spacing. /*! \see PrettyWriter::SetFormatOptions
/*! */
\tparam OutputStream Type of ouptut os. enum PrettyFormatOptions {
\tparam SourceEncoding Encoding of source string. kFormatDefault = 0, //!< Default pretty formatting.
\tparam TargetEncoding Encoding of output stream. kFormatSingleLineArray = 1 //!< Format arrays on a single line.
\tparam StackAllocator Type of allocator for allocating memory of stack. };
*/
template<typename OutputStream, typename SourceEncoding = UTF8<>, typename TargetEncoding = UTF8<>, typename StackAllocator = CrtAllocator> //! Writer with indentation and spacing.
class PrettyWriter : public Writer<OutputStream, SourceEncoding, TargetEncoding, StackAllocator> { /*!
public: \tparam OutputStream Type of output os.
typedef Writer<OutputStream, SourceEncoding, TargetEncoding, StackAllocator> Base; \tparam SourceEncoding Encoding of source string.
typedef typename Base::Ch Ch; \tparam TargetEncoding Encoding of output stream.
\tparam StackAllocator Type of allocator for allocating memory of stack.
//! Constructor */
/*! \param os Output stream. template<typename OutputStream, typename SourceEncoding = UTF8<>, typename TargetEncoding = UTF8<>, typename StackAllocator = CrtAllocator, unsigned writeFlags = kWriteDefaultFlags>
\param allocator User supplied allocator. If it is null, it will create a private one. class PrettyWriter : public Writer<OutputStream, SourceEncoding, TargetEncoding, StackAllocator, writeFlags> {
\param levelDepth Initial capacity of stack. public:
*/ typedef Writer<OutputStream, SourceEncoding, TargetEncoding, StackAllocator, writeFlags> Base;
PrettyWriter(OutputStream& os, StackAllocator* allocator = 0, size_t levelDepth = Base::kDefaultLevelDepth) : typedef typename Base::Ch Ch;
Base(os, allocator, levelDepth), indentChar_(' '), indentCharCount_(4) {}
//! Constructor
//! Set custom indentation. /*! \param os Output stream.
/*! \param indentChar Character for indentation. Must be whitespace character (' ', '\\t', '\\n', '\\r'). \param allocator User supplied allocator. If it is null, it will create a private one.
\param indentCharCount Number of indent characters for each indentation level. \param levelDepth Initial capacity of stack.
\note The default indentation is 4 spaces. */
*/ explicit PrettyWriter(OutputStream& os, StackAllocator* allocator = 0, size_t levelDepth = Base::kDefaultLevelDepth) :
PrettyWriter& SetIndent(Ch indentChar, unsigned indentCharCount) { Base(os, allocator, levelDepth), indentChar_(' '), indentCharCount_(4), formatOptions_(kFormatDefault) {}
RAPIDJSON_ASSERT(indentChar == ' ' || indentChar == '\t' || indentChar == '\n' || indentChar == '\r');
indentChar_ = indentChar;
indentCharCount_ = indentCharCount; explicit PrettyWriter(StackAllocator* allocator = 0, size_t levelDepth = Base::kDefaultLevelDepth) :
return *this; Base(allocator, levelDepth), indentChar_(' '), indentCharCount_(4), formatOptions_(kFormatDefault) {}
}
#if RAPIDJSON_HAS_CXX11_RVALUE_REFS
/*! @name Implementation of Handler PrettyWriter(PrettyWriter&& rhs) :
\see Handler Base(std::forward<PrettyWriter>(rhs)), indentChar_(rhs.indentChar_), indentCharCount_(rhs.indentCharCount_), formatOptions_(rhs.formatOptions_) {}
*/ #endif
//@{
//! Set custom indentation.
bool Null() { PrettyPrefix(kNullType); return Base::WriteNull(); } /*! \param indentChar Character for indentation. Must be whitespace character (' ', '\\t', '\\n', '\\r').
bool Bool(bool b) { PrettyPrefix(b ? kTrueType : kFalseType); return Base::WriteBool(b); } \param indentCharCount Number of indent characters for each indentation level.
bool Int(int i) { PrettyPrefix(kNumberType); return Base::WriteInt(i); } \note The default indentation is 4 spaces.
bool Uint(unsigned u) { PrettyPrefix(kNumberType); return Base::WriteUint(u); } */
bool Int64(int64_t i64) { PrettyPrefix(kNumberType); return Base::WriteInt64(i64); } PrettyWriter& SetIndent(Ch indentChar, unsigned indentCharCount) {
bool Uint64(uint64_t u64) { PrettyPrefix(kNumberType); return Base::WriteUint64(u64); } RAPIDJSON_ASSERT(indentChar == ' ' || indentChar == '\t' || indentChar == '\n' || indentChar == '\r');
bool Double(double d) { PrettyPrefix(kNumberType); return Base::WriteDouble(d); } indentChar_ = indentChar;
indentCharCount_ = indentCharCount;
bool String(const Ch* str, SizeType length, bool copy = false) { return *this;
(void)copy; }
PrettyPrefix(kStringType);
return Base::WriteString(str, length); //! Set pretty writer formatting options.
} /*! \param options Formatting options.
*/
bool StartObject() { PrettyWriter& SetFormatOptions(PrettyFormatOptions options) {
PrettyPrefix(kObjectType); formatOptions_ = options;
new (Base::level_stack_.template Push<typename Base::Level>()) typename Base::Level(false); return *this;
return Base::WriteStartObject(); }
}
/*! @name Implementation of Handler
bool Key(const Ch* str, SizeType length, bool copy = false) { return String(str, length, copy); } \see Handler
*/
bool EndObject(SizeType memberCount = 0) { //@{
(void)memberCount;
RAPIDJSON_ASSERT(Base::level_stack_.GetSize() >= sizeof(typename Base::Level)); bool Null() { PrettyPrefix(kNullType); return Base::EndValue(Base::WriteNull()); }
RAPIDJSON_ASSERT(!Base::level_stack_.template Top<typename Base::Level>()->inArray); bool Bool(bool b) { PrettyPrefix(b ? kTrueType : kFalseType); return Base::EndValue(Base::WriteBool(b)); }
bool empty = Base::level_stack_.template Pop<typename Base::Level>(1)->valueCount == 0; bool Int(int i) { PrettyPrefix(kNumberType); return Base::EndValue(Base::WriteInt(i)); }
bool Uint(unsigned u) { PrettyPrefix(kNumberType); return Base::EndValue(Base::WriteUint(u)); }
if (!empty) { bool Int64(int64_t i64) { PrettyPrefix(kNumberType); return Base::EndValue(Base::WriteInt64(i64)); }
Base::os_->Put('\n'); bool Uint64(uint64_t u64) { PrettyPrefix(kNumberType); return Base::EndValue(Base::WriteUint64(u64)); }
WriteIndent(); bool Double(double d) { PrettyPrefix(kNumberType); return Base::EndValue(Base::WriteDouble(d)); }
}
if (!Base::WriteEndObject()) bool RawNumber(const Ch* str, SizeType length, bool copy = false) {
return false; RAPIDJSON_ASSERT(str != 0);
if (Base::level_stack_.Empty()) // end of json text (void)copy;
Base::os_->Flush(); PrettyPrefix(kNumberType);
return true; return Base::EndValue(Base::WriteString(str, length));
} }
bool StartArray() { bool String(const Ch* str, SizeType length, bool copy = false) {
PrettyPrefix(kArrayType); RAPIDJSON_ASSERT(str != 0);
new (Base::level_stack_.template Push<typename Base::Level>()) typename Base::Level(true); (void)copy;
return Base::WriteStartArray(); PrettyPrefix(kStringType);
} return Base::EndValue(Base::WriteString(str, length));
}
bool EndArray(SizeType memberCount = 0) {
(void)memberCount; #if RAPIDJSON_HAS_STDSTRING
RAPIDJSON_ASSERT(Base::level_stack_.GetSize() >= sizeof(typename Base::Level)); bool String(const std::basic_string<Ch>& str) {
RAPIDJSON_ASSERT(Base::level_stack_.template Top<typename Base::Level>()->inArray); return String(str.data(), SizeType(str.size()));
bool empty = Base::level_stack_.template Pop<typename Base::Level>(1)->valueCount == 0; }
#endif
if (!empty) {
Base::os_->Put('\n'); bool StartObject() {
WriteIndent(); PrettyPrefix(kObjectType);
} new (Base::level_stack_.template Push<typename Base::Level>()) typename Base::Level(false);
if (!Base::WriteEndArray()) return Base::WriteStartObject();
return false; }
if (Base::level_stack_.Empty()) // end of json text
Base::os_->Flush(); bool Key(const Ch* str, SizeType length, bool copy = false) { return String(str, length, copy); }
return true;
} #if RAPIDJSON_HAS_STDSTRING
bool Key(const std::basic_string<Ch>& str) {
//@} return Key(str.data(), SizeType(str.size()));
}
/*! @name Convenience extensions */ #endif
//@{
bool EndObject(SizeType memberCount = 0) {
//! Simpler but slower overload. (void)memberCount;
bool String(const Ch* str) { return String(str, internal::StrLen(str)); } RAPIDJSON_ASSERT(Base::level_stack_.GetSize() >= sizeof(typename Base::Level)); // not inside an Object
bool Key(const Ch* str) { return Key(str, internal::StrLen(str)); } RAPIDJSON_ASSERT(!Base::level_stack_.template Top<typename Base::Level>()->inArray); // currently inside an Array, not Object
RAPIDJSON_ASSERT(0 == Base::level_stack_.template Top<typename Base::Level>()->valueCount % 2); // Object has a Key without a Value
//@}
protected: bool empty = Base::level_stack_.template Pop<typename Base::Level>(1)->valueCount == 0;
void PrettyPrefix(Type type) {
(void)type; if (!empty) {
if (Base::level_stack_.GetSize() != 0) { // this value is not at root Base::os_->Put('\n');
typename Base::Level* level = Base::level_stack_.template Top<typename Base::Level>(); WriteIndent();
}
if (level->inArray) { bool ret = Base::EndValue(Base::WriteEndObject());
if (level->valueCount > 0) { (void)ret;
Base::os_->Put(','); // add comma if it is not the first element in array RAPIDJSON_ASSERT(ret == true);
Base::os_->Put('\n'); if (Base::level_stack_.Empty()) // end of json text
} Base::Flush();
else return true;
Base::os_->Put('\n'); }
WriteIndent();
} bool StartArray() {
else { // in object PrettyPrefix(kArrayType);
if (level->valueCount > 0) { new (Base::level_stack_.template Push<typename Base::Level>()) typename Base::Level(true);
if (level->valueCount % 2 == 0) { return Base::WriteStartArray();
Base::os_->Put(','); }
Base::os_->Put('\n');
} bool EndArray(SizeType memberCount = 0) {
else { (void)memberCount;
Base::os_->Put(':'); RAPIDJSON_ASSERT(Base::level_stack_.GetSize() >= sizeof(typename Base::Level));
Base::os_->Put(' '); RAPIDJSON_ASSERT(Base::level_stack_.template Top<typename Base::Level>()->inArray);
} bool empty = Base::level_stack_.template Pop<typename Base::Level>(1)->valueCount == 0;
}
else if (!empty && !(formatOptions_ & kFormatSingleLineArray)) {
Base::os_->Put('\n'); Base::os_->Put('\n');
WriteIndent();
if (level->valueCount % 2 == 0) }
WriteIndent(); bool ret = Base::EndValue(Base::WriteEndArray());
} (void)ret;
if (!level->inArray && level->valueCount % 2 == 0) RAPIDJSON_ASSERT(ret == true);
RAPIDJSON_ASSERT(type == kStringType); // if it's in object, then even number should be a name if (Base::level_stack_.Empty()) // end of json text
level->valueCount++; Base::Flush();
} return true;
else { }
RAPIDJSON_ASSERT(!Base::hasRoot_); // Should only has one and only one root.
Base::hasRoot_ = true; //@}
}
} /*! @name Convenience extensions */
//@{
void WriteIndent() {
size_t count = (Base::level_stack_.GetSize() / sizeof(typename Base::Level)) * indentCharCount_; //! Simpler but slower overload.
PutN(*Base::os_, indentChar_, count); bool String(const Ch* str) { return String(str, internal::StrLen(str)); }
} bool Key(const Ch* str) { return Key(str, internal::StrLen(str)); }
Ch indentChar_; //@}
unsigned indentCharCount_;
//! Write a raw JSON value.
private: /*!
// Prohibit copy constructor & assignment operator. For user to write a stringified JSON as a value.
PrettyWriter(const PrettyWriter&);
PrettyWriter& operator=(const PrettyWriter&); \param json A well-formed JSON value. It should not contain null character within [0, length - 1] range.
}; \param length Length of the json.
\param type Type of the root of json.
RAPIDJSON_NAMESPACE_END \note When using PrettyWriter::RawValue(), the result json may not be indented correctly.
*/
#ifdef __GNUC__ bool RawValue(const Ch* json, size_t length, Type type) {
RAPIDJSON_DIAG_POP RAPIDJSON_ASSERT(json != 0);
#endif PrettyPrefix(type);
return Base::EndValue(Base::WriteRawValue(json, length));
#endif // RAPIDJSON_RAPIDJSON_H_ }
protected:
void PrettyPrefix(Type type) {
(void)type;
if (Base::level_stack_.GetSize() != 0) { // this value is not at root
typename Base::Level* level = Base::level_stack_.template Top<typename Base::Level>();
if (level->inArray) {
if (level->valueCount > 0) {
Base::os_->Put(','); // add comma if it is not the first element in array
if (formatOptions_ & kFormatSingleLineArray)
Base::os_->Put(' ');
}
if (!(formatOptions_ & kFormatSingleLineArray)) {
Base::os_->Put('\n');
WriteIndent();
}
}
else { // in object
if (level->valueCount > 0) {
if (level->valueCount % 2 == 0) {
Base::os_->Put(',');
Base::os_->Put('\n');
}
else {
Base::os_->Put(':');
Base::os_->Put(' ');
}
}
else
Base::os_->Put('\n');
if (level->valueCount % 2 == 0)
WriteIndent();
}
if (!level->inArray && level->valueCount % 2 == 0)
RAPIDJSON_ASSERT(type == kStringType); // if it's in object, then even number should be a name
level->valueCount++;
}
else {
RAPIDJSON_ASSERT(!Base::hasRoot_); // Should only has one and only one root.
Base::hasRoot_ = true;
}
}
void WriteIndent() {
size_t count = (Base::level_stack_.GetSize() / sizeof(typename Base::Level)) * indentCharCount_;
PutN(*Base::os_, static_cast<typename OutputStream::Ch>(indentChar_), count);
}
Ch indentChar_;
unsigned indentCharCount_;
PrettyFormatOptions formatOptions_;
private:
// Prohibit copy constructor & assignment operator.
PrettyWriter(const PrettyWriter&);
PrettyWriter& operator=(const PrettyWriter&);
};
RAPIDJSON_NAMESPACE_END
#if defined(__clang__)
RAPIDJSON_DIAG_POP
#endif
#ifdef __GNUC__
RAPIDJSON_DIAG_POP
#endif
#endif // RAPIDJSON_RAPIDJSON_H_
// Copyright (C) 2011 Milo Yip // Tencent is pleased to support the open source community by making RapidJSON available.
// //
// Permission is hereby granted, free of charge, to any person obtaining a copy // Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
// of this software and associated documentation files (the "Software"), to deal //
// in the Software without restriction, including without limitation the rights // Licensed under the MIT License (the "License"); you may not use this file except
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // in compliance with the License. You may obtain a copy of the License at
// copies of the Software, and to permit persons to whom the Software is //
// furnished to do so, subject to the following conditions: // http://opensource.org/licenses/MIT
// //
// The above copyright notice and this permission notice shall be included in // Unless required by applicable law or agreed to in writing, software distributed
// all copies or substantial portions of the Software. // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// // CONDITIONS OF ANY KIND, either express or implied. See the License for the
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // specific language governing permissions and limitations under the License.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #ifndef RAPIDJSON_RAPIDJSON_H_
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #define RAPIDJSON_RAPIDJSON_H_
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN /*!\file rapidjson.h
// THE SOFTWARE. \brief common definitions and configuration
#ifndef RAPIDJSON_RAPIDJSON_H_ \see RAPIDJSON_CONFIG
#define RAPIDJSON_RAPIDJSON_H_ */
// Copyright (c) 2011 Milo Yip (miloyip@gmail.com) /*! \defgroup RAPIDJSON_CONFIG RapidJSON configuration
// Version 0.1 \brief Configuration macros for library features
/*!\file rapidjson.h Some RapidJSON features are configurable to adapt the library to a wide
\brief common definitions and configuration variety of platforms, environments and usage scenarios. Most of the
features can be configured in terms of overridden or predefined
\see RAPIDJSON_CONFIG preprocessor macros at compile-time.
*/
Some additional customization is available in the \ref RAPIDJSON_ERRORS APIs.
/*! \defgroup RAPIDJSON_CONFIG RapidJSON configuration
\brief Configuration macros for library features \note These macros should be given on the compiler command-line
(where applicable) to avoid inconsistent values when compiling
Some RapidJSON features are configurable to adapt the library to a wide different translation units of a single application.
variety of platforms, environments and usage scenarios. Most of the */
features can be configured in terms of overriden or predefined
preprocessor macros at compile-time. #include <cstdlib> // malloc(), realloc(), free(), size_t
#include <cstring> // memset(), memcpy(), memmove(), memcmp()
Some additional customization is available in the \ref RAPIDJSON_ERRORS APIs.
///////////////////////////////////////////////////////////////////////////////
\note These macros should be given on the compiler command-line // RAPIDJSON_VERSION_STRING
(where applicable) to avoid inconsistent values when compiling //
different translation units of a single application. // ALWAYS synchronize the following 3 macros with corresponding variables in /CMakeLists.txt.
*/ //
#include <cstdlib> // malloc(), realloc(), free(), size_t //!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
#include <cstring> // memset(), memcpy(), memmove(), memcmp() // token stringification
#define RAPIDJSON_STRINGIFY(x) RAPIDJSON_DO_STRINGIFY(x)
/////////////////////////////////////////////////////////////////////////////// #define RAPIDJSON_DO_STRINGIFY(x) #x
// RAPIDJSON_NAMESPACE_(BEGIN|END)
/*! \def RAPIDJSON_NAMESPACE // token concatenation
\ingroup RAPIDJSON_CONFIG #define RAPIDJSON_JOIN(X, Y) RAPIDJSON_DO_JOIN(X, Y)
\brief provide custom rapidjson namespace #define RAPIDJSON_DO_JOIN(X, Y) RAPIDJSON_DO_JOIN2(X, Y)
#define RAPIDJSON_DO_JOIN2(X, Y) X##Y
In order to avoid symbol clashes and/or "One Definition Rule" errors //!@endcond
between multiple inclusions of (different versions of) RapidJSON in
a single binary, users can customize the name of the main RapidJSON /*! \def RAPIDJSON_MAJOR_VERSION
namespace. \ingroup RAPIDJSON_CONFIG
\brief Major version of RapidJSON in integer.
In case of a single nesting level, defining \c RAPIDJSON_NAMESPACE */
to a custom name (e.g. \c MyRapidJSON) is sufficient. If multiple /*! \def RAPIDJSON_MINOR_VERSION
levels are needed, both \ref RAPIDJSON_NAMESPACE_BEGIN and \ref \ingroup RAPIDJSON_CONFIG
RAPIDJSON_NAMESPACE_END need to be defined as well: \brief Minor version of RapidJSON in integer.
*/
\code /*! \def RAPIDJSON_PATCH_VERSION
// in some .cpp file \ingroup RAPIDJSON_CONFIG
#define RAPIDJSON_NAMESPACE my::rapidjson \brief Patch version of RapidJSON in integer.
#define RAPIDJSON_NAMESPACE_BEGIN namespace my { namespace rapidjson { */
#define RAPIDJSON_NAMESPACE_END } } /*! \def RAPIDJSON_VERSION_STRING
#include "rapidjson/..." \ingroup RAPIDJSON_CONFIG
\endcode \brief Version of RapidJSON in "<major>.<minor>.<patch>" string format.
*/
\see rapidjson #define RAPIDJSON_MAJOR_VERSION 1
*/ #define RAPIDJSON_MINOR_VERSION 1
/*! \def RAPIDJSON_NAMESPACE_BEGIN #define RAPIDJSON_PATCH_VERSION 0
\ingroup RAPIDJSON_CONFIG #define RAPIDJSON_VERSION_STRING \
\brief provide custom rapidjson namespace (opening expression) RAPIDJSON_STRINGIFY(RAPIDJSON_MAJOR_VERSION.RAPIDJSON_MINOR_VERSION.RAPIDJSON_PATCH_VERSION)
\see RAPIDJSON_NAMESPACE
*/ ///////////////////////////////////////////////////////////////////////////////
/*! \def RAPIDJSON_NAMESPACE_END // RAPIDJSON_NAMESPACE_(BEGIN|END)
\ingroup RAPIDJSON_CONFIG /*! \def RAPIDJSON_NAMESPACE
\brief provide custom rapidjson namespace (closing expression) \ingroup RAPIDJSON_CONFIG
\see RAPIDJSON_NAMESPACE \brief provide custom rapidjson namespace
*/
#ifndef RAPIDJSON_NAMESPACE In order to avoid symbol clashes and/or "One Definition Rule" errors
#define RAPIDJSON_NAMESPACE rapidjson between multiple inclusions of (different versions of) RapidJSON in
#endif a single binary, users can customize the name of the main RapidJSON
#ifndef RAPIDJSON_NAMESPACE_BEGIN namespace.
#define RAPIDJSON_NAMESPACE_BEGIN namespace RAPIDJSON_NAMESPACE {
#endif In case of a single nesting level, defining \c RAPIDJSON_NAMESPACE
#ifndef RAPIDJSON_NAMESPACE_END to a custom name (e.g. \c MyRapidJSON) is sufficient. If multiple
#define RAPIDJSON_NAMESPACE_END } levels are needed, both \ref RAPIDJSON_NAMESPACE_BEGIN and \ref
#endif RAPIDJSON_NAMESPACE_END need to be defined as well:
/////////////////////////////////////////////////////////////////////////////// \code
// RAPIDJSON_NO_INT64DEFINE // in some .cpp file
#define RAPIDJSON_NAMESPACE my::rapidjson
/*! \def RAPIDJSON_NO_INT64DEFINE #define RAPIDJSON_NAMESPACE_BEGIN namespace my { namespace rapidjson {
\ingroup RAPIDJSON_CONFIG #define RAPIDJSON_NAMESPACE_END } }
\brief Use external 64-bit integer types. #include "rapidjson/..."
\endcode
RapidJSON requires the 64-bit integer types \c int64_t and \c uint64_t types
to be available at global scope. \see rapidjson
*/
If users have their own definition, define RAPIDJSON_NO_INT64DEFINE to /*! \def RAPIDJSON_NAMESPACE_BEGIN
prevent RapidJSON from defining its own types. \ingroup RAPIDJSON_CONFIG
*/ \brief provide custom rapidjson namespace (opening expression)
#ifndef RAPIDJSON_NO_INT64DEFINE \see RAPIDJSON_NAMESPACE
//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN */
#ifdef _MSC_VER /*! \def RAPIDJSON_NAMESPACE_END
#include "msinttypes/stdint.h" \ingroup RAPIDJSON_CONFIG
#include "msinttypes/inttypes.h" \brief provide custom rapidjson namespace (closing expression)
#else \see RAPIDJSON_NAMESPACE
// Other compilers should have this. */
#include <stdint.h> #ifndef RAPIDJSON_NAMESPACE
#include <inttypes.h> #define RAPIDJSON_NAMESPACE rapidjson
#endif #endif
//!@endcond #ifndef RAPIDJSON_NAMESPACE_BEGIN
#ifdef RAPIDJSON_DOXYGEN_RUNNING #define RAPIDJSON_NAMESPACE_BEGIN namespace RAPIDJSON_NAMESPACE {
#define RAPIDJSON_NO_INT64DEFINE #endif
#endif #ifndef RAPIDJSON_NAMESPACE_END
#endif // RAPIDJSON_NO_INT64TYPEDEF #define RAPIDJSON_NAMESPACE_END }
#endif
///////////////////////////////////////////////////////////////////////////////
// RAPIDJSON_FORCEINLINE ///////////////////////////////////////////////////////////////////////////////
// __cplusplus macro
#ifndef RAPIDJSON_FORCEINLINE
//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN //!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
#ifdef _MSC_VER
#define RAPIDJSON_FORCEINLINE __forceinline #if defined(_MSC_VER)
#elif defined(__GNUC__) && __GNUC__ >= 4 #define RAPIDJSON_CPLUSPLUS _MSVC_LANG
#define RAPIDJSON_FORCEINLINE __attribute__((always_inline)) #else
#else #define RAPIDJSON_CPLUSPLUS __cplusplus
#define RAPIDJSON_FORCEINLINE #endif
#endif
//!@endcond //!@endcond
#endif // RAPIDJSON_FORCEINLINE
///////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////// // RAPIDJSON_HAS_STDSTRING
// RAPIDJSON_ENDIAN
#define RAPIDJSON_LITTLEENDIAN 0 //!< Little endian machine #ifndef RAPIDJSON_HAS_STDSTRING
#define RAPIDJSON_BIGENDIAN 1 //!< Big endian machine #ifdef RAPIDJSON_DOXYGEN_RUNNING
#define RAPIDJSON_HAS_STDSTRING 1 // force generation of documentation
//! Endianness of the machine. #else
/*! #define RAPIDJSON_HAS_STDSTRING 0 // no std::string support by default
\def RAPIDJSON_ENDIAN #endif
\ingroup RAPIDJSON_CONFIG /*! \def RAPIDJSON_HAS_STDSTRING
\ingroup RAPIDJSON_CONFIG
GCC 4.6 provided macro for detecting endianness of the target machine. But other \brief Enable RapidJSON support for \c std::string
compilers may not have this. User can define RAPIDJSON_ENDIAN to either
\ref RAPIDJSON_LITTLEENDIAN or \ref RAPIDJSON_BIGENDIAN. By defining this preprocessor symbol to \c 1, several convenience functions for using
\ref rapidjson::GenericValue with \c std::string are enabled, especially
Default detection implemented with reference to for construction and comparison.
\li https://gcc.gnu.org/onlinedocs/gcc-4.6.0/cpp/Common-Predefined-Macros.html
\li http://www.boost.org/doc/libs/1_42_0/boost/detail/endian.hpp \hideinitializer
*/ */
#ifndef RAPIDJSON_ENDIAN #endif // !defined(RAPIDJSON_HAS_STDSTRING)
// Detect with GCC 4.6's macro
# ifdef __BYTE_ORDER__ #if RAPIDJSON_HAS_STDSTRING
# if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ #include <string>
# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN #endif // RAPIDJSON_HAS_STDSTRING
# elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
# define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN ///////////////////////////////////////////////////////////////////////////////
# else // RAPIDJSON_USE_MEMBERSMAP
# error Unknown machine endianess detected. User needs to define RAPIDJSON_ENDIAN.
# endif // __BYTE_ORDER__ /*! \def RAPIDJSON_USE_MEMBERSMAP
// Detect with GLIBC's endian.h \ingroup RAPIDJSON_CONFIG
# elif defined(__GLIBC__) \brief Enable RapidJSON support for object members handling in a \c std::multimap
# include <endian.h>
# if (__BYTE_ORDER == __LITTLE_ENDIAN) By defining this preprocessor symbol to \c 1, \ref rapidjson::GenericValue object
# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN members are stored in a \c std::multimap for faster lookup and deletion times, a
# elif (__BYTE_ORDER == __BIG_ENDIAN) trade off with a slightly slower insertion time and a small object allocat(or)ed
# define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN memory overhead.
# else
# error Unknown machine endianess detected. User needs to define RAPIDJSON_ENDIAN. \hideinitializer
# endif // __GLIBC__ */
// Detect with _LITTLE_ENDIAN and _BIG_ENDIAN macro #ifndef RAPIDJSON_USE_MEMBERSMAP
# elif defined(_LITTLE_ENDIAN) && !defined(_BIG_ENDIAN) #define RAPIDJSON_USE_MEMBERSMAP 0 // not by default
# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN #endif
# elif defined(_BIG_ENDIAN) && !defined(_LITTLE_ENDIAN)
# define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN ///////////////////////////////////////////////////////////////////////////////
// Detect with architecture macros // RAPIDJSON_NO_INT64DEFINE
# elif defined(__sparc) || defined(__sparc__) || defined(_POWER) || defined(__powerpc__) || defined(__ppc__) || defined(__hpux) || defined(__hppa) || defined(_MIPSEB) || defined(_POWER) || defined(__s390__)
# define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN /*! \def RAPIDJSON_NO_INT64DEFINE
# elif defined(__i386__) || defined(__alpha__) || defined(__ia64) || defined(__ia64__) || defined(_M_IX86) || defined(_M_IA64) || defined(_M_ALPHA) || defined(__amd64) || defined(__amd64__) || defined(_M_AMD64) || defined(__x86_64) || defined(__x86_64__) || defined(_M_X64) || defined(__bfin__) \ingroup RAPIDJSON_CONFIG
# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN \brief Use external 64-bit integer types.
# elif defined(RAPIDJSON_DOXYGEN_RUNNING)
# define RAPIDJSON_ENDIAN RapidJSON requires the 64-bit integer types \c int64_t and \c uint64_t types
# else to be available at global scope.
# error Unknown machine endianess detected. User needs to define RAPIDJSON_ENDIAN.
# endif If users have their own definition, define RAPIDJSON_NO_INT64DEFINE to
#endif // RAPIDJSON_ENDIAN prevent RapidJSON from defining its own types.
*/
/////////////////////////////////////////////////////////////////////////////// #ifndef RAPIDJSON_NO_INT64DEFINE
// RAPIDJSON_64BIT //!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
#if defined(_MSC_VER) && (_MSC_VER < 1800) // Visual Studio 2013
//! Whether using 64-bit architecture #include "msinttypes/stdint.h"
#ifndef RAPIDJSON_64BIT #include "msinttypes/inttypes.h"
#if defined(__LP64__) || defined(_WIN64) #else
#define RAPIDJSON_64BIT 1 // Other compilers should have this.
#else #include <stdint.h>
#define RAPIDJSON_64BIT 0 #include <inttypes.h>
#endif #endif
#endif // RAPIDJSON_64BIT //!@endcond
#ifdef RAPIDJSON_DOXYGEN_RUNNING
/////////////////////////////////////////////////////////////////////////////// #define RAPIDJSON_NO_INT64DEFINE
// RAPIDJSON_ALIGN #endif
#endif // RAPIDJSON_NO_INT64TYPEDEF
//! Data alignment of the machine.
/*! \ingroup RAPIDJSON_CONFIG ///////////////////////////////////////////////////////////////////////////////
\param x pointer to align // RAPIDJSON_FORCEINLINE
Some machines require strict data alignment. Currently the default uses 4 bytes #ifndef RAPIDJSON_FORCEINLINE
alignment. User can customize by defining the RAPIDJSON_ALIGN function macro., //!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
*/ #if defined(_MSC_VER) && defined(NDEBUG)
#ifndef RAPIDJSON_ALIGN #define RAPIDJSON_FORCEINLINE __forceinline
#define RAPIDJSON_ALIGN(x) ((x + 3u) & ~3u) #elif defined(__GNUC__) && __GNUC__ >= 4 && defined(NDEBUG)
#endif #define RAPIDJSON_FORCEINLINE __attribute__((always_inline))
#else
/////////////////////////////////////////////////////////////////////////////// #define RAPIDJSON_FORCEINLINE
// RAPIDJSON_UINT64_C2 #endif
//!@endcond
//! Construct a 64-bit literal by a pair of 32-bit integer. #endif // RAPIDJSON_FORCEINLINE
/*!
64-bit literal with or without ULL suffix is prone to compiler warnings. ///////////////////////////////////////////////////////////////////////////////
UINT64_C() is C macro which cause compilation problems. // RAPIDJSON_ENDIAN
Use this macro to define 64-bit constants by a pair of 32-bit integer. #define RAPIDJSON_LITTLEENDIAN 0 //!< Little endian machine
*/ #define RAPIDJSON_BIGENDIAN 1 //!< Big endian machine
#ifndef RAPIDJSON_UINT64_C2
#define RAPIDJSON_UINT64_C2(high32, low32) ((static_cast<uint64_t>(high32) << 32) | static_cast<uint64_t>(low32)) //! Endianness of the machine.
#endif /*!
\def RAPIDJSON_ENDIAN
/////////////////////////////////////////////////////////////////////////////// \ingroup RAPIDJSON_CONFIG
// RAPIDJSON_SSE2/RAPIDJSON_SSE42/RAPIDJSON_SIMD
GCC 4.6 provided macro for detecting endianness of the target machine. But other
/*! \def RAPIDJSON_SIMD compilers may not have this. User can define RAPIDJSON_ENDIAN to either
\ingroup RAPIDJSON_CONFIG \ref RAPIDJSON_LITTLEENDIAN or \ref RAPIDJSON_BIGENDIAN.
\brief Enable SSE2/SSE4.2 optimization.
Default detection implemented with reference to
RapidJSON supports optimized implementations for some parsing operations \li https://gcc.gnu.org/onlinedocs/gcc-4.6.0/cpp/Common-Predefined-Macros.html
based on the SSE2 or SSE4.2 SIMD extensions on modern Intel-compatible \li http://www.boost.org/doc/libs/1_42_0/boost/detail/endian.hpp
processors. */
#ifndef RAPIDJSON_ENDIAN
To enable these optimizations, two different symbols can be defined; // Detect with GCC 4.6's macro
\code # ifdef __BYTE_ORDER__
// Enable SSE2 optimization. # if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#define RAPIDJSON_SSE2 # define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN
# elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
// Enable SSE4.2 optimization. # define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN
#define RAPIDJSON_SSE42 # else
\endcode # error Unknown machine endianness detected. User needs to define RAPIDJSON_ENDIAN.
# endif // __BYTE_ORDER__
\c RAPIDJSON_SSE42 takes precedence, if both are defined. // Detect with GLIBC's endian.h
# elif defined(__GLIBC__)
If any of these symbols is defined, RapidJSON defines the macro # include <endian.h>
\c RAPIDJSON_SIMD to indicate the availability of the optimized code. # if (__BYTE_ORDER == __LITTLE_ENDIAN)
*/ # define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN
#if defined(RAPIDJSON_SSE2) || defined(RAPIDJSON_SSE42) \ # elif (__BYTE_ORDER == __BIG_ENDIAN)
|| defined(RAPIDJSON_DOXYGEN_RUNNING) # define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN
#define RAPIDJSON_SIMD # else
#endif # error Unknown machine endianness detected. User needs to define RAPIDJSON_ENDIAN.
# endif // __GLIBC__
/////////////////////////////////////////////////////////////////////////////// // Detect with _LITTLE_ENDIAN and _BIG_ENDIAN macro
// RAPIDJSON_NO_SIZETYPEDEFINE # elif defined(_LITTLE_ENDIAN) && !defined(_BIG_ENDIAN)
# define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN
#ifndef RAPIDJSON_NO_SIZETYPEDEFINE # elif defined(_BIG_ENDIAN) && !defined(_LITTLE_ENDIAN)
/*! \def RAPIDJSON_NO_SIZETYPEDEFINE # define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN
\ingroup RAPIDJSON_CONFIG // Detect with architecture macros
\brief User-provided \c SizeType definition. # elif defined(__sparc) || defined(__sparc__) || defined(_POWER) || defined(__powerpc__) || defined(__ppc__) || defined(__hpux) || defined(__hppa) || defined(_MIPSEB) || defined(_POWER) || defined(__s390__)
# define RAPIDJSON_ENDIAN RAPIDJSON_BIGENDIAN
In order to avoid using 32-bit size types for indexing strings and arrays, # elif defined(__i386__) || defined(__alpha__) || defined(__ia64) || defined(__ia64__) || defined(_M_IX86) || defined(_M_IA64) || defined(_M_ALPHA) || defined(__amd64) || defined(__amd64__) || defined(_M_AMD64) || defined(__x86_64) || defined(__x86_64__) || defined(_M_X64) || defined(__bfin__)
define this preprocessor symbol and provide the type rapidjson::SizeType # define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN
before including RapidJSON: # elif defined(_MSC_VER) && (defined(_M_ARM) || defined(_M_ARM64))
\code # define RAPIDJSON_ENDIAN RAPIDJSON_LITTLEENDIAN
#define RAPIDJSON_NO_SIZETYPEDEFINE # elif defined(RAPIDJSON_DOXYGEN_RUNNING)
namespace rapidjson { typedef ::std::size_t SizeType; } # define RAPIDJSON_ENDIAN
#include "rapidjson/..." # else
\endcode # error Unknown machine endianness detected. User needs to define RAPIDJSON_ENDIAN.
# endif
\see rapidjson::SizeType #endif // RAPIDJSON_ENDIAN
*/
#ifdef RAPIDJSON_DOXYGEN_RUNNING ///////////////////////////////////////////////////////////////////////////////
#define RAPIDJSON_NO_SIZETYPEDEFINE // RAPIDJSON_64BIT
#endif
RAPIDJSON_NAMESPACE_BEGIN //! Whether using 64-bit architecture
//! Size type (for string lengths, array sizes, etc.) #ifndef RAPIDJSON_64BIT
/*! RapidJSON uses 32-bit array/string indices even on 64-bit platforms, #if defined(__LP64__) || (defined(__x86_64__) && defined(__ILP32__)) || defined(_WIN64) || defined(__EMSCRIPTEN__)
instead of using \c size_t. Users may override the SizeType by defining #define RAPIDJSON_64BIT 1
\ref RAPIDJSON_NO_SIZETYPEDEFINE. #else
*/ #define RAPIDJSON_64BIT 0
typedef unsigned SizeType; #endif
RAPIDJSON_NAMESPACE_END #endif // RAPIDJSON_64BIT
#endif
///////////////////////////////////////////////////////////////////////////////
// always import std::size_t to rapidjson namespace // RAPIDJSON_ALIGN
RAPIDJSON_NAMESPACE_BEGIN
using std::size_t; //! Data alignment of the machine.
RAPIDJSON_NAMESPACE_END /*! \ingroup RAPIDJSON_CONFIG
\param x pointer to align
///////////////////////////////////////////////////////////////////////////////
// RAPIDJSON_ASSERT Some machines require strict data alignment. The default is 8 bytes.
User can customize by defining the RAPIDJSON_ALIGN function macro.
//! Assertion. */
/*! \ingroup RAPIDJSON_CONFIG #ifndef RAPIDJSON_ALIGN
By default, rapidjson uses C \c assert() for internal assertions. #define RAPIDJSON_ALIGN(x) (((x) + static_cast<size_t>(7u)) & ~static_cast<size_t>(7u))
User can override it by defining RAPIDJSON_ASSERT(x) macro. #endif
\note Parsing errors are handled and can be customized by the ///////////////////////////////////////////////////////////////////////////////
\ref RAPIDJSON_ERRORS APIs. // RAPIDJSON_UINT64_C2
*/
#ifndef RAPIDJSON_ASSERT //! Construct a 64-bit literal by a pair of 32-bit integer.
#include <cassert> /*!
#define RAPIDJSON_ASSERT(x) assert(x) 64-bit literal with or without ULL suffix is prone to compiler warnings.
#endif // RAPIDJSON_ASSERT UINT64_C() is C macro which cause compilation problems.
Use this macro to define 64-bit constants by a pair of 32-bit integer.
/////////////////////////////////////////////////////////////////////////////// */
// RAPIDJSON_STATIC_ASSERT #ifndef RAPIDJSON_UINT64_C2
#define RAPIDJSON_UINT64_C2(high32, low32) ((static_cast<uint64_t>(high32) << 32) | static_cast<uint64_t>(low32))
// Adopt from boost #endif
#ifndef RAPIDJSON_STATIC_ASSERT
//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN ///////////////////////////////////////////////////////////////////////////////
RAPIDJSON_NAMESPACE_BEGIN // RAPIDJSON_48BITPOINTER_OPTIMIZATION
template <bool x> struct STATIC_ASSERTION_FAILURE;
template <> struct STATIC_ASSERTION_FAILURE<true> { enum { value = 1 }; }; //! Use only lower 48-bit address for some pointers.
template<int x> struct StaticAssertTest {}; /*!
RAPIDJSON_NAMESPACE_END \ingroup RAPIDJSON_CONFIG
#define RAPIDJSON_JOIN(X, Y) RAPIDJSON_DO_JOIN(X, Y) This optimization uses the fact that current X86-64 architecture only implement lower 48-bit virtual address.
#define RAPIDJSON_DO_JOIN(X, Y) RAPIDJSON_DO_JOIN2(X, Y) The higher 16-bit can be used for storing other data.
#define RAPIDJSON_DO_JOIN2(X, Y) X##Y \c GenericValue uses this optimization to reduce its size form 24 bytes to 16 bytes in 64-bit architecture.
*/
#if defined(__GNUC__) #ifndef RAPIDJSON_48BITPOINTER_OPTIMIZATION
#define RAPIDJSON_STATIC_ASSERT_UNUSED_ATTRIBUTE __attribute__((unused)) #if defined(__amd64__) || defined(__amd64) || defined(__x86_64__) || defined(__x86_64) || defined(_M_X64) || defined(_M_AMD64)
#else #define RAPIDJSON_48BITPOINTER_OPTIMIZATION 1
#define RAPIDJSON_STATIC_ASSERT_UNUSED_ATTRIBUTE #else
#endif #define RAPIDJSON_48BITPOINTER_OPTIMIZATION 0
//!@endcond #endif
#endif // RAPIDJSON_48BITPOINTER_OPTIMIZATION
/*! \def RAPIDJSON_STATIC_ASSERT
\brief (Internal) macro to check for conditions at compile-time #if RAPIDJSON_48BITPOINTER_OPTIMIZATION == 1
\param x compile-time condition #if RAPIDJSON_64BIT != 1
\hideinitializer #error RAPIDJSON_48BITPOINTER_OPTIMIZATION can only be set to 1 when RAPIDJSON_64BIT=1
*/ #endif
#define RAPIDJSON_STATIC_ASSERT(x) \ #define RAPIDJSON_SETPOINTER(type, p, x) (p = reinterpret_cast<type *>((reinterpret_cast<uintptr_t>(p) & static_cast<uintptr_t>(RAPIDJSON_UINT64_C2(0xFFFF0000, 0x00000000))) | reinterpret_cast<uintptr_t>(reinterpret_cast<const void*>(x))))
typedef ::RAPIDJSON_NAMESPACE::StaticAssertTest< \ #define RAPIDJSON_GETPOINTER(type, p) (reinterpret_cast<type *>(reinterpret_cast<uintptr_t>(p) & static_cast<uintptr_t>(RAPIDJSON_UINT64_C2(0x0000FFFF, 0xFFFFFFFF))))
sizeof(::RAPIDJSON_NAMESPACE::STATIC_ASSERTION_FAILURE<bool(x) >)> \ #else
RAPIDJSON_JOIN(StaticAssertTypedef, __LINE__) RAPIDJSON_STATIC_ASSERT_UNUSED_ATTRIBUTE #define RAPIDJSON_SETPOINTER(type, p, x) (p = (x))
#endif #define RAPIDJSON_GETPOINTER(type, p) (p)
#endif
///////////////////////////////////////////////////////////////////////////////
// Helpers ///////////////////////////////////////////////////////////////////////////////
// RAPIDJSON_SSE2/RAPIDJSON_SSE42/RAPIDJSON_NEON/RAPIDJSON_SIMD
//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
/*! \def RAPIDJSON_SIMD
#define RAPIDJSON_MULTILINEMACRO_BEGIN do { \ingroup RAPIDJSON_CONFIG
#define RAPIDJSON_MULTILINEMACRO_END \ \brief Enable SSE2/SSE4.2/Neon optimization.
} while((void)0, 0)
RapidJSON supports optimized implementations for some parsing operations
// adopted from Boost based on the SSE2, SSE4.2 or NEon SIMD extensions on modern Intel
#define RAPIDJSON_VERSION_CODE(x,y,z) \ or ARM compatible processors.
(((x)*100000) + ((y)*100) + (z))
To enable these optimizations, three different symbols can be defined;
// token stringification \code
#define RAPIDJSON_STRINGIFY(x) RAPIDJSON_DO_STRINGIFY(x) // Enable SSE2 optimization.
#define RAPIDJSON_DO_STRINGIFY(x) #x #define RAPIDJSON_SSE2
/////////////////////////////////////////////////////////////////////////////// // Enable SSE4.2 optimization.
// RAPIDJSON_DIAG_PUSH/POP, RAPIDJSON_DIAG_OFF #define RAPIDJSON_SSE42
\endcode
#if defined(__GNUC__)
#define RAPIDJSON_GNUC \ // Enable ARM Neon optimization.
RAPIDJSON_VERSION_CODE(__GNUC__,__GNUC_MINOR__,__GNUC_PATCHLEVEL__) #define RAPIDJSON_NEON
#endif \endcode
#if defined(__clang__) || (defined(RAPIDJSON_GNUC) && RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,2,0)) \c RAPIDJSON_SSE42 takes precedence over SSE2, if both are defined.
#define RAPIDJSON_PRAGMA(x) _Pragma(RAPIDJSON_STRINGIFY(x)) If any of these symbols is defined, RapidJSON defines the macro
#define RAPIDJSON_DIAG_PRAGMA(x) RAPIDJSON_PRAGMA(GCC diagnostic x) \c RAPIDJSON_SIMD to indicate the availability of the optimized code.
#define RAPIDJSON_DIAG_OFF(x) \ */
RAPIDJSON_DIAG_PRAGMA(ignored RAPIDJSON_STRINGIFY(RAPIDJSON_JOIN(-W,x))) #if defined(RAPIDJSON_SSE2) || defined(RAPIDJSON_SSE42) \
|| defined(RAPIDJSON_NEON) || defined(RAPIDJSON_DOXYGEN_RUNNING)
// push/pop support in Clang and GCC>=4.6 #define RAPIDJSON_SIMD
#if defined(__clang__) || (defined(RAPIDJSON_GNUC) && RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,6,0)) #endif
#define RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_PRAGMA(push)
#define RAPIDJSON_DIAG_POP RAPIDJSON_DIAG_PRAGMA(pop) ///////////////////////////////////////////////////////////////////////////////
#else // GCC >= 4.2, < 4.6 // RAPIDJSON_NO_SIZETYPEDEFINE
#define RAPIDJSON_DIAG_PUSH /* ignored */
#define RAPIDJSON_DIAG_POP /* ignored */ #ifndef RAPIDJSON_NO_SIZETYPEDEFINE
#endif /*! \def RAPIDJSON_NO_SIZETYPEDEFINE
\ingroup RAPIDJSON_CONFIG
#elif defined(_MSC_VER) \brief User-provided \c SizeType definition.
// pragma (MSVC specific) In order to avoid using 32-bit size types for indexing strings and arrays,
#define RAPIDJSON_PRAGMA(x) __pragma(x) define this preprocessor symbol and provide the type rapidjson::SizeType
#define RAPIDJSON_DIAG_PRAGMA(x) RAPIDJSON_PRAGMA(warning(x)) before including RapidJSON:
\code
#define RAPIDJSON_DIAG_OFF(x) RAPIDJSON_DIAG_PRAGMA(disable: x) #define RAPIDJSON_NO_SIZETYPEDEFINE
#define RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_PRAGMA(push) namespace rapidjson { typedef ::std::size_t SizeType; }
#define RAPIDJSON_DIAG_POP RAPIDJSON_DIAG_PRAGMA(pop) #include "rapidjson/..."
\endcode
#else
\see rapidjson::SizeType
#define RAPIDJSON_DIAG_OFF(x) /* ignored */ */
#define RAPIDJSON_DIAG_PUSH /* ignored */ #ifdef RAPIDJSON_DOXYGEN_RUNNING
#define RAPIDJSON_DIAG_POP /* ignored */ #define RAPIDJSON_NO_SIZETYPEDEFINE
#endif
#endif // RAPIDJSON_DIAG_* RAPIDJSON_NAMESPACE_BEGIN
//! Size type (for string lengths, array sizes, etc.)
/////////////////////////////////////////////////////////////////////////////// /*! RapidJSON uses 32-bit array/string indices even on 64-bit platforms,
// C++11 features instead of using \c size_t. Users may override the SizeType by defining
\ref RAPIDJSON_NO_SIZETYPEDEFINE.
#ifndef RAPIDJSON_HAS_CXX11_RVALUE_REFS */
#if defined(__clang__) typedef unsigned SizeType;
#define RAPIDJSON_HAS_CXX11_RVALUE_REFS __has_feature(cxx_rvalue_references) RAPIDJSON_NAMESPACE_END
#elif (defined(RAPIDJSON_GNUC) && (RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,3,0)) && defined(__GXX_EXPERIMENTAL_CXX0X__)) || \ #endif
(defined(_MSC_VER) && _MSC_VER >= 1600)
// always import std::size_t to rapidjson namespace
#define RAPIDJSON_HAS_CXX11_RVALUE_REFS 1 RAPIDJSON_NAMESPACE_BEGIN
#else using std::size_t;
#define RAPIDJSON_HAS_CXX11_RVALUE_REFS 0 RAPIDJSON_NAMESPACE_END
#endif
#endif // RAPIDJSON_HAS_CXX11_RVALUE_REFS ///////////////////////////////////////////////////////////////////////////////
// RAPIDJSON_ASSERT
#ifndef RAPIDJSON_HAS_CXX11_NOEXCEPT
#if defined(__clang__) //! Assertion.
#define RAPIDJSON_HAS_CXX11_NOEXCEPT __has_feature(cxx_noexcept) /*! \ingroup RAPIDJSON_CONFIG
#elif (defined(RAPIDJSON_GNUC) && (RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,6,0)) && defined(__GXX_EXPERIMENTAL_CXX0X__)) By default, rapidjson uses C \c assert() for internal assertions.
// (defined(_MSC_VER) && _MSC_VER >= ????) // not yet supported User can override it by defining RAPIDJSON_ASSERT(x) macro.
#define RAPIDJSON_HAS_CXX11_NOEXCEPT 1
#else \note Parsing errors are handled and can be customized by the
#define RAPIDJSON_HAS_CXX11_NOEXCEPT 0 \ref RAPIDJSON_ERRORS APIs.
#endif */
#endif #ifndef RAPIDJSON_ASSERT
#if RAPIDJSON_HAS_CXX11_NOEXCEPT #include <cassert>
#define RAPIDJSON_NOEXCEPT noexcept #define RAPIDJSON_ASSERT(x) assert(x)
#else #endif // RAPIDJSON_ASSERT
#define RAPIDJSON_NOEXCEPT /* noexcept */
#endif // RAPIDJSON_HAS_CXX11_NOEXCEPT ///////////////////////////////////////////////////////////////////////////////
// RAPIDJSON_STATIC_ASSERT
// no automatic detection, yet
#ifndef RAPIDJSON_HAS_CXX11_TYPETRAITS // Prefer C++11 static_assert, if available
#define RAPIDJSON_HAS_CXX11_TYPETRAITS 0 #ifndef RAPIDJSON_STATIC_ASSERT
#endif #if RAPIDJSON_CPLUSPLUS >= 201103L || ( defined(_MSC_VER) && _MSC_VER >= 1800 )
#define RAPIDJSON_STATIC_ASSERT(x) \
//!@endcond static_assert(x, RAPIDJSON_STRINGIFY(x))
#endif // C++11
/////////////////////////////////////////////////////////////////////////////// #endif // RAPIDJSON_STATIC_ASSERT
// new/delete
// Adopt C++03 implementation from boost
#ifndef RAPIDJSON_NEW #ifndef RAPIDJSON_STATIC_ASSERT
///! customization point for global \c new #ifndef __clang__
#define RAPIDJSON_NEW(x) new x //!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
#endif #endif
#ifndef RAPIDJSON_DELETE RAPIDJSON_NAMESPACE_BEGIN
///! customization point for global \c delete template <bool x> struct STATIC_ASSERTION_FAILURE;
#define RAPIDJSON_DELETE(x) delete x template <> struct STATIC_ASSERTION_FAILURE<true> { enum { value = 1 }; };
#endif template <size_t x> struct StaticAssertTest {};
RAPIDJSON_NAMESPACE_END
///////////////////////////////////////////////////////////////////////////////
// Allocators and Encodings #if defined(__GNUC__) || defined(__clang__)
#define RAPIDJSON_STATIC_ASSERT_UNUSED_ATTRIBUTE __attribute__((unused))
#include "allocators.h" #else
#include "encodings.h" #define RAPIDJSON_STATIC_ASSERT_UNUSED_ATTRIBUTE
#endif
/*! \namespace rapidjson #ifndef __clang__
\brief main RapidJSON namespace //!@endcond
\see RAPIDJSON_NAMESPACE #endif
*/
RAPIDJSON_NAMESPACE_BEGIN /*! \def RAPIDJSON_STATIC_ASSERT
\brief (Internal) macro to check for conditions at compile-time
/////////////////////////////////////////////////////////////////////////////// \param x compile-time condition
// Stream \hideinitializer
*/
/*! \class rapidjson::Stream #define RAPIDJSON_STATIC_ASSERT(x) \
\brief Concept for reading and writing characters. typedef ::RAPIDJSON_NAMESPACE::StaticAssertTest< \
sizeof(::RAPIDJSON_NAMESPACE::STATIC_ASSERTION_FAILURE<bool(x) >)> \
For read-only stream, no need to implement PutBegin(), Put(), Flush() and PutEnd(). RAPIDJSON_JOIN(StaticAssertTypedef, __LINE__) RAPIDJSON_STATIC_ASSERT_UNUSED_ATTRIBUTE
#endif // RAPIDJSON_STATIC_ASSERT
For write-only stream, only need to implement Put() and Flush().
///////////////////////////////////////////////////////////////////////////////
\code // RAPIDJSON_LIKELY, RAPIDJSON_UNLIKELY
concept Stream {
typename Ch; //!< Character type of the stream. //! Compiler branching hint for expression with high probability to be true.
/*!
//! Read the current character from stream without moving the read cursor. \ingroup RAPIDJSON_CONFIG
Ch Peek() const; \param x Boolean expression likely to be true.
*/
//! Read the current character from stream and moving the read cursor to next character. #ifndef RAPIDJSON_LIKELY
Ch Take(); #if defined(__GNUC__) || defined(__clang__)
#define RAPIDJSON_LIKELY(x) __builtin_expect(!!(x), 1)
//! Get the current read cursor. #else
//! \return Number of characters read from start. #define RAPIDJSON_LIKELY(x) (x)
size_t Tell(); #endif
#endif
//! Begin writing operation at the current read pointer.
//! \return The begin writer pointer. //! Compiler branching hint for expression with low probability to be true.
Ch* PutBegin(); /*!
\ingroup RAPIDJSON_CONFIG
//! Write a character. \param x Boolean expression unlikely to be true.
void Put(Ch c); */
#ifndef RAPIDJSON_UNLIKELY
//! Flush the buffer. #if defined(__GNUC__) || defined(__clang__)
void Flush(); #define RAPIDJSON_UNLIKELY(x) __builtin_expect(!!(x), 0)
#else
//! End the writing operation. #define RAPIDJSON_UNLIKELY(x) (x)
//! \param begin The begin write pointer returned by PutBegin(). #endif
//! \return Number of characters written. #endif
size_t PutEnd(Ch* begin);
} ///////////////////////////////////////////////////////////////////////////////
\endcode // Helpers
*/
//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
//! Provides additional information for stream.
/*! #define RAPIDJSON_MULTILINEMACRO_BEGIN do {
By using traits pattern, this type provides a default configuration for stream. #define RAPIDJSON_MULTILINEMACRO_END \
For custom stream, this type can be specialized for other configuration. } while((void)0, 0)
See TEST(Reader, CustomStringStream) in readertest.cpp for example.
*/ // adopted from Boost
template<typename Stream> #define RAPIDJSON_VERSION_CODE(x,y,z) \
struct StreamTraits { (((x)*100000) + ((y)*100) + (z))
//! Whether to make local copy of stream for optimization during parsing.
/*! #if defined(__has_builtin)
By default, for safety, streams do not use local copy optimization. #define RAPIDJSON_HAS_BUILTIN(x) __has_builtin(x)
Stream that can be copied fast should specialize this, like StreamTraits<StringStream>. #else
*/ #define RAPIDJSON_HAS_BUILTIN(x) 0
enum { copyOptimization = 0 }; #endif
};
///////////////////////////////////////////////////////////////////////////////
//! Put N copies of a character to a stream. // RAPIDJSON_DIAG_PUSH/POP, RAPIDJSON_DIAG_OFF
template<typename Stream, typename Ch>
inline void PutN(Stream& stream, Ch c, size_t n) { #if defined(__GNUC__)
for (size_t i = 0; i < n; i++) #define RAPIDJSON_GNUC \
stream.Put(c); RAPIDJSON_VERSION_CODE(__GNUC__,__GNUC_MINOR__,__GNUC_PATCHLEVEL__)
} #endif
/////////////////////////////////////////////////////////////////////////////// #if defined(__clang__) || (defined(RAPIDJSON_GNUC) && RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,2,0))
// StringStream
#define RAPIDJSON_PRAGMA(x) _Pragma(RAPIDJSON_STRINGIFY(x))
//! Read-only string stream. #define RAPIDJSON_DIAG_PRAGMA(x) RAPIDJSON_PRAGMA(GCC diagnostic x)
/*! \note implements Stream concept #define RAPIDJSON_DIAG_OFF(x) \
*/ RAPIDJSON_DIAG_PRAGMA(ignored RAPIDJSON_STRINGIFY(RAPIDJSON_JOIN(-W,x)))
template <typename Encoding>
struct GenericStringStream { // push/pop support in Clang and GCC>=4.6
typedef typename Encoding::Ch Ch; #if defined(__clang__) || (defined(RAPIDJSON_GNUC) && RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,6,0))
#define RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_PRAGMA(push)
GenericStringStream(const Ch *src) : src_(src), head_(src) {} #define RAPIDJSON_DIAG_POP RAPIDJSON_DIAG_PRAGMA(pop)
#else // GCC >= 4.2, < 4.6
Ch Peek() const { return *src_; } #define RAPIDJSON_DIAG_PUSH /* ignored */
Ch Take() { return *src_++; } #define RAPIDJSON_DIAG_POP /* ignored */
size_t Tell() const { return static_cast<size_t>(src_ - head_); } #endif
Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; } #elif defined(_MSC_VER)
void Put(Ch) { RAPIDJSON_ASSERT(false); }
void Flush() { RAPIDJSON_ASSERT(false); } // pragma (MSVC specific)
size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; } #define RAPIDJSON_PRAGMA(x) __pragma(x)
#define RAPIDJSON_DIAG_PRAGMA(x) RAPIDJSON_PRAGMA(warning(x))
const Ch* src_; //!< Current read position.
const Ch* head_; //!< Original head of the string. #define RAPIDJSON_DIAG_OFF(x) RAPIDJSON_DIAG_PRAGMA(disable: x)
}; #define RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_PRAGMA(push)
#define RAPIDJSON_DIAG_POP RAPIDJSON_DIAG_PRAGMA(pop)
template <typename Encoding>
struct StreamTraits<GenericStringStream<Encoding> > { #else
enum { copyOptimization = 1 };
}; #define RAPIDJSON_DIAG_OFF(x) /* ignored */
#define RAPIDJSON_DIAG_PUSH /* ignored */
//! String stream with UTF8 encoding. #define RAPIDJSON_DIAG_POP /* ignored */
typedef GenericStringStream<UTF8<> > StringStream;
#endif // RAPIDJSON_DIAG_*
///////////////////////////////////////////////////////////////////////////////
// InsituStringStream ///////////////////////////////////////////////////////////////////////////////
// C++11 features
//! A read-write string stream.
/*! This string stream is particularly designed for in-situ parsing. #ifndef RAPIDJSON_HAS_CXX11
\note implements Stream concept #define RAPIDJSON_HAS_CXX11 (RAPIDJSON_CPLUSPLUS >= 201103L)
*/ #endif
template <typename Encoding>
struct GenericInsituStringStream { #ifndef RAPIDJSON_HAS_CXX11_RVALUE_REFS
typedef typename Encoding::Ch Ch; #if RAPIDJSON_HAS_CXX11
#define RAPIDJSON_HAS_CXX11_RVALUE_REFS 1
GenericInsituStringStream(Ch *src) : src_(src), dst_(0), head_(src) {} #elif defined(__clang__)
#if __has_feature(cxx_rvalue_references) && \
// Read (defined(_MSC_VER) || defined(_LIBCPP_VERSION) || defined(__GLIBCXX__) && __GLIBCXX__ >= 20080306)
Ch Peek() { return *src_; } #define RAPIDJSON_HAS_CXX11_RVALUE_REFS 1
Ch Take() { return *src_++; } #else
size_t Tell() { return static_cast<size_t>(src_ - head_); } #define RAPIDJSON_HAS_CXX11_RVALUE_REFS 0
#endif
// Write #elif (defined(RAPIDJSON_GNUC) && (RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,3,0)) && defined(__GXX_EXPERIMENTAL_CXX0X__)) || \
void Put(Ch c) { RAPIDJSON_ASSERT(dst_ != 0); *dst_++ = c; } (defined(_MSC_VER) && _MSC_VER >= 1600) || \
(defined(__SUNPRO_CC) && __SUNPRO_CC >= 0x5140 && defined(__GXX_EXPERIMENTAL_CXX0X__))
Ch* PutBegin() { return dst_ = src_; }
size_t PutEnd(Ch* begin) { return static_cast<size_t>(dst_ - begin); } #define RAPIDJSON_HAS_CXX11_RVALUE_REFS 1
void Flush() {} #else
#define RAPIDJSON_HAS_CXX11_RVALUE_REFS 0
Ch* Push(size_t count) { Ch* begin = dst_; dst_ += count; return begin; } #endif
void Pop(size_t count) { dst_ -= count; } #endif // RAPIDJSON_HAS_CXX11_RVALUE_REFS
Ch* src_; #if RAPIDJSON_HAS_CXX11_RVALUE_REFS
Ch* dst_; #include <utility> // std::move
Ch* head_; #endif
};
#ifndef RAPIDJSON_HAS_CXX11_NOEXCEPT
template <typename Encoding> #if RAPIDJSON_HAS_CXX11
struct StreamTraits<GenericInsituStringStream<Encoding> > { #define RAPIDJSON_HAS_CXX11_NOEXCEPT 1
enum { copyOptimization = 1 }; #elif defined(__clang__)
}; #define RAPIDJSON_HAS_CXX11_NOEXCEPT __has_feature(cxx_noexcept)
#elif (defined(RAPIDJSON_GNUC) && (RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,6,0)) && defined(__GXX_EXPERIMENTAL_CXX0X__)) || \
//! Insitu string stream with UTF8 encoding. (defined(_MSC_VER) && _MSC_VER >= 1900) || \
typedef GenericInsituStringStream<UTF8<> > InsituStringStream; (defined(__SUNPRO_CC) && __SUNPRO_CC >= 0x5140 && defined(__GXX_EXPERIMENTAL_CXX0X__))
#define RAPIDJSON_HAS_CXX11_NOEXCEPT 1
/////////////////////////////////////////////////////////////////////////////// #else
// Type #define RAPIDJSON_HAS_CXX11_NOEXCEPT 0
#endif
//! Type of JSON value #endif
enum Type { #ifndef RAPIDJSON_NOEXCEPT
kNullType = 0, //!< null #if RAPIDJSON_HAS_CXX11_NOEXCEPT
kFalseType = 1, //!< false #define RAPIDJSON_NOEXCEPT noexcept
kTrueType = 2, //!< true #else
kObjectType = 3, //!< object #define RAPIDJSON_NOEXCEPT throw()
kArrayType = 4, //!< array #endif // RAPIDJSON_HAS_CXX11_NOEXCEPT
kStringType = 5, //!< string #endif
kNumberType = 6 //!< number
}; // no automatic detection, yet
#ifndef RAPIDJSON_HAS_CXX11_TYPETRAITS
RAPIDJSON_NAMESPACE_END #if (defined(_MSC_VER) && _MSC_VER >= 1700)
#define RAPIDJSON_HAS_CXX11_TYPETRAITS 1
#endif // RAPIDJSON_RAPIDJSON_H_ #else
#define RAPIDJSON_HAS_CXX11_TYPETRAITS 0
#endif
#endif
#ifndef RAPIDJSON_HAS_CXX11_RANGE_FOR
#if defined(__clang__)
#define RAPIDJSON_HAS_CXX11_RANGE_FOR __has_feature(cxx_range_for)
#elif (defined(RAPIDJSON_GNUC) && (RAPIDJSON_GNUC >= RAPIDJSON_VERSION_CODE(4,6,0)) && defined(__GXX_EXPERIMENTAL_CXX0X__)) || \
(defined(_MSC_VER) && _MSC_VER >= 1700) || \
(defined(__SUNPRO_CC) && __SUNPRO_CC >= 0x5140 && defined(__GXX_EXPERIMENTAL_CXX0X__))
#define RAPIDJSON_HAS_CXX11_RANGE_FOR 1
#else
#define RAPIDJSON_HAS_CXX11_RANGE_FOR 0
#endif
#endif // RAPIDJSON_HAS_CXX11_RANGE_FOR
///////////////////////////////////////////////////////////////////////////////
// C++17 features
#ifndef RAPIDJSON_HAS_CXX17
#define RAPIDJSON_HAS_CXX17 (RAPIDJSON_CPLUSPLUS >= 201703L)
#endif
#if RAPIDJSON_HAS_CXX17
# define RAPIDJSON_DELIBERATE_FALLTHROUGH [[fallthrough]]
#elif defined(__has_cpp_attribute)
# if __has_cpp_attribute(clang::fallthrough)
# define RAPIDJSON_DELIBERATE_FALLTHROUGH [[clang::fallthrough]]
# elif __has_cpp_attribute(fallthrough)
# define RAPIDJSON_DELIBERATE_FALLTHROUGH __attribute__((fallthrough))
# else
# define RAPIDJSON_DELIBERATE_FALLTHROUGH
# endif
#else
# define RAPIDJSON_DELIBERATE_FALLTHROUGH
#endif
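// Illustrative sketch (not part of the header): RAPIDJSON_DELIBERATE_FALLTHROUGH marks an
// intentional switch fall-through so -Wimplicit-fallthrough style warnings stay quiet on
// every supported standard. Assumes "rapidjson/rapidjson.h" has been included for Type.
inline bool IsContainerType(rapidjson::Type t) {
    switch (t) {
    case rapidjson::kObjectType:
        RAPIDJSON_DELIBERATE_FALLTHROUGH;   // objects and arrays are both containers
    case rapidjson::kArrayType:
        return true;
    default:
        return false;
    }
}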
//!@endcond
//! Assertion (in non-throwing contexts).
/*! \ingroup RAPIDJSON_CONFIG
Some functions provide a \c noexcept guarantee, if the compiler supports it.
In these cases, the \ref RAPIDJSON_ASSERT macro cannot be overridden to
throw an exception. This macro adds a separate customization point for
such cases.
Defaults to C \c assert() (as \ref RAPIDJSON_ASSERT), if \c noexcept is
supported, and to \ref RAPIDJSON_ASSERT otherwise.
*/
///////////////////////////////////////////////////////////////////////////////
// RAPIDJSON_NOEXCEPT_ASSERT
#ifndef RAPIDJSON_NOEXCEPT_ASSERT
#ifdef RAPIDJSON_ASSERT_THROWS
#include <cassert>
#define RAPIDJSON_NOEXCEPT_ASSERT(x) assert(x)
#else
#define RAPIDJSON_NOEXCEPT_ASSERT(x) RAPIDJSON_ASSERT(x)
#endif // RAPIDJSON_ASSERT_THROWS
#endif // RAPIDJSON_NOEXCEPT_ASSERT
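// A minimal sketch (not part of the header) of how these customization points interact:
// redefining RAPIDJSON_ASSERT to throw and defining RAPIDJSON_ASSERT_THROWS keeps the
// noexcept code paths on plain assert() while other assertions report errors by exception.
// Both macros must be defined before the first RapidJSON include.
#include <stdexcept>
inline void RapidJsonAssertThrow(bool ok, const char* what) { if (!ok) throw std::logic_error(what); }
#define RAPIDJSON_ASSERT_THROWS
#define RAPIDJSON_ASSERT(x) RapidJsonAssertThrow(!!(x), "rapidjson assert failed: " #x)
#include "rapidjson/document.h"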
///////////////////////////////////////////////////////////////////////////////
// malloc/realloc/free
#ifndef RAPIDJSON_MALLOC
///! customization point for global \c malloc
#define RAPIDJSON_MALLOC(size) std::malloc(size)
#endif
#ifndef RAPIDJSON_REALLOC
///! customization point for global \c realloc
#define RAPIDJSON_REALLOC(ptr, new_size) std::realloc(ptr, new_size)
#endif
#ifndef RAPIDJSON_FREE
///! customization point for global \c free
#define RAPIDJSON_FREE(ptr) std::free(ptr)
#endif
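// Usage sketch (not part of the header): because each macro above is wrapped in #ifndef,
// a project can route RapidJSON's internal heap calls through its own functions simply by
// defining the macros before any RapidJSON header is included.
#include <cstdio>
#include <cstdlib>
inline void* TracingMalloc(std::size_t n) { std::printf("rapidjson malloc %zu\n", n); return std::malloc(n); }
#define RAPIDJSON_MALLOC(size)       TracingMalloc(size)
#define RAPIDJSON_REALLOC(ptr, size) std::realloc(ptr, size)
#define RAPIDJSON_FREE(ptr)          std::free(ptr)
#include "rapidjson/document.h"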
///////////////////////////////////////////////////////////////////////////////
// new/delete
#ifndef RAPIDJSON_NEW
///! customization point for global \c new
#define RAPIDJSON_NEW(TypeName) new TypeName
#endif
#ifndef RAPIDJSON_DELETE
///! customization point for global \c delete
#define RAPIDJSON_DELETE(x) delete x
#endif
///////////////////////////////////////////////////////////////////////////////
// Type
/*! \namespace rapidjson
\brief main RapidJSON namespace
\see RAPIDJSON_NAMESPACE
*/
RAPIDJSON_NAMESPACE_BEGIN
//! Type of JSON value
enum Type {
kNullType = 0, //!< null
kFalseType = 1, //!< false
kTrueType = 2, //!< true
kObjectType = 3, //!< object
kArrayType = 4, //!< array
kStringType = 5, //!< string
kNumberType = 6 //!< number
};
RAPIDJSON_NAMESPACE_END
#endif // RAPIDJSON_RAPIDJSON_H_
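// Quick illustration (not from either file in this diff): the Type enum above is what
// GenericValue and Document report through GetType(). Assumes "rapidjson/document.h"
// from the same tree and <cassert>.
#include <cassert>
#include "rapidjson/document.h"
inline void TypeEnumExample() {
    rapidjson::Document d;
    d.Parse("{\"xs\":[1,2,3]}");
    assert(d.GetType() == rapidjson::kObjectType);
    assert(d["xs"].GetType() == rapidjson::kArrayType);
    assert(d["xs"][0].GetType() == rapidjson::kNumberType);
}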
// Copyright (C) 2011 Milo Yip // Tencent is pleased to support the open source community by making RapidJSON available.
// //
// Permission is hereby granted, free of charge, to any person obtaining a copy // Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
// of this software and associated documentation files (the "Software"), to deal //
// in the Software without restriction, including without limitation the rights // Licensed under the MIT License (the "License"); you may not use this file except
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // in compliance with the License. You may obtain a copy of the License at
// copies of the Software, and to permit persons to whom the Software is //
// furnished to do so, subject to the following conditions: // http://opensource.org/licenses/MIT
// //
// The above copyright notice and this permission notice shall be included in // Unless required by applicable law or agreed to in writing, software distributed
// all copies or substantial portions of the Software. // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// // CONDITIONS OF ANY KIND, either express or implied. See the License for the
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // specific language governing permissions and limitations under the License.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #ifndef RAPIDJSON_READER_H_
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #define RAPIDJSON_READER_H_
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN /*! \file reader.h */
// THE SOFTWARE.
#include "allocators.h"
#ifndef RAPIDJSON_READER_H_ #include "stream.h"
#define RAPIDJSON_READER_H_ #include "encodedstream.h"
#include "internal/clzll.h"
/*! \file reader.h */ #include "internal/meta.h"
#include "internal/stack.h"
#include "rapidjson.h" #include "internal/strtod.h"
#include "encodings.h" #include <limits>
#include "internal/meta.h"
#include "internal/stack.h" #if defined(RAPIDJSON_SIMD) && defined(_MSC_VER)
#include "internal/strtod.h" #include <intrin.h>
#pragma intrinsic(_BitScanForward)
#if defined(RAPIDJSON_SIMD) && defined(_MSC_VER) #endif
#include <intrin.h> #ifdef RAPIDJSON_SSE42
#pragma intrinsic(_BitScanForward) #include <nmmintrin.h>
#endif #elif defined(RAPIDJSON_SSE2)
#ifdef RAPIDJSON_SSE42 #include <emmintrin.h>
#include <nmmintrin.h> #elif defined(RAPIDJSON_NEON)
#elif defined(RAPIDJSON_SSE2) #include <arm_neon.h>
#include <emmintrin.h> #endif
#endif
#ifdef __clang__
#ifdef _MSC_VER RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_OFF(old-style-cast)
RAPIDJSON_DIAG_OFF(4127) // conditional expression is constant RAPIDJSON_DIAG_OFF(padded)
RAPIDJSON_DIAG_OFF(4702) // unreachable code RAPIDJSON_DIAG_OFF(switch-enum)
#endif #elif defined(_MSC_VER)
RAPIDJSON_DIAG_PUSH
#ifdef __GNUC__ RAPIDJSON_DIAG_OFF(4127) // conditional expression is constant
RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_OFF(4702) // unreachable code
RAPIDJSON_DIAG_OFF(effc++) #endif
#endif
#ifdef __GNUC__
//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN RAPIDJSON_DIAG_PUSH
#define RAPIDJSON_NOTHING /* deliberately empty */ RAPIDJSON_DIAG_OFF(effc++)
#ifndef RAPIDJSON_PARSE_ERROR_EARLY_RETURN #endif
#define RAPIDJSON_PARSE_ERROR_EARLY_RETURN(value) \
RAPIDJSON_MULTILINEMACRO_BEGIN \ //!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
if (HasParseError()) { return value; } \ #define RAPIDJSON_NOTHING /* deliberately empty */
RAPIDJSON_MULTILINEMACRO_END #ifndef RAPIDJSON_PARSE_ERROR_EARLY_RETURN
#endif #define RAPIDJSON_PARSE_ERROR_EARLY_RETURN(value) \
#define RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID \ RAPIDJSON_MULTILINEMACRO_BEGIN \
RAPIDJSON_PARSE_ERROR_EARLY_RETURN(RAPIDJSON_NOTHING) if (RAPIDJSON_UNLIKELY(HasParseError())) { return value; } \
//!@endcond RAPIDJSON_MULTILINEMACRO_END
#endif
/*! \def RAPIDJSON_PARSE_ERROR_NORETURN #define RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID \
\ingroup RAPIDJSON_ERRORS RAPIDJSON_PARSE_ERROR_EARLY_RETURN(RAPIDJSON_NOTHING)
\brief Macro to indicate a parse error. //!@endcond
\param parseErrorCode \ref rapidjson::ParseErrorCode of the error
\param offset position of the error in JSON input (\c size_t) /*! \def RAPIDJSON_PARSE_ERROR_NORETURN
\ingroup RAPIDJSON_ERRORS
This macro can be used as a customization point for the internal \brief Macro to indicate a parse error.
error handling mechanism of RapidJSON. \param parseErrorCode \ref rapidjson::ParseErrorCode of the error
\param offset position of the error in JSON input (\c size_t)
A common usage model is to throw an exception instead of requiring the
caller to explicitly check the \ref rapidjson::GenericReader::Parse's This macro can be used as a customization point for the internal
return value: error handling mechanism of RapidJSON.
\code A common usage model is to throw an exception instead of requiring the
#define RAPIDJSON_PARSE_ERROR_NORETURN(parseErrorCode,offset) \ caller to explicitly check the \ref rapidjson::GenericReader::Parse's
throw ParseException(parseErrorCode, #parseErrorCode, offset) return value:
#include <stdexcept> // std::runtime_error \code
#include "rapidjson/error/error.h" // rapidjson::ParseResult #define RAPIDJSON_PARSE_ERROR_NORETURN(parseErrorCode,offset) \
throw ParseException(parseErrorCode, #parseErrorCode, offset)
struct ParseException : std::runtime_error, rapidjson::ParseResult {
ParseException(rapidjson::ParseErrorCode code, const char* msg, size_t offset) #include <stdexcept> // std::runtime_error
: std::runtime_error(msg), ParseResult(code, offset) {} #include "rapidjson/error/error.h" // rapidjson::ParseResult
};
struct ParseException : std::runtime_error, rapidjson::ParseResult {
#include "rapidjson/reader.h" ParseException(rapidjson::ParseErrorCode code, const char* msg, size_t offset)
\endcode : std::runtime_error(msg), ParseResult(code, offset) {}
};
\see RAPIDJSON_PARSE_ERROR, rapidjson::GenericReader::Parse
*/ #include "rapidjson/reader.h"
#ifndef RAPIDJSON_PARSE_ERROR_NORETURN \endcode
#define RAPIDJSON_PARSE_ERROR_NORETURN(parseErrorCode, offset) \
RAPIDJSON_MULTILINEMACRO_BEGIN \ \see RAPIDJSON_PARSE_ERROR, rapidjson::GenericReader::Parse
RAPIDJSON_ASSERT(!HasParseError()); /* Error can only be assigned once */ \ */
SetParseError(parseErrorCode, offset); \ #ifndef RAPIDJSON_PARSE_ERROR_NORETURN
RAPIDJSON_MULTILINEMACRO_END #define RAPIDJSON_PARSE_ERROR_NORETURN(parseErrorCode, offset) \
#endif RAPIDJSON_MULTILINEMACRO_BEGIN \
RAPIDJSON_ASSERT(!HasParseError()); /* Error can only be assigned once */ \
/*! \def RAPIDJSON_PARSE_ERROR SetParseError(parseErrorCode, offset); \
\ingroup RAPIDJSON_ERRORS RAPIDJSON_MULTILINEMACRO_END
\brief (Internal) macro to indicate and handle a parse error. #endif
\param parseErrorCode \ref rapidjson::ParseErrorCode of the error
\param offset position of the error in JSON input (\c size_t) /*! \def RAPIDJSON_PARSE_ERROR
\ingroup RAPIDJSON_ERRORS
Invokes RAPIDJSON_PARSE_ERROR_NORETURN and stops the parsing. \brief (Internal) macro to indicate and handle a parse error.
\param parseErrorCode \ref rapidjson::ParseErrorCode of the error
\see RAPIDJSON_PARSE_ERROR_NORETURN \param offset position of the error in JSON input (\c size_t)
\hideinitializer
*/ Invokes RAPIDJSON_PARSE_ERROR_NORETURN and stops the parsing.
#ifndef RAPIDJSON_PARSE_ERROR
#define RAPIDJSON_PARSE_ERROR(parseErrorCode, offset) \ \see RAPIDJSON_PARSE_ERROR_NORETURN
RAPIDJSON_MULTILINEMACRO_BEGIN \ \hideinitializer
RAPIDJSON_PARSE_ERROR_NORETURN(parseErrorCode, offset); \ */
RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; \ #ifndef RAPIDJSON_PARSE_ERROR
RAPIDJSON_MULTILINEMACRO_END #define RAPIDJSON_PARSE_ERROR(parseErrorCode, offset) \
#endif RAPIDJSON_MULTILINEMACRO_BEGIN \
RAPIDJSON_PARSE_ERROR_NORETURN(parseErrorCode, offset); \
#include "error/error.h" // ParseErrorCode, ParseResult RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; \
RAPIDJSON_MULTILINEMACRO_END
RAPIDJSON_NAMESPACE_BEGIN #endif
/////////////////////////////////////////////////////////////////////////////// #include "error/error.h" // ParseErrorCode, ParseResult
// ParseFlag
RAPIDJSON_NAMESPACE_BEGIN
/*! \def RAPIDJSON_PARSE_DEFAULT_FLAGS
\ingroup RAPIDJSON_CONFIG ///////////////////////////////////////////////////////////////////////////////
\brief User-defined kParseDefaultFlags definition. // ParseFlag
User can define this as any \c ParseFlag combinations. /*! \def RAPIDJSON_PARSE_DEFAULT_FLAGS
*/ \ingroup RAPIDJSON_CONFIG
#ifndef RAPIDJSON_PARSE_DEFAULT_FLAGS \brief User-defined kParseDefaultFlags definition.
#define RAPIDJSON_PARSE_DEFAULT_FLAGS kParseNoFlags
#endif User can define this as any \c ParseFlag combinations.
*/
//! Combination of parseFlags #ifndef RAPIDJSON_PARSE_DEFAULT_FLAGS
/*! \see Reader::Parse, Document::Parse, Document::ParseInsitu, Document::ParseStream #define RAPIDJSON_PARSE_DEFAULT_FLAGS kParseNoFlags
*/ #endif
enum ParseFlag {
kParseNoFlags = 0, //!< No flags are set. //! Combination of parseFlags
kParseInsituFlag = 1, //!< In-situ(destructive) parsing. /*! \see Reader::Parse, Document::Parse, Document::ParseInsitu, Document::ParseStream
kParseValidateEncodingFlag = 2, //!< Validate encoding of JSON strings. */
kParseIterativeFlag = 4, //!< Iterative(constant complexity in terms of function call stack size) parsing. enum ParseFlag {
kParseStopWhenDoneFlag = 8, //!< After parsing a complete JSON root from stream, stop further processing the rest of stream. When this flag is used, parser will not generate kParseErrorDocumentRootNotSingular error. kParseNoFlags = 0, //!< No flags are set.
kParseFullPrecisionFlag = 16, //!< Parse number in full precision (but slower). kParseInsituFlag = 1, //!< In-situ(destructive) parsing.
kParseDefaultFlags = RAPIDJSON_PARSE_DEFAULT_FLAGS //!< Default parse flags. Can be customized by defining RAPIDJSON_PARSE_DEFAULT_FLAGS kParseValidateEncodingFlag = 2, //!< Validate encoding of JSON strings.
}; kParseIterativeFlag = 4, //!< Iterative(constant complexity in terms of function call stack size) parsing.
kParseStopWhenDoneFlag = 8, //!< After parsing a complete JSON root from stream, stop further processing the rest of stream. When this flag is used, parser will not generate kParseErrorDocumentRootNotSingular error.
/////////////////////////////////////////////////////////////////////////////// kParseFullPrecisionFlag = 16, //!< Parse number in full precision (but slower).
// Handler kParseCommentsFlag = 32, //!< Allow one-line (//) and multi-line (/**/) comments.
kParseNumbersAsStringsFlag = 64, //!< Parse all numbers (ints/doubles) as strings.
/*! \class rapidjson::Handler kParseTrailingCommasFlag = 128, //!< Allow trailing commas at the end of objects and arrays.
\brief Concept for receiving events from GenericReader upon parsing. kParseNanAndInfFlag = 256, //!< Allow parsing NaN, Inf, Infinity, -Inf and -Infinity as doubles.
The functions return true if no error occurs. If they return false, kParseEscapedApostropheFlag = 512, //!< Allow escaped apostrophe in strings.
the event publisher should terminate the process. kParseDefaultFlags = RAPIDJSON_PARSE_DEFAULT_FLAGS //!< Default parse flags. Can be customized by defining RAPIDJSON_PARSE_DEFAULT_FLAGS
\code };
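// A small sketch (not part of the header) of combining ParseFlag bits; the combination is
// passed as a compile-time template argument, here through Document::Parse from document.h.
#include "rapidjson/document.h"
inline bool ParseRelaxed(rapidjson::Document& d, const char* json) {
    // Accept // and /* */ comments, trailing commas, and NaN/Infinity literals.
    d.Parse<rapidjson::kParseCommentsFlag
            | rapidjson::kParseTrailingCommasFlag
            | rapidjson::kParseNanAndInfFlag>(json);
    return !d.HasParseError();
}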
concept Handler {
typename Ch; ///////////////////////////////////////////////////////////////////////////////
// Handler
bool Null();
bool Bool(bool b); /*! \class rapidjson::Handler
bool Int(int i); \brief Concept for receiving events from GenericReader upon parsing.
bool Uint(unsigned i); The functions return true if no error occurs. If they return false,
bool Int64(int64_t i); the event publisher should terminate the process.
bool Uint64(uint64_t i); \code
bool Double(double d); concept Handler {
bool String(const Ch* str, SizeType length, bool copy); typename Ch;
bool StartObject();
bool Key(const Ch* str, SizeType length, bool copy); bool Null();
bool EndObject(SizeType memberCount); bool Bool(bool b);
bool StartArray(); bool Int(int i);
bool EndArray(SizeType elementCount); bool Uint(unsigned i);
}; bool Int64(int64_t i);
\endcode bool Uint64(uint64_t i);
*/ bool Double(double d);
/////////////////////////////////////////////////////////////////////////////// /// enabled via kParseNumbersAsStringsFlag, string is not null-terminated (use length)
// BaseReaderHandler bool RawNumber(const Ch* str, SizeType length, bool copy);
bool String(const Ch* str, SizeType length, bool copy);
//! Default implementation of Handler. bool StartObject();
/*! This can be used as base class of any reader handler. bool Key(const Ch* str, SizeType length, bool copy);
\note implements Handler concept bool EndObject(SizeType memberCount);
*/ bool StartArray();
template<typename Encoding = UTF8<>, typename Derived = void> bool EndArray(SizeType elementCount);
struct BaseReaderHandler { };
typedef typename Encoding::Ch Ch; \endcode
*/
typedef typename internal::SelectIf<internal::IsSame<Derived, void>, BaseReaderHandler, Derived>::Type Override; ///////////////////////////////////////////////////////////////////////////////
// BaseReaderHandler
bool Default() { return true; }
bool Null() { return static_cast<Override&>(*this).Default(); } //! Default implementation of Handler.
bool Bool(bool) { return static_cast<Override&>(*this).Default(); } /*! This can be used as base class of any reader handler.
bool Int(int) { return static_cast<Override&>(*this).Default(); } \note implements Handler concept
bool Uint(unsigned) { return static_cast<Override&>(*this).Default(); } */
bool Int64(int64_t) { return static_cast<Override&>(*this).Default(); } template<typename Encoding = UTF8<>, typename Derived = void>
bool Uint64(uint64_t) { return static_cast<Override&>(*this).Default(); } struct BaseReaderHandler {
bool Double(double) { return static_cast<Override&>(*this).Default(); } typedef typename Encoding::Ch Ch;
bool String(const Ch*, SizeType, bool) { return static_cast<Override&>(*this).Default(); }
bool StartObject() { return static_cast<Override&>(*this).Default(); } typedef typename internal::SelectIf<internal::IsSame<Derived, void>, BaseReaderHandler, Derived>::Type Override;
bool Key(const Ch* str, SizeType len, bool copy) { return static_cast<Override&>(*this).String(str, len, copy); }
bool EndObject(SizeType) { return static_cast<Override&>(*this).Default(); } bool Default() { return true; }
bool StartArray() { return static_cast<Override&>(*this).Default(); } bool Null() { return static_cast<Override&>(*this).Default(); }
bool EndArray(SizeType) { return static_cast<Override&>(*this).Default(); } bool Bool(bool) { return static_cast<Override&>(*this).Default(); }
}; bool Int(int) { return static_cast<Override&>(*this).Default(); }
bool Uint(unsigned) { return static_cast<Override&>(*this).Default(); }
/////////////////////////////////////////////////////////////////////////////// bool Int64(int64_t) { return static_cast<Override&>(*this).Default(); }
// StreamLocalCopy bool Uint64(uint64_t) { return static_cast<Override&>(*this).Default(); }
bool Double(double) { return static_cast<Override&>(*this).Default(); }
namespace internal { /// enabled via kParseNumbersAsStringsFlag, string is not null-terminated (use length)
bool RawNumber(const Ch* str, SizeType len, bool copy) { return static_cast<Override&>(*this).String(str, len, copy); }
template<typename Stream, int = StreamTraits<Stream>::copyOptimization> bool String(const Ch*, SizeType, bool) { return static_cast<Override&>(*this).Default(); }
class StreamLocalCopy; bool StartObject() { return static_cast<Override&>(*this).Default(); }
bool Key(const Ch* str, SizeType len, bool copy) { return static_cast<Override&>(*this).String(str, len, copy); }
//! Do copy optimization. bool EndObject(SizeType) { return static_cast<Override&>(*this).Default(); }
template<typename Stream> bool StartArray() { return static_cast<Override&>(*this).Default(); }
class StreamLocalCopy<Stream, 1> { bool EndArray(SizeType) { return static_cast<Override&>(*this).Default(); }
public: };
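// A minimal SAX handler sketch (not part of the header): deriving from BaseReaderHandler
// means every event that is not overridden falls back to Default(), so only the callbacks
// of interest have to be written. Note that object keys also arrive at String() through
// Key()'s default forwarding.
#include <cstddef>
#include "rapidjson/reader.h"
struct MessageCounter : rapidjson::BaseReaderHandler<rapidjson::UTF8<>, MessageCounter> {
    MessageCounter() : strings(0), numbers(0) {}
    bool String(const Ch*, rapidjson::SizeType, bool) { ++strings; return true; }
    bool Int(int)       { ++numbers; return true; }
    bool Uint(unsigned) { ++numbers; return true; }
    bool Double(double) { ++numbers; return true; }
    // Returning false from any callback stops the parse with kParseErrorTermination.
    std::size_t strings, numbers;
};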
StreamLocalCopy(Stream& original) : s(original), original_(original) {}
~StreamLocalCopy() { original_ = s; } ///////////////////////////////////////////////////////////////////////////////
// StreamLocalCopy
Stream s;
namespace internal {
private:
StreamLocalCopy& operator=(const StreamLocalCopy&) /* = delete */; template<typename Stream, int = StreamTraits<Stream>::copyOptimization>
class StreamLocalCopy;
Stream& original_;
}; //! Do copy optimization.
template<typename Stream>
//! Keep reference. class StreamLocalCopy<Stream, 1> {
template<typename Stream> public:
class StreamLocalCopy<Stream, 0> { StreamLocalCopy(Stream& original) : s(original), original_(original) {}
public: ~StreamLocalCopy() { original_ = s; }
StreamLocalCopy(Stream& original) : s(original) {}
Stream s;
Stream& s;
private:
private: StreamLocalCopy& operator=(const StreamLocalCopy&) /* = delete */;
StreamLocalCopy& operator=(const StreamLocalCopy&) /* = delete */;
}; Stream& original_;
};
} // namespace internal
//! Keep reference.
/////////////////////////////////////////////////////////////////////////////// template<typename Stream>
// SkipWhitespace class StreamLocalCopy<Stream, 0> {
public:
//! Skip the JSON white spaces in a stream. StreamLocalCopy(Stream& original) : s(original) {}
/*! \param is An input stream for skipping white spaces.
\note This function has SSE2/SSE4.2 specialization. Stream& s;
*/
template<typename InputStream> private:
void SkipWhitespace(InputStream& is) { StreamLocalCopy& operator=(const StreamLocalCopy&) /* = delete */;
internal::StreamLocalCopy<InputStream> copy(is); };
InputStream& s(copy.s);
} // namespace internal
while (s.Peek() == ' ' || s.Peek() == '\n' || s.Peek() == '\r' || s.Peek() == '\t')
s.Take(); ///////////////////////////////////////////////////////////////////////////////
} // SkipWhitespace
#ifdef RAPIDJSON_SSE42 //! Skip the JSON white spaces in a stream.
//! Skip whitespace with SSE 4.2 pcmpistrm instruction, testing 16 8-bit characters at once. /*! \param is An input stream for skipping white spaces.
inline const char *SkipWhitespace_SIMD(const char* p) { \note This function has SSE2/SSE4.2 specialization.
// Fast return for single non-whitespace */
if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t') template<typename InputStream>
++p; void SkipWhitespace(InputStream& is) {
else internal::StreamLocalCopy<InputStream> copy(is);
return p; InputStream& s(copy.s);
// 16-byte align to the next boundary typename InputStream::Ch c;
const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & ~15); while ((c = s.Peek()) == ' ' || c == '\n' || c == '\r' || c == '\t')
while (p != nextAligned) s.Take();
if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t') }
++p;
else inline const char* SkipWhitespace(const char* p, const char* end) {
return p; while (p != end && (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t'))
++p;
// The rest of string using SIMD return p;
static const char whitespace[16] = " \n\r\t"; }
const __m128i w = _mm_load_si128((const __m128i *)&whitespace[0]);
#ifdef RAPIDJSON_SSE42
for (;; p += 16) { //! Skip whitespace with SSE 4.2 pcmpistrm instruction, testing 16 8-bit characters at once.
const __m128i s = _mm_load_si128((const __m128i *)p); inline const char *SkipWhitespace_SIMD(const char* p) {
const unsigned r = _mm_cvtsi128_si32(_mm_cmpistrm(w, s, _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_ANY | _SIDD_BIT_MASK | _SIDD_NEGATIVE_POLARITY)); // Fast return for single non-whitespace
if (r != 0) { // some of characters is non-whitespace if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t')
#ifdef _MSC_VER // Find the index of first non-whitespace ++p;
unsigned long offset; else
_BitScanForward(&offset, r); return p;
return p + offset;
#else // 16-byte align to the next boundary
return p + __builtin_ffs(r) - 1; const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
#endif while (p != nextAligned)
} if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t')
} ++p;
} else
return p;
#elif defined(RAPIDJSON_SSE2)
// The rest of string using SIMD
//! Skip whitespace with SSE2 instructions, testing 16 8-bit characters at once. static const char whitespace[16] = " \n\r\t";
inline const char *SkipWhitespace_SIMD(const char* p) { const __m128i w = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespace[0]));
// Fast return for single non-whitespace
if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t') for (;; p += 16) {
++p; const __m128i s = _mm_load_si128(reinterpret_cast<const __m128i *>(p));
else const int r = _mm_cmpistri(w, s, _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_ANY | _SIDD_LEAST_SIGNIFICANT | _SIDD_NEGATIVE_POLARITY);
return p; if (r != 16) // some of characters is non-whitespace
return p + r;
// 16-byte align to the next boundary }
const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & ~15); }
while (p != nextAligned)
if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t') inline const char *SkipWhitespace_SIMD(const char* p, const char* end) {
++p; // Fast return for single non-whitespace
else if (p != end && (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t'))
return p; ++p;
else
// The rest of string return p;
static const char whitespaces[4][17] = {
" ", // The middle of string using SIMD
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", static const char whitespace[16] = " \n\r\t";
"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r", const __m128i w = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespace[0]));
"\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t"};
for (; p <= end - 16; p += 16) {
const __m128i w0 = _mm_loadu_si128((const __m128i *)&whitespaces[0][0]); const __m128i s = _mm_loadu_si128(reinterpret_cast<const __m128i *>(p));
const __m128i w1 = _mm_loadu_si128((const __m128i *)&whitespaces[1][0]); const int r = _mm_cmpistri(w, s, _SIDD_UBYTE_OPS | _SIDD_CMP_EQUAL_ANY | _SIDD_LEAST_SIGNIFICANT | _SIDD_NEGATIVE_POLARITY);
const __m128i w2 = _mm_loadu_si128((const __m128i *)&whitespaces[2][0]); if (r != 16) // some of characters is non-whitespace
const __m128i w3 = _mm_loadu_si128((const __m128i *)&whitespaces[3][0]); return p + r;
}
for (;; p += 16) {
const __m128i s = _mm_load_si128((const __m128i *)p); return SkipWhitespace(p, end);
__m128i x = _mm_cmpeq_epi8(s, w0); }
x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w1));
x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w2)); #elif defined(RAPIDJSON_SSE2)
x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w3));
unsigned short r = (unsigned short)~_mm_movemask_epi8(x); //! Skip whitespace with SSE2 instructions, testing 16 8-bit characters at once.
if (r != 0) { // some of characters may be non-whitespace inline const char *SkipWhitespace_SIMD(const char* p) {
#ifdef _MSC_VER // Find the index of first non-whitespace // Fast return for single non-whitespace
unsigned long offset; if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t')
_BitScanForward(&offset, r); ++p;
return p + offset; else
#else return p;
return p + __builtin_ffs(r) - 1;
#endif // 16-byte align to the next boundary
} const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
} while (p != nextAligned)
} if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t')
++p;
#endif // RAPIDJSON_SSE2 else
return p;
#ifdef RAPIDJSON_SIMD
//! Template function specialization for InsituStringStream // The rest of string
template<> inline void SkipWhitespace(InsituStringStream& is) { #define C16(c) { c, c, c, c, c, c, c, c, c, c, c, c, c, c, c, c }
is.src_ = const_cast<char*>(SkipWhitespace_SIMD(is.src_)); static const char whitespaces[4][16] = { C16(' '), C16('\n'), C16('\r'), C16('\t') };
} #undef C16
//! Template function specialization for StringStream const __m128i w0 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespaces[0][0]));
template<> inline void SkipWhitespace(StringStream& is) { const __m128i w1 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespaces[1][0]));
is.src_ = SkipWhitespace_SIMD(is.src_); const __m128i w2 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespaces[2][0]));
} const __m128i w3 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespaces[3][0]));
#endif // RAPIDJSON_SIMD
for (;; p += 16) {
/////////////////////////////////////////////////////////////////////////////// const __m128i s = _mm_load_si128(reinterpret_cast<const __m128i *>(p));
// GenericReader __m128i x = _mm_cmpeq_epi8(s, w0);
x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w1));
//! SAX-style JSON parser. Use \ref Reader for UTF8 encoding and default allocator. x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w2));
/*! GenericReader parses JSON text from a stream, and sends events synchronously to an x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w2));
object implementing Handler concept. unsigned short r = static_cast<unsigned short>(~_mm_movemask_epi8(x));
if (r != 0) { // some of characters may be non-whitespace
It needs to allocate a stack for storing a single decoded string during #ifdef _MSC_VER // Find the index of first non-whitespace
non-destructive parsing. unsigned long offset;
_BitScanForward(&offset, r);
For in-situ parsing, the decoded string is directly written to the source return p + offset;
text string, no temporary buffer is required. #else
return p + __builtin_ffs(r) - 1;
A GenericReader object can be reused for parsing multiple JSON texts. #endif
}
\tparam SourceEncoding Encoding of the input stream. }
\tparam TargetEncoding Encoding of the parse output. }
\tparam StackAllocator Allocator type for stack.
*/ inline const char *SkipWhitespace_SIMD(const char* p, const char* end) {
template <typename SourceEncoding, typename TargetEncoding, typename StackAllocator = CrtAllocator> // Fast return for single non-whitespace
class GenericReader { if (p != end && (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t'))
public: ++p;
typedef typename SourceEncoding::Ch Ch; //!< SourceEncoding character type else
return p;
//! Constructor.
/*! \param stackAllocator Optional allocator for allocating stack memory. (Only use for non-destructive parsing) // The rest of string
\param stackCapacity stack capacity in bytes for storing a single decoded string. (Only use for non-destructive parsing) #define C16(c) { c, c, c, c, c, c, c, c, c, c, c, c, c, c, c, c }
*/ static const char whitespaces[4][16] = { C16(' '), C16('\n'), C16('\r'), C16('\t') };
GenericReader(StackAllocator* stackAllocator = 0, size_t stackCapacity = kDefaultStackCapacity) : stack_(stackAllocator, stackCapacity), parseResult_() {} #undef C16
//! Parse JSON text. const __m128i w0 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespaces[0][0]));
/*! \tparam parseFlags Combination of \ref ParseFlag. const __m128i w1 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespaces[1][0]));
\tparam InputStream Type of input stream, implementing Stream concept. const __m128i w2 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespaces[2][0]));
\tparam Handler Type of handler, implementing Handler concept. const __m128i w3 = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&whitespaces[3][0]));
\param is Input stream to be parsed.
\param handler The handler to receive events. for (; p <= end - 16; p += 16) {
\return Whether the parsing is successful. const __m128i s = _mm_loadu_si128(reinterpret_cast<const __m128i *>(p));
*/ __m128i x = _mm_cmpeq_epi8(s, w0);
template <unsigned parseFlags, typename InputStream, typename Handler> x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w1));
ParseResult Parse(InputStream& is, Handler& handler) { x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w2));
if (parseFlags & kParseIterativeFlag) x = _mm_or_si128(x, _mm_cmpeq_epi8(s, w3));
return IterativeParse<parseFlags>(is, handler); unsigned short r = static_cast<unsigned short>(~_mm_movemask_epi8(x));
if (r != 0) { // some of characters may be non-whitespace
parseResult_.Clear(); #ifdef _MSC_VER // Find the index of first non-whitespace
unsigned long offset;
ClearStackOnExit scope(*this); _BitScanForward(&offset, r);
return p + offset;
SkipWhitespace(is); #else
return p + __builtin_ffs(r) - 1;
if (is.Peek() == '\0') { #endif
RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorDocumentEmpty, is.Tell()); }
RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_); }
}
else { return SkipWhitespace(p, end);
ParseValue<parseFlags>(is, handler); }
RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_);
#elif defined(RAPIDJSON_NEON)
if (!(parseFlags & kParseStopWhenDoneFlag)) {
SkipWhitespace(is); //! Skip whitespace with ARM Neon instructions, testing 16 8-bit characters at once.
inline const char *SkipWhitespace_SIMD(const char* p) {
if (is.Peek() != '\0') { // Fast return for single non-whitespace
RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorDocumentRootNotSingular, is.Tell()); if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t')
RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_); ++p;
} else
} return p;
}
// 16-byte align to the next boundary
return parseResult_; const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
} while (p != nextAligned)
if (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t')
//! Parse JSON text (with \ref kParseDefaultFlags) ++p;
/*! \tparam InputStream Type of input stream, implementing Stream concept else
\tparam Handler Type of handler, implementing Handler concept. return p;
\param is Input stream to be parsed.
\param handler The handler to receive events. const uint8x16_t w0 = vmovq_n_u8(' ');
\return Whether the parsing is successful. const uint8x16_t w1 = vmovq_n_u8('\n');
*/ const uint8x16_t w2 = vmovq_n_u8('\r');
template <typename InputStream, typename Handler> const uint8x16_t w3 = vmovq_n_u8('\t');
ParseResult Parse(InputStream& is, Handler& handler) {
return Parse<kParseDefaultFlags>(is, handler); for (;; p += 16) {
} const uint8x16_t s = vld1q_u8(reinterpret_cast<const uint8_t *>(p));
uint8x16_t x = vceqq_u8(s, w0);
//! Whether a parse error has occurred in the last parsing. x = vorrq_u8(x, vceqq_u8(s, w1));
bool HasParseError() const { return parseResult_.IsError(); } x = vorrq_u8(x, vceqq_u8(s, w2));
x = vorrq_u8(x, vceqq_u8(s, w3));
//! Get the \ref ParseErrorCode of last parsing.
ParseErrorCode GetParseErrorCode() const { return parseResult_.Code(); } x = vmvnq_u8(x); // Negate
x = vrev64q_u8(x); // Rev in 64
//! Get the position of last parsing error in input, 0 otherwise. uint64_t low = vgetq_lane_u64(vreinterpretq_u64_u8(x), 0); // extract
size_t GetErrorOffset() const { return parseResult_.Offset(); } uint64_t high = vgetq_lane_u64(vreinterpretq_u64_u8(x), 1); // extract
protected: if (low == 0) {
void SetParseError(ParseErrorCode code, size_t offset) { parseResult_.Set(code, offset); } if (high != 0) {
uint32_t lz = internal::clzll(high);
private: return p + 8 + (lz >> 3);
// Prohibit copy constructor & assignment operator. }
GenericReader(const GenericReader&); } else {
GenericReader& operator=(const GenericReader&); uint32_t lz = internal::clzll(low);
return p + (lz >> 3);
void ClearStack() { stack_.Clear(); } }
}
// clear stack on any exit from ParseStream, e.g. due to exception }
struct ClearStackOnExit {
explicit ClearStackOnExit(GenericReader& r) : r_(r) {} inline const char *SkipWhitespace_SIMD(const char* p, const char* end) {
~ClearStackOnExit() { r_.ClearStack(); } // Fast return for single non-whitespace
private: if (p != end && (*p == ' ' || *p == '\n' || *p == '\r' || *p == '\t'))
GenericReader& r_; ++p;
ClearStackOnExit(const ClearStackOnExit&); else
ClearStackOnExit& operator=(const ClearStackOnExit&); return p;
};
const uint8x16_t w0 = vmovq_n_u8(' ');
// Parse object: { string : value, ... } const uint8x16_t w1 = vmovq_n_u8('\n');
template<unsigned parseFlags, typename InputStream, typename Handler> const uint8x16_t w2 = vmovq_n_u8('\r');
void ParseObject(InputStream& is, Handler& handler) { const uint8x16_t w3 = vmovq_n_u8('\t');
RAPIDJSON_ASSERT(is.Peek() == '{');
is.Take(); // Skip '{' for (; p <= end - 16; p += 16) {
const uint8x16_t s = vld1q_u8(reinterpret_cast<const uint8_t *>(p));
if (!handler.StartObject()) uint8x16_t x = vceqq_u8(s, w0);
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); x = vorrq_u8(x, vceqq_u8(s, w1));
x = vorrq_u8(x, vceqq_u8(s, w2));
SkipWhitespace(is); x = vorrq_u8(x, vceqq_u8(s, w3));
if (is.Peek() == '}') { x = vmvnq_u8(x); // Negate
is.Take(); x = vrev64q_u8(x); // Rev in 64
if (!handler.EndObject(0)) // empty object uint64_t low = vgetq_lane_u64(vreinterpretq_u64_u8(x), 0); // extract
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); uint64_t high = vgetq_lane_u64(vreinterpretq_u64_u8(x), 1); // extract
return;
} if (low == 0) {
if (high != 0) {
for (SizeType memberCount = 0;;) { uint32_t lz = internal::clzll(high);
if (is.Peek() != '"') return p + 8 + (lz >> 3);
RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissName, is.Tell()); }
} else {
ParseString<parseFlags>(is, handler, true); uint32_t lz = internal::clzll(low);
RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; return p + (lz >> 3);
}
SkipWhitespace(is); }
if (is.Take() != ':') return SkipWhitespace(p, end);
RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissColon, is.Tell()); }
SkipWhitespace(is); #endif // RAPIDJSON_NEON
ParseValue<parseFlags>(is, handler); #ifdef RAPIDJSON_SIMD
RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; //! Template function specialization for InsituStringStream
template<> inline void SkipWhitespace(InsituStringStream& is) {
SkipWhitespace(is); is.src_ = const_cast<char*>(SkipWhitespace_SIMD(is.src_));
}
++memberCount;
//! Template function specialization for StringStream
switch (is.Take()) { template<> inline void SkipWhitespace(StringStream& is) {
case ',': SkipWhitespace(is); break; is.src_ = SkipWhitespace_SIMD(is.src_);
case '}': }
if (!handler.EndObject(memberCount))
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); template<> inline void SkipWhitespace(EncodedInputStream<UTF8<>, MemoryStream>& is) {
return; is.is_.src_ = SkipWhitespace_SIMD(is.is_.src_, is.is_.end_);
default: RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissCommaOrCurlyBracket, is.Tell()); }
} #endif // RAPIDJSON_SIMD
}
} ///////////////////////////////////////////////////////////////////////////////
// GenericReader
// Parse array: [ value, ... ]
template<unsigned parseFlags, typename InputStream, typename Handler> //! SAX-style JSON parser. Use \ref Reader for UTF8 encoding and default allocator.
void ParseArray(InputStream& is, Handler& handler) { /*! GenericReader parses JSON text from a stream, and sends events synchronously to an
RAPIDJSON_ASSERT(is.Peek() == '['); object implementing Handler concept.
is.Take(); // Skip '['
It needs to allocate a stack for storing a single decoded string during
if (!handler.StartArray()) non-destructive parsing.
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
For in-situ parsing, the decoded string is directly written to the source
SkipWhitespace(is); text string, no temporary buffer is required.
if (is.Peek() == ']') { A GenericReader object can be reused for parsing multiple JSON texts.
is.Take();
if (!handler.EndArray(0)) // empty array \tparam SourceEncoding Encoding of the input stream.
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); \tparam TargetEncoding Encoding of the parse output.
return; \tparam StackAllocator Allocator type for stack.
} */
template <typename SourceEncoding, typename TargetEncoding, typename StackAllocator = CrtAllocator>
for (SizeType elementCount = 0;;) { class GenericReader {
ParseValue<parseFlags>(is, handler); public:
RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; typedef typename SourceEncoding::Ch Ch; //!< SourceEncoding character type
++elementCount; //! Constructor.
SkipWhitespace(is); /*! \param stackAllocator Optional allocator for allocating stack memory. (Only use for non-destructive parsing)
\param stackCapacity stack capacity in bytes for storing a single decoded string. (Only use for non-destructive parsing)
switch (is.Take()) { */
case ',': SkipWhitespace(is); break; GenericReader(StackAllocator* stackAllocator = 0, size_t stackCapacity = kDefaultStackCapacity) :
case ']': stack_(stackAllocator, stackCapacity), parseResult_(), state_(IterativeParsingStartState) {}
if (!handler.EndArray(elementCount))
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); //! Parse JSON text.
return; /*! \tparam parseFlags Combination of \ref ParseFlag.
default: RAPIDJSON_PARSE_ERROR(kParseErrorArrayMissCommaOrSquareBracket, is.Tell()); \tparam InputStream Type of input stream, implementing Stream concept.
} \tparam Handler Type of handler, implementing Handler concept.
} \param is Input stream to be parsed.
} \param handler The handler to receive events.
\return Whether the parsing is successful.
template<unsigned parseFlags, typename InputStream, typename Handler> */
void ParseNull(InputStream& is, Handler& handler) { template <unsigned parseFlags, typename InputStream, typename Handler>
RAPIDJSON_ASSERT(is.Peek() == 'n'); ParseResult Parse(InputStream& is, Handler& handler) {
is.Take(); if (parseFlags & kParseIterativeFlag)
return IterativeParse<parseFlags>(is, handler);
if (is.Take() == 'u' && is.Take() == 'l' && is.Take() == 'l') {
if (!handler.Null()) parseResult_.Clear();
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
} ClearStackOnExit scope(*this);
else
RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell() - 1); SkipWhitespaceAndComments<parseFlags>(is);
} RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_);
template<unsigned parseFlags, typename InputStream, typename Handler> if (RAPIDJSON_UNLIKELY(is.Peek() == '\0')) {
void ParseTrue(InputStream& is, Handler& handler) { RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorDocumentEmpty, is.Tell());
RAPIDJSON_ASSERT(is.Peek() == 't'); RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_);
is.Take(); }
else {
if (is.Take() == 'r' && is.Take() == 'u' && is.Take() == 'e') { ParseValue<parseFlags>(is, handler);
if (!handler.Bool(true)) RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_);
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
} if (!(parseFlags & kParseStopWhenDoneFlag)) {
else SkipWhitespaceAndComments<parseFlags>(is);
RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell() - 1); RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_);
}
if (RAPIDJSON_UNLIKELY(is.Peek() != '\0')) {
template<unsigned parseFlags, typename InputStream, typename Handler> RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorDocumentRootNotSingular, is.Tell());
void ParseFalse(InputStream& is, Handler& handler) { RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_);
RAPIDJSON_ASSERT(is.Peek() == 'f'); }
is.Take(); }
}
if (is.Take() == 'a' && is.Take() == 'l' && is.Take() == 's' && is.Take() == 'e') {
if (!handler.Bool(false)) return parseResult_;
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell()); }
}
else //! Parse JSON text (with \ref kParseDefaultFlags)
RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell() - 1); /*! \tparam InputStream Type of input stream, implementing Stream concept
} \tparam Handler Type of handler, implementing Handler concept.
\param is Input stream to be parsed.
// Helper function to parse four hexadecimal digits in \uXXXX in ParseString(). \param handler The handler to receive events.
template<typename InputStream> \return Whether the parsing is successful.
unsigned ParseHex4(InputStream& is) { */
unsigned codepoint = 0; template <typename InputStream, typename Handler>
for (int i = 0; i < 4; i++) { ParseResult Parse(InputStream& is, Handler& handler) {
Ch c = is.Take(); return Parse<kParseDefaultFlags>(is, handler);
codepoint <<= 4; }
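// Usage sketch for the two Parse() overloads above (not part of the header), reusing the
// MessageCounter handler sketched earlier; Reader is assumed to be the UTF-8 typedef of
// GenericReader provided near the end of this header.
#include "rapidjson/reader.h"
inline void ParseBothWays() {
    rapidjson::Reader reader;
    MessageCounter counter;

    // Non-destructive parsing from a read-only string.
    rapidjson::StringStream ss("[1, 2, \"three\"]");
    rapidjson::ParseResult ok = reader.Parse(ss, counter);
    (void)ok;   // converts to false and carries code/offset on failure

    // In-situ parsing (kParseInsituFlag) decodes strings directly into the writable buffer,
    // so the buffer must stay alive as long as the decoded strings are used.
    char buf[] = "{\"key\":\"value\"}";
    rapidjson::InsituStringStream iss(buf);
    reader.Parse<rapidjson::kParseInsituFlag>(iss, counter);
}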
codepoint += static_cast<unsigned>(c);
if (c >= '0' && c <= '9') //! Initialize JSON text token-by-token parsing
codepoint -= '0'; /*!
else if (c >= 'A' && c <= 'F') */
codepoint -= 'A' - 10; void IterativeParseInit() {
else if (c >= 'a' && c <= 'f') parseResult_.Clear();
codepoint -= 'a' - 10; state_ = IterativeParsingStartState;
else { }
RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorStringUnicodeEscapeInvalidHex, is.Tell() - 1);
RAPIDJSON_PARSE_ERROR_EARLY_RETURN(0); //! Parse one token from JSON text
} /*! \tparam InputStream Type of input stream, implementing Stream concept
} \tparam Handler Type of handler, implementing Handler concept.
return codepoint; \param is Input stream to be parsed.
} \param handler The handler to receive events.
\return Whether the parsing is successful.
template <typename CharType> */
class StackStream { template <unsigned parseFlags, typename InputStream, typename Handler>
public: bool IterativeParseNext(InputStream& is, Handler& handler) {
typedef CharType Ch; while (RAPIDJSON_LIKELY(is.Peek() != '\0')) {
SkipWhitespaceAndComments<parseFlags>(is);
StackStream(internal::Stack<StackAllocator>& stack) : stack_(stack), length_(0) {}
RAPIDJSON_FORCEINLINE void Put(Ch c) { Token t = Tokenize(is.Peek());
*stack_.template Push<Ch>() = c; IterativeParsingState n = Predict(state_, t);
++length_; IterativeParsingState d = Transit<parseFlags>(state_, t, n, is, handler);
}
size_t Length() const { return length_; } // If we've finished or hit an error...
Ch* Pop() { if (RAPIDJSON_UNLIKELY(IsIterativeParsingCompleteState(d))) {
return stack_.template Pop<Ch>(length_); // Report errors.
} if (d == IterativeParsingErrorState) {
HandleError(state_, is);
private: return false;
StackStream(const StackStream&); }
StackStream& operator=(const StackStream&);
// Transition to the finish state.
internal::Stack<StackAllocator>& stack_; RAPIDJSON_ASSERT(d == IterativeParsingFinishState);
SizeType length_; state_ = d;
};
// If StopWhenDone is not set...
// Parse string and generate String event. Different code paths for kParseInsituFlag. if (!(parseFlags & kParseStopWhenDoneFlag)) {
template<unsigned parseFlags, typename InputStream, typename Handler> // ... and extra non-whitespace data is found...
void ParseString(InputStream& is, Handler& handler, bool isKey = false) { SkipWhitespaceAndComments<parseFlags>(is);
internal::StreamLocalCopy<InputStream> copy(is); if (is.Peek() != '\0') {
InputStream& s(copy.s); // ... this is considered an error.
HandleError(state_, is);
bool success = false; return false;
if (parseFlags & kParseInsituFlag) { }
typename InputStream::Ch *head = s.PutBegin(); }
ParseStringToStream<parseFlags, SourceEncoding, SourceEncoding>(s, s);
RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; // Success! We are done!
size_t length = s.PutEnd(head) - 1; return true;
RAPIDJSON_ASSERT(length <= 0xFFFFFFFF); }
const typename TargetEncoding::Ch* const str = (typename TargetEncoding::Ch*)head;
success = (isKey ? handler.Key(str, SizeType(length), false) : handler.String(str, SizeType(length), false)); // Transition to the new state.
} state_ = d;
else {
StackStream<typename TargetEncoding::Ch> stackStream(stack_); // If we parsed anything other than a delimiter, we invoked the handler, so we can return true now.
ParseStringToStream<parseFlags, SourceEncoding, TargetEncoding>(s, stackStream); if (!IsIterativeParsingDelimiterState(n))
RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID; return true;
SizeType length = static_cast<SizeType>(stackStream.Length()) - 1; }
const typename TargetEncoding::Ch* const str = stackStream.Pop();
success = (isKey ? handler.Key(str, length, true) : handler.String(str, length, true)); // We reached the end of file.
} stack_.Clear();
if (!success)
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, s.Tell()); if (state_ != IterativeParsingFinishState) {
} HandleError(state_, is);
return false;
// Parse string to an output is }
// This function handles the prefix/suffix double quotes, escaping, and optional encoding validation.
template<unsigned parseFlags, typename SEncoding, typename TEncoding, typename InputStream, typename OutputStream> return true;
RAPIDJSON_FORCEINLINE void ParseStringToStream(InputStream& is, OutputStream& os) { }
//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
#define Z16 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 //! Check if token-by-token parsing JSON text is complete
static const char escape[256] = { /*! \return Whether the JSON has been fully decoded.
Z16, Z16, 0, 0,'\"', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,'/', */
Z16, Z16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,'\\', 0, 0, 0, RAPIDJSON_FORCEINLINE bool IterativeParseComplete() const {
0, 0,'\b', 0, 0, 0,'\f', 0, 0, 0, 0, 0, 0, 0,'\n', 0, return IsIterativeParsingCompleteState(state_);
0, 0,'\r', 0,'\t', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }
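// Sketch of driving the token-by-token (pull) API above (not part of the header): the
// caller owns the loop and may stop after any event instead of handing control to Parse().
inline bool PullParse(const char* json, MessageCounter& handler) {
    rapidjson::Reader reader;
    rapidjson::StringStream ss(json);
    reader.IterativeParseInit();
    while (!reader.IterativeParseComplete()) {
        if (!reader.IterativeParseNext<rapidjson::kParseDefaultFlags>(ss, handler))
            return false;   // parse error; GetParseErrorCode()/GetErrorOffset() have details
    }
    return true;
}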
Z16, Z16, Z16, Z16, Z16, Z16, Z16, Z16
}; //! Whether a parse error has occurred in the last parsing.
#undef Z16 bool HasParseError() const { return parseResult_.IsError(); }
//!@endcond
//! Get the \ref ParseErrorCode of last parsing.
RAPIDJSON_ASSERT(is.Peek() == '\"'); ParseErrorCode GetParseErrorCode() const { return parseResult_.Code(); }
is.Take(); // Skip '\"'
//! Get the position of last parsing error in input, 0 otherwise.
for (;;) { size_t GetErrorOffset() const { return parseResult_.Offset(); }
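// Error-reporting sketch (not part of the header): after a failed parse the three accessors
// above give the code and byte offset; GetParseError_En() from "rapidjson/error/en.h" in the
// same tree turns the code into an English message.
#include <cstdio>
#include "rapidjson/error/en.h"
inline void ReportParseError(const rapidjson::Reader& reader) {
    if (reader.HasParseError())
        std::fprintf(stderr, "parse error at offset %zu: %s\n",
                     reader.GetErrorOffset(),
                     rapidjson::GetParseError_En(reader.GetParseErrorCode()));
}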
Ch c = is.Peek();
if (c == '\\') { // Escape protected:
is.Take(); void SetParseError(ParseErrorCode code, size_t offset) { parseResult_.Set(code, offset); }
Ch e = is.Take();
if ((sizeof(Ch) == 1 || unsigned(e) < 256) && escape[(unsigned char)e]) { private:
os.Put(escape[(unsigned char)e]); // Prohibit copy constructor & assignment operator.
} GenericReader(const GenericReader&);
else if (e == 'u') { // Unicode GenericReader& operator=(const GenericReader&);
unsigned codepoint = ParseHex4(is);
if (codepoint >= 0xD800 && codepoint <= 0xDBFF) { void ClearStack() { stack_.Clear(); }
// Handle UTF-16 surrogate pair
if (is.Take() != '\\' || is.Take() != 'u') // clear stack on any exit from ParseStream, e.g. due to exception
RAPIDJSON_PARSE_ERROR(kParseErrorStringUnicodeSurrogateInvalid, is.Tell() - 2); struct ClearStackOnExit {
unsigned codepoint2 = ParseHex4(is); explicit ClearStackOnExit(GenericReader& r) : r_(r) {}
if (codepoint2 < 0xDC00 || codepoint2 > 0xDFFF) ~ClearStackOnExit() { r_.ClearStack(); }
RAPIDJSON_PARSE_ERROR(kParseErrorStringUnicodeSurrogateInvalid, is.Tell() - 2); private:
codepoint = (((codepoint - 0xD800) << 10) | (codepoint2 - 0xDC00)) + 0x10000; GenericReader& r_;
} ClearStackOnExit(const ClearStackOnExit&);
TEncoding::Encode(os, codepoint); ClearStackOnExit& operator=(const ClearStackOnExit&);
} };
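// Worked example of the surrogate-pair arithmetic above (comment only, not code from the
// diff): the escape sequence "\uD834\uDD1E" decodes to U+1D11E (the musical G clef):
//     codepoint = 0xD834, codepoint2 = 0xDD1E
//     (0xD834 - 0xD800) << 10 = 0xD000        (0xDD1E - 0xDC00) = 0x11E
//     (0xD000 | 0x11E) + 0x10000 = 0x1D11E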
else
RAPIDJSON_PARSE_ERROR(kParseErrorStringEscapeInvalid, is.Tell() - 1); template<unsigned parseFlags, typename InputStream>
} void SkipWhitespaceAndComments(InputStream& is) {
else if (c == '"') { // Closing double quote SkipWhitespace(is);
is.Take();
os.Put('\0'); // null-terminate the string if (parseFlags & kParseCommentsFlag) {
return; while (RAPIDJSON_UNLIKELY(Consume(is, '/'))) {
} if (Consume(is, '*')) {
else if (c == '\0') while (true) {
RAPIDJSON_PARSE_ERROR(kParseErrorStringMissQuotationMark, is.Tell() - 1); if (RAPIDJSON_UNLIKELY(is.Peek() == '\0'))
else if ((unsigned)c < 0x20) // RFC 4627: unescaped = %x20-21 / %x23-5B / %x5D-10FFFF RAPIDJSON_PARSE_ERROR(kParseErrorUnspecificSyntaxError, is.Tell());
RAPIDJSON_PARSE_ERROR(kParseErrorStringEscapeInvalid, is.Tell() - 1); else if (Consume(is, '*')) {
else { if (Consume(is, '/'))
if (parseFlags & kParseValidateEncodingFlag ? break;
!Transcoder<SEncoding, TEncoding>::Validate(is, os) : }
!Transcoder<SEncoding, TEncoding>::Transcode(is, os)) else
RAPIDJSON_PARSE_ERROR(kParseErrorStringInvalidEncoding, is.Tell()); is.Take();
} }
} }
} else if (RAPIDJSON_LIKELY(Consume(is, '/')))
while (is.Peek() != '\0' && is.Take() != '\n') {}
template<typename InputStream, bool backup> else
class NumberStream {}; RAPIDJSON_PARSE_ERROR(kParseErrorUnspecificSyntaxError, is.Tell());
template<typename InputStream> SkipWhitespace(is);
class NumberStream<InputStream, false> { }
public: }
NumberStream(GenericReader& reader, InputStream& is) : is(is) { (void)reader; } }
~NumberStream() {}
// Parse object: { string : value, ... }
RAPIDJSON_FORCEINLINE Ch Peek() const { return is.Peek(); } template<unsigned parseFlags, typename InputStream, typename Handler>
RAPIDJSON_FORCEINLINE Ch TakePush() { return is.Take(); } void ParseObject(InputStream& is, Handler& handler) {
RAPIDJSON_FORCEINLINE Ch Take() { return is.Take(); } RAPIDJSON_ASSERT(is.Peek() == '{');
size_t Tell() { return is.Tell(); } is.Take(); // Skip '{'
size_t Length() { return 0; }
const char* Pop() { return 0; } if (RAPIDJSON_UNLIKELY(!handler.StartObject()))
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
protected:
NumberStream& operator=(const NumberStream&); SkipWhitespaceAndComments<parseFlags>(is);
RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
InputStream& is;
}; if (Consume(is, '}')) {
if (RAPIDJSON_UNLIKELY(!handler.EndObject(0))) // empty object
template<typename InputStream> RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
class NumberStream<InputStream, true> : public NumberStream<InputStream, false> { return;
typedef NumberStream<InputStream, false> Base; }
public:
NumberStream(GenericReader& reader, InputStream& is) : NumberStream<InputStream, false>(reader, is), stackStream(reader.stack_) {} for (SizeType memberCount = 0;;) {
~NumberStream() {} if (RAPIDJSON_UNLIKELY(is.Peek() != '"'))
RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissName, is.Tell());
RAPIDJSON_FORCEINLINE Ch TakePush() {
stackStream.Put((char)Base::is.Peek()); ParseString<parseFlags>(is, handler, true);
return Base::is.Take(); RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
}
SkipWhitespaceAndComments<parseFlags>(is);
size_t Length() { return stackStream.Length(); } RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
const char* Pop() { if (RAPIDJSON_UNLIKELY(!Consume(is, ':')))
stackStream.Put('\0'); RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissColon, is.Tell());
return stackStream.Pop();
} SkipWhitespaceAndComments<parseFlags>(is);
RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
private:
StackStream<char> stackStream; ParseValue<parseFlags>(is, handler);
}; RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
template<unsigned parseFlags, typename InputStream, typename Handler> SkipWhitespaceAndComments<parseFlags>(is);
void ParseNumber(InputStream& is, Handler& handler) { RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
internal::StreamLocalCopy<InputStream> copy(is);
NumberStream<InputStream, (parseFlags & kParseFullPrecisionFlag) != 0> s(*this, copy.s); ++memberCount;
// Parse minus switch (is.Peek()) {
bool minus = false; case ',':
if (s.Peek() == '-') { is.Take();
minus = true; SkipWhitespaceAndComments<parseFlags>(is);
s.Take(); RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
} break;
case '}':
// Parse int: zero / ( digit1-9 *DIGIT ) is.Take();
unsigned i = 0; if (RAPIDJSON_UNLIKELY(!handler.EndObject(memberCount)))
uint64_t i64 = 0; RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
bool use64bit = false; return;
int significandDigit = 0; default:
if (s.Peek() == '0') { RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissCommaOrCurlyBracket, is.Tell()); break; // This useless break is only for making warning and coverage happy
i = 0; }
s.TakePush();
} if (parseFlags & kParseTrailingCommasFlag) {
else if (s.Peek() >= '1' && s.Peek() <= '9') { if (is.Peek() == '}') {
i = static_cast<unsigned>(s.TakePush() - '0'); if (RAPIDJSON_UNLIKELY(!handler.EndObject(memberCount)))
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
if (minus) is.Take();
while (s.Peek() >= '0' && s.Peek() <= '9') { return;
if (i >= 214748364) { // 2^31 = 2147483648 }
if (i != 214748364 || s.Peek() > '8') { }
i64 = i; }
use64bit = true; }
break;
// Parse array: [ value, ... ]
template<unsigned parseFlags, typename InputStream, typename Handler>
void ParseArray(InputStream& is, Handler& handler) {
    RAPIDJSON_ASSERT(is.Peek() == '[');
    is.Take();  // Skip '['

    if (RAPIDJSON_UNLIKELY(!handler.StartArray()))
        RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());

    SkipWhitespaceAndComments<parseFlags>(is);
    RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;

    if (Consume(is, ']')) {
        if (RAPIDJSON_UNLIKELY(!handler.EndArray(0))) // empty array
            RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
        return;
    }

    for (SizeType elementCount = 0;;) {
        ParseValue<parseFlags>(is, handler);
        RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;

        ++elementCount;
        SkipWhitespaceAndComments<parseFlags>(is);
        RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;

        if (Consume(is, ',')) {
            SkipWhitespaceAndComments<parseFlags>(is);
            RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
        }
        else if (Consume(is, ']')) {
            if (RAPIDJSON_UNLIKELY(!handler.EndArray(elementCount)))
                RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
            return;
        }
        else
            RAPIDJSON_PARSE_ERROR(kParseErrorArrayMissCommaOrSquareBracket, is.Tell());

        if (parseFlags & kParseTrailingCommasFlag) {
            if (is.Peek() == ']') {
                if (RAPIDJSON_UNLIKELY(!handler.EndArray(elementCount)))
                    RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
                is.Take();
                return;
            }
        }
    }
}

template<unsigned parseFlags, typename InputStream, typename Handler>
void ParseNull(InputStream& is, Handler& handler) {
    RAPIDJSON_ASSERT(is.Peek() == 'n');
    is.Take();

    if (RAPIDJSON_LIKELY(Consume(is, 'u') && Consume(is, 'l') && Consume(is, 'l'))) {
        if (RAPIDJSON_UNLIKELY(!handler.Null()))
            RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
    }
    else
        RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell());
}

template<unsigned parseFlags, typename InputStream, typename Handler>
void ParseTrue(InputStream& is, Handler& handler) {
    RAPIDJSON_ASSERT(is.Peek() == 't');
    is.Take();

    if (RAPIDJSON_LIKELY(Consume(is, 'r') && Consume(is, 'u') && Consume(is, 'e'))) {
        if (RAPIDJSON_UNLIKELY(!handler.Bool(true)))
            RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
    }
    else
        RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell());
}

template<unsigned parseFlags, typename InputStream, typename Handler>
void ParseFalse(InputStream& is, Handler& handler) {
    RAPIDJSON_ASSERT(is.Peek() == 'f');
    is.Take();

    if (RAPIDJSON_LIKELY(Consume(is, 'a') && Consume(is, 'l') && Consume(is, 's') && Consume(is, 'e'))) {
        if (RAPIDJSON_UNLIKELY(!handler.Bool(false)))
            RAPIDJSON_PARSE_ERROR(kParseErrorTermination, is.Tell());
    }
    else
        RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell());
}
template<typename InputStream>
RAPIDJSON_FORCEINLINE static bool Consume(InputStream& is, typename InputStream::Ch expect) {
    if (RAPIDJSON_LIKELY(is.Peek() == expect)) {
        is.Take();
        return true;
    }
    else
        return false;
}

// Helper function to parse four hexadecimal digits in \uXXXX in ParseString().
template<typename InputStream>
unsigned ParseHex4(InputStream& is, size_t escapeOffset) {
    unsigned codepoint = 0;
    for (int i = 0; i < 4; i++) {
        Ch c = is.Peek();
        codepoint <<= 4;
        codepoint += static_cast<unsigned>(c);
        if (c >= '0' && c <= '9')
            codepoint -= '0';
        else if (c >= 'A' && c <= 'F')
            codepoint -= 'A' - 10;
        else if (c >= 'a' && c <= 'f')
            codepoint -= 'a' - 10;
        else {
            RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorStringUnicodeEscapeInvalidHex, escapeOffset);
            RAPIDJSON_PARSE_ERROR_EARLY_RETURN(0);
        }
        is.Take();
    }
    return codepoint;
}

template <typename CharType>
class StackStream {
public:
    typedef CharType Ch;

    StackStream(internal::Stack<StackAllocator>& stack) : stack_(stack), length_(0) {}
    RAPIDJSON_FORCEINLINE void Put(Ch c) {
        *stack_.template Push<Ch>() = c;
        ++length_;
    }

    RAPIDJSON_FORCEINLINE void* Push(SizeType count) {
        length_ += count;
        return stack_.template Push<Ch>(count);
    }

    size_t Length() const { return length_; }

    Ch* Pop() {
        return stack_.template Pop<Ch>(length_);
    }

private:
    StackStream(const StackStream&);
    StackStream& operator=(const StackStream&);

    internal::Stack<StackAllocator>& stack_;
    SizeType length_;
};
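// StackStream decodes characters onto the reader's internal stack (stack_), so ParseString() below can
// hand the handler a contiguous, null-terminated buffer even when escapes or transcoding make the decoded
// string differ from the raw input.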
// Parse string and generate String event. Different code paths for kParseInsituFlag.
template<unsigned parseFlags, typename InputStream, typename Handler>
void ParseString(InputStream& is, Handler& handler, bool isKey = false) {
    internal::StreamLocalCopy<InputStream> copy(is);
    InputStream& s(copy.s);

    RAPIDJSON_ASSERT(s.Peek() == '\"');
    s.Take();  // Skip '\"'

    bool success = false;
    if (parseFlags & kParseInsituFlag) {
        typename InputStream::Ch *head = s.PutBegin();
        ParseStringToStream<parseFlags, SourceEncoding, SourceEncoding>(s, s);
        RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
        size_t length = s.PutEnd(head) - 1;
        RAPIDJSON_ASSERT(length <= 0xFFFFFFFF);
        const typename TargetEncoding::Ch* const str = reinterpret_cast<typename TargetEncoding::Ch*>(head);
        success = (isKey ? handler.Key(str, SizeType(length), false) : handler.String(str, SizeType(length), false));
    }
    else {
        StackStream<typename TargetEncoding::Ch> stackStream(stack_);
        ParseStringToStream<parseFlags, SourceEncoding, TargetEncoding>(s, stackStream);
        RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
        SizeType length = static_cast<SizeType>(stackStream.Length()) - 1;
        const typename TargetEncoding::Ch* const str = stackStream.Pop();
        success = (isKey ? handler.Key(str, length, true) : handler.String(str, length, true));
    }
    if (RAPIDJSON_UNLIKELY(!success))
        RAPIDJSON_PARSE_ERROR(kParseErrorTermination, s.Tell());
}

// Parse string to an output stream.
// This function handles the prefix/suffix double quotes, escaping, and optional encoding validation.
template<unsigned parseFlags, typename SEncoding, typename TEncoding, typename InputStream, typename OutputStream>
RAPIDJSON_FORCEINLINE void ParseStringToStream(InputStream& is, OutputStream& os) {
//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
#define Z16 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
    static const char escape[256] = {
        Z16, Z16, 0, 0,'\"', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '/',
        Z16, Z16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,'\\', 0, 0, 0,
        0, 0,'\b', 0, 0, 0,'\f', 0, 0, 0, 0, 0, 0, 0,'\n', 0,
        0, 0,'\r', 0,'\t', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        Z16, Z16, Z16, Z16, Z16, Z16, Z16, Z16
    };
#undef Z16
//!@endcond

    for (;;) {
        // Scan and copy string before "\\\"" or < 0x20. This is an optional optimization.
        if (!(parseFlags & kParseValidateEncodingFlag))
            ScanCopyUnescapedString(is, os);

        Ch c = is.Peek();
        if (RAPIDJSON_UNLIKELY(c == '\\')) {    // Escape
            size_t escapeOffset = is.Tell();    // For invalid escaping, report the initial '\\' as error offset
            is.Take();
            Ch e = is.Peek();
            if ((sizeof(Ch) == 1 || unsigned(e) < 256) && RAPIDJSON_LIKELY(escape[static_cast<unsigned char>(e)])) {
                is.Take();
                os.Put(static_cast<typename TEncoding::Ch>(escape[static_cast<unsigned char>(e)]));
            }
            else if ((parseFlags & kParseEscapedApostropheFlag) && RAPIDJSON_LIKELY(e == '\'')) { // Allow escaped apostrophe
                is.Take();
                os.Put('\'');
            }
            else if (RAPIDJSON_LIKELY(e == 'u')) {    // Unicode
                is.Take();
                unsigned codepoint = ParseHex4(is, escapeOffset);
                RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
                if (RAPIDJSON_UNLIKELY(codepoint >= 0xD800 && codepoint <= 0xDFFF)) {
                    // high surrogate, check if followed by valid low surrogate
                    if (RAPIDJSON_LIKELY(codepoint <= 0xDBFF)) {
                        // Handle UTF-16 surrogate pair
                        if (RAPIDJSON_UNLIKELY(!Consume(is, '\\') || !Consume(is, 'u')))
                            RAPIDJSON_PARSE_ERROR(kParseErrorStringUnicodeSurrogateInvalid, escapeOffset);
                        unsigned codepoint2 = ParseHex4(is, escapeOffset);
                        RAPIDJSON_PARSE_ERROR_EARLY_RETURN_VOID;
                        if (RAPIDJSON_UNLIKELY(codepoint2 < 0xDC00 || codepoint2 > 0xDFFF))
                            RAPIDJSON_PARSE_ERROR(kParseErrorStringUnicodeSurrogateInvalid, escapeOffset);
                        codepoint = (((codepoint - 0xD800) << 10) | (codepoint2 - 0xDC00)) + 0x10000;
                    }
                    // single low surrogate
                    else
                    {
                        RAPIDJSON_PARSE_ERROR(kParseErrorStringUnicodeSurrogateInvalid, escapeOffset);
                    }
                }
                TEncoding::Encode(os, codepoint);
            }
            else
                RAPIDJSON_PARSE_ERROR(kParseErrorStringEscapeInvalid, escapeOffset);
        }
        else if (RAPIDJSON_UNLIKELY(c == '"')) {    // Closing double quote
            is.Take();
            os.Put('\0');   // null-terminate the string
            return;
        }
        else if (RAPIDJSON_UNLIKELY(static_cast<unsigned>(c) < 0x20)) { // RFC 4627: unescaped = %x20-21 / %x23-5B / %x5D-10FFFF
            if (c == '\0')
                RAPIDJSON_PARSE_ERROR(kParseErrorStringMissQuotationMark, is.Tell());
            else
                RAPIDJSON_PARSE_ERROR(kParseErrorStringInvalidEncoding, is.Tell());
        }
        else {
            size_t offset = is.Tell();
            if (RAPIDJSON_UNLIKELY((parseFlags & kParseValidateEncodingFlag ?
                !Transcoder<SEncoding, TEncoding>::Validate(is, os) :
                !Transcoder<SEncoding, TEncoding>::Transcode(is, os))))
                RAPIDJSON_PARSE_ERROR(kParseErrorStringInvalidEncoding, offset);
        }
    }
}
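// The ScanCopyUnescapedString() overloads below are bulk fast paths used when encoding validation is off:
// they classify 16 bytes at a time (looking for '"', '\\' or control characters < 0x20) and copy unescaped
// runs wholesale, falling back to the per-character loop in ParseStringToStream() as soon as a special
// character is found.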
template<typename InputStream, typename OutputStream>
static RAPIDJSON_FORCEINLINE void ScanCopyUnescapedString(InputStream&, OutputStream&) {
    // Do nothing for generic version
}

#if defined(RAPIDJSON_SSE2) || defined(RAPIDJSON_SSE42)
// StringStream -> StackStream<char>
static RAPIDJSON_FORCEINLINE void ScanCopyUnescapedString(StringStream& is, StackStream<char>& os) {
    const char* p = is.src_;

    // Scan one by one until alignment (unaligned load may cross page boundary and cause crash)
    const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
    while (p != nextAligned)
        if (RAPIDJSON_UNLIKELY(*p == '\"') || RAPIDJSON_UNLIKELY(*p == '\\') || RAPIDJSON_UNLIKELY(static_cast<unsigned>(*p) < 0x20)) {
            is.src_ = p;
            return;
        }
        else
            os.Put(*p++);

    // The rest of string using SIMD
    static const char dquote[16] = { '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"' };
    static const char bslash[16] = { '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\' };
    static const char space[16] = { 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F };
    const __m128i dq = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&dquote[0]));
    const __m128i bs = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&bslash[0]));
    const __m128i sp = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&space[0]));

    for (;; p += 16) {
        const __m128i s = _mm_load_si128(reinterpret_cast<const __m128i *>(p));
        const __m128i t1 = _mm_cmpeq_epi8(s, dq);
        const __m128i t2 = _mm_cmpeq_epi8(s, bs);
        const __m128i t3 = _mm_cmpeq_epi8(_mm_max_epu8(s, sp), sp); // s < 0x20 <=> max(s, 0x1F) == 0x1F
        const __m128i x = _mm_or_si128(_mm_or_si128(t1, t2), t3);
        unsigned short r = static_cast<unsigned short>(_mm_movemask_epi8(x));
        if (RAPIDJSON_UNLIKELY(r != 0)) {   // some of the characters are escaped
            SizeType length;
#ifdef _MSC_VER         // Find the index of first escaped
            unsigned long offset;
            _BitScanForward(&offset, r);
            length = offset;
#else
            length = static_cast<SizeType>(__builtin_ffs(r) - 1);
#endif
            if (length != 0) {
                char* q = reinterpret_cast<char*>(os.Push(length));
                for (size_t i = 0; i < length; i++)
                    q[i] = p[i];

                p += length;
            }
            break;
        }
        _mm_storeu_si128(reinterpret_cast<__m128i *>(os.Push(16)), s);
    }

    is.src_ = p;
}

// InsituStringStream -> InsituStringStream
static RAPIDJSON_FORCEINLINE void ScanCopyUnescapedString(InsituStringStream& is, InsituStringStream& os) {
    RAPIDJSON_ASSERT(&is == &os);
    (void)os;

    if (is.src_ == is.dst_) {
        SkipUnescapedString(is);
        return;
    }

    char* p = is.src_;
    char *q = is.dst_;

    // Scan one by one until alignment (unaligned load may cross page boundary and cause crash)
    const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
    while (p != nextAligned)
        if (RAPIDJSON_UNLIKELY(*p == '\"') || RAPIDJSON_UNLIKELY(*p == '\\') || RAPIDJSON_UNLIKELY(static_cast<unsigned>(*p) < 0x20)) {
            is.src_ = p;
            is.dst_ = q;
            return;
        }
        else
            *q++ = *p++;

    // The rest of string using SIMD
    static const char dquote[16] = { '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"' };
    static const char bslash[16] = { '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\' };
    static const char space[16] = { 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F };
    const __m128i dq = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&dquote[0]));
    const __m128i bs = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&bslash[0]));
    const __m128i sp = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&space[0]));

    for (;; p += 16, q += 16) {
        const __m128i s = _mm_load_si128(reinterpret_cast<const __m128i *>(p));
        const __m128i t1 = _mm_cmpeq_epi8(s, dq);
        const __m128i t2 = _mm_cmpeq_epi8(s, bs);
        const __m128i t3 = _mm_cmpeq_epi8(_mm_max_epu8(s, sp), sp); // s < 0x20 <=> max(s, 0x1F) == 0x1F
        const __m128i x = _mm_or_si128(_mm_or_si128(t1, t2), t3);
        unsigned short r = static_cast<unsigned short>(_mm_movemask_epi8(x));
        if (RAPIDJSON_UNLIKELY(r != 0)) {   // some of the characters are escaped
            size_t length;
#ifdef _MSC_VER         // Find the index of first escaped
            unsigned long offset;
            _BitScanForward(&offset, r);
            length = offset;
#else
            length = static_cast<size_t>(__builtin_ffs(r) - 1);
#endif
            for (const char* pend = p + length; p != pend; )
                *q++ = *p++;
            break;
        }
        _mm_storeu_si128(reinterpret_cast<__m128i *>(q), s);
    }

    is.src_ = p;
    is.dst_ = q;
}

// When read/write pointers are the same for insitu stream, just skip unescaped characters
static RAPIDJSON_FORCEINLINE void SkipUnescapedString(InsituStringStream& is) {
    RAPIDJSON_ASSERT(is.src_ == is.dst_);
    char* p = is.src_;

    // Scan one by one until alignment (unaligned load may cross page boundary and cause crash)
    const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
    for (; p != nextAligned; p++)
        if (RAPIDJSON_UNLIKELY(*p == '\"') || RAPIDJSON_UNLIKELY(*p == '\\') || RAPIDJSON_UNLIKELY(static_cast<unsigned>(*p) < 0x20)) {
            is.src_ = is.dst_ = p;
            return;
        }

    // The rest of string using SIMD
    static const char dquote[16] = { '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"' };
    static const char bslash[16] = { '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\' };
    static const char space[16] = { 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F };
    const __m128i dq = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&dquote[0]));
    const __m128i bs = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&bslash[0]));
    const __m128i sp = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&space[0]));

    for (;; p += 16) {
        const __m128i s = _mm_load_si128(reinterpret_cast<const __m128i *>(p));
        const __m128i t1 = _mm_cmpeq_epi8(s, dq);
        const __m128i t2 = _mm_cmpeq_epi8(s, bs);
        const __m128i t3 = _mm_cmpeq_epi8(_mm_max_epu8(s, sp), sp); // s < 0x20 <=> max(s, 0x1F) == 0x1F
        const __m128i x = _mm_or_si128(_mm_or_si128(t1, t2), t3);
        unsigned short r = static_cast<unsigned short>(_mm_movemask_epi8(x));
        if (RAPIDJSON_UNLIKELY(r != 0)) {   // some of the characters are escaped
            size_t length;
#ifdef _MSC_VER         // Find the index of first escaped
            unsigned long offset;
            _BitScanForward(&offset, r);
            length = offset;
#else
            length = static_cast<size_t>(__builtin_ffs(r) - 1);
#endif
            p += length;
            break;
        }
    }

    is.src_ = is.dst_ = p;
}
#elif defined(RAPIDJSON_NEON)
// StringStream -> StackStream<char>
static RAPIDJSON_FORCEINLINE void ScanCopyUnescapedString(StringStream& is, StackStream<char>& os) {
    const char* p = is.src_;

    // Scan one by one until alignment (unaligned load may cross page boundary and cause crash)
    const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
    while (p != nextAligned)
        if (RAPIDJSON_UNLIKELY(*p == '\"') || RAPIDJSON_UNLIKELY(*p == '\\') || RAPIDJSON_UNLIKELY(static_cast<unsigned>(*p) < 0x20)) {
            is.src_ = p;
            return;
        }
        else
            os.Put(*p++);

    // The rest of string using SIMD
    const uint8x16_t s0 = vmovq_n_u8('"');
    const uint8x16_t s1 = vmovq_n_u8('\\');
    const uint8x16_t s2 = vmovq_n_u8('\b');
    const uint8x16_t s3 = vmovq_n_u8(32);

    for (;; p += 16) {
        const uint8x16_t s = vld1q_u8(reinterpret_cast<const uint8_t *>(p));
        uint8x16_t x = vceqq_u8(s, s0);
        x = vorrq_u8(x, vceqq_u8(s, s1));
        x = vorrq_u8(x, vceqq_u8(s, s2));
        x = vorrq_u8(x, vcltq_u8(s, s3));

        x = vrev64q_u8(x);                                            // Rev in 64
        uint64_t low = vgetq_lane_u64(vreinterpretq_u64_u8(x), 0);    // extract
        uint64_t high = vgetq_lane_u64(vreinterpretq_u64_u8(x), 1);   // extract

        SizeType length = 0;
        bool escaped = false;
        if (low == 0) {
            if (high != 0) {
                uint32_t lz = internal::clzll(high);
                length = 8 + (lz >> 3);
                escaped = true;
            }
        } else {
            uint32_t lz = internal::clzll(low);
            length = lz >> 3;
            escaped = true;
        }
        if (RAPIDJSON_UNLIKELY(escaped)) {   // some of the characters are escaped
            if (length != 0) {
                char* q = reinterpret_cast<char*>(os.Push(length));
                for (size_t i = 0; i < length; i++)
                    q[i] = p[i];

                p += length;
            }
            break;
        }
        vst1q_u8(reinterpret_cast<uint8_t *>(os.Push(16)), s);
    }

    is.src_ = p;
}

// InsituStringStream -> InsituStringStream
static RAPIDJSON_FORCEINLINE void ScanCopyUnescapedString(InsituStringStream& is, InsituStringStream& os) {
    RAPIDJSON_ASSERT(&is == &os);
    (void)os;

    if (is.src_ == is.dst_) {
        SkipUnescapedString(is);
        return;
    }

    char* p = is.src_;
    char *q = is.dst_;

    // Scan one by one until alignment (unaligned load may cross page boundary and cause crash)
    const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
    while (p != nextAligned)
        if (RAPIDJSON_UNLIKELY(*p == '\"') || RAPIDJSON_UNLIKELY(*p == '\\') || RAPIDJSON_UNLIKELY(static_cast<unsigned>(*p) < 0x20)) {
            is.src_ = p;
            is.dst_ = q;
            return;
        }
        else
            *q++ = *p++;

    // The rest of string using SIMD
    const uint8x16_t s0 = vmovq_n_u8('"');
    const uint8x16_t s1 = vmovq_n_u8('\\');
    const uint8x16_t s2 = vmovq_n_u8('\b');
    const uint8x16_t s3 = vmovq_n_u8(32);

    for (;; p += 16, q += 16) {
        const uint8x16_t s = vld1q_u8(reinterpret_cast<uint8_t *>(p));
        uint8x16_t x = vceqq_u8(s, s0);
        x = vorrq_u8(x, vceqq_u8(s, s1));
        x = vorrq_u8(x, vceqq_u8(s, s2));
        x = vorrq_u8(x, vcltq_u8(s, s3));

        x = vrev64q_u8(x);                                            // Rev in 64
        uint64_t low = vgetq_lane_u64(vreinterpretq_u64_u8(x), 0);    // extract
        uint64_t high = vgetq_lane_u64(vreinterpretq_u64_u8(x), 1);   // extract

        SizeType length = 0;
        bool escaped = false;
        if (low == 0) {
            if (high != 0) {
                uint32_t lz = internal::clzll(high);
                length = 8 + (lz >> 3);
                escaped = true;
            }
        } else {
            uint32_t lz = internal::clzll(low);
            length = lz >> 3;
            escaped = true;
        }
        if (RAPIDJSON_UNLIKELY(escaped)) {   // some of the characters are escaped
            for (const char* pend = p + length; p != pend; ) {
                *q++ = *p++;
            }
            break;
        }
        vst1q_u8(reinterpret_cast<uint8_t *>(q), s);
    }

    is.src_ = p;
    is.dst_ = q;
}

// When read/write pointers are the same for insitu stream, just skip unescaped characters
static RAPIDJSON_FORCEINLINE void SkipUnescapedString(InsituStringStream& is) {
    RAPIDJSON_ASSERT(is.src_ == is.dst_);
    char* p = is.src_;

    // Scan one by one until alignment (unaligned load may cross page boundary and cause crash)
    const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
    for (; p != nextAligned; p++)
        if (RAPIDJSON_UNLIKELY(*p == '\"') || RAPIDJSON_UNLIKELY(*p == '\\') || RAPIDJSON_UNLIKELY(static_cast<unsigned>(*p) < 0x20)) {
            is.src_ = is.dst_ = p;
            return;
        }

    // The rest of string using SIMD
    const uint8x16_t s0 = vmovq_n_u8('"');
    const uint8x16_t s1 = vmovq_n_u8('\\');
    const uint8x16_t s2 = vmovq_n_u8('\b');
    const uint8x16_t s3 = vmovq_n_u8(32);

    for (;; p += 16) {
        const uint8x16_t s = vld1q_u8(reinterpret_cast<uint8_t *>(p));
        uint8x16_t x = vceqq_u8(s, s0);
        x = vorrq_u8(x, vceqq_u8(s, s1));
        x = vorrq_u8(x, vceqq_u8(s, s2));
        x = vorrq_u8(x, vcltq_u8(s, s3));

        x = vrev64q_u8(x);                                            // Rev in 64
        uint64_t low = vgetq_lane_u64(vreinterpretq_u64_u8(x), 0);    // extract
        uint64_t high = vgetq_lane_u64(vreinterpretq_u64_u8(x), 1);   // extract

        if (low == 0) {
            if (high != 0) {
                uint32_t lz = internal::clzll(high);
                p += 8 + (lz >> 3);
                break;
            }
        } else {
            uint32_t lz = internal::clzll(low);
            p += lz >> 3;
            break;
        }
    }

    is.src_ = is.dst_ = p;
}
#endif // RAPIDJSON_NEON
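// Unlike SSE, NEON has no movemask instruction, so the variants above locate the first special byte by
// reversing each 64-bit half (vrev64q_u8) and counting leading zero bits (internal::clzll); lz >> 3 is
// then the byte index within that half.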
template<typename InputStream, bool backup, bool pushOnTake>
class NumberStream;

template<typename InputStream>
class NumberStream<InputStream, false, false> {
public:
    typedef typename InputStream::Ch Ch;

    NumberStream(GenericReader& reader, InputStream& s) : is(s) { (void)reader; }

    RAPIDJSON_FORCEINLINE Ch Peek() const { return is.Peek(); }
    RAPIDJSON_FORCEINLINE Ch TakePush() { return is.Take(); }
    RAPIDJSON_FORCEINLINE Ch Take() { return is.Take(); }
    RAPIDJSON_FORCEINLINE void Push(char) {}

    size_t Tell() { return is.Tell(); }
    size_t Length() { return 0; }
    const char* Pop() { return 0; }

protected:
    NumberStream& operator=(const NumberStream&);

    InputStream& is;
};

template<typename InputStream>
class NumberStream<InputStream, true, false> : public NumberStream<InputStream, false, false> {
    typedef NumberStream<InputStream, false, false> Base;
public:
    NumberStream(GenericReader& reader, InputStream& is) : Base(reader, is), stackStream(reader.stack_) {}

    RAPIDJSON_FORCEINLINE Ch TakePush() {
        stackStream.Put(static_cast<char>(Base::is.Peek()));
        return Base::is.Take();
    }

    RAPIDJSON_FORCEINLINE void Push(char c) {
        stackStream.Put(c);
    }
size_t Length() { return stackStream.Length(); }
const char* Pop() {
stackStream.Put('\0');
return stackStream.Pop();
}
private:
StackStream<char> stackStream;
};
template<typename InputStream>
class NumberStream<InputStream, true, true> : public NumberStream<InputStream, true, false> {
typedef NumberStream<InputStream, true, false> Base;
public:
NumberStream(GenericReader& reader, InputStream& is) : Base(reader, is) {}
RAPIDJSON_FORCEINLINE Ch Take() { return Base::TakePush(); }
};
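// The three NumberStream specializations trade speed for bookkeeping: <false, false> reads straight from
// the input, <true, false> additionally copies every digit taken via TakePush() onto the stack (needed for
// full-precision decoding), and <true, true> makes plain Take() push as well, so the complete token is
// captured when numbers are parsed as raw strings.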
template<unsigned parseFlags, typename InputStream, typename Handler>
void ParseNumber(InputStream& is, Handler& handler) {
internal::StreamLocalCopy<InputStream> copy(is);
NumberStream<InputStream,
((parseFlags & kParseNumbersAsStringsFlag) != 0) ?
((parseFlags & kParseInsituFlag) == 0) :
((parseFlags & kParseFullPrecisionFlag) != 0),
(parseFlags & kParseNumbersAsStringsFlag) != 0 &&
(parseFlags & kParseInsituFlag) == 0> s(*this, copy.s);
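// Template-argument selection: with kParseNumbersAsStringsFlag and a non-insitu stream, every character
// consumed must be copied to the stack (backup and pushOnTake both true); for insitu numbers-as-strings
// the original buffer already holds the characters, so nothing is copied; otherwise backup is enabled
// only when kParseFullPrecisionFlag needs the exact digit string.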
size_t startOffset = s.Tell();
double d = 0.0;
bool useNanOrInf = false;
// Parse minus
bool minus = Consume(s, '-');
// Parse int: zero / ( digit1-9 *DIGIT )
unsigned i = 0;
uint64_t i64 = 0;
bool use64bit = false;
int significandDigit = 0;
if (RAPIDJSON_UNLIKELY(s.Peek() == '0')) {
i = 0;
s.TakePush();
}
else if (RAPIDJSON_LIKELY(s.Peek() >= '1' && s.Peek() <= '9')) {
i = static_cast<unsigned>(s.TakePush() - '0');
if (minus)
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
if (RAPIDJSON_UNLIKELY(i >= 214748364)) { // 2^31 = 2147483648
if (RAPIDJSON_LIKELY(i != 214748364 || s.Peek() > '8')) {
i64 = i;
use64bit = true;
break;
}
}
i = i * 10 + static_cast<unsigned>(s.TakePush() - '0');
significandDigit++;
}
else
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
if (RAPIDJSON_UNLIKELY(i >= 429496729)) { // 2^32 - 1 = 4294967295
if (RAPIDJSON_LIKELY(i != 429496729 || s.Peek() > '5')) {
i64 = i;
use64bit = true;
break;
}
}
i = i * 10 + static_cast<unsigned>(s.TakePush() - '0');
significandDigit++;
}
}
// Parse NaN or Infinity here
else if ((parseFlags & kParseNanAndInfFlag) && RAPIDJSON_LIKELY((s.Peek() == 'I' || s.Peek() == 'N'))) {
if (Consume(s, 'N')) {
if (Consume(s, 'a') && Consume(s, 'N')) {
d = std::numeric_limits<double>::quiet_NaN();
useNanOrInf = true;
}
}
else if (RAPIDJSON_LIKELY(Consume(s, 'I'))) {
if (Consume(s, 'n') && Consume(s, 'f')) {
d = (minus ? -std::numeric_limits<double>::infinity() : std::numeric_limits<double>::infinity());
useNanOrInf = true;
if (RAPIDJSON_UNLIKELY(s.Peek() == 'i' && !(Consume(s, 'i') && Consume(s, 'n')
&& Consume(s, 'i') && Consume(s, 't') && Consume(s, 'y')))) {
RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, s.Tell());
}
}
}
if (RAPIDJSON_UNLIKELY(!useNanOrInf)) {
RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, s.Tell());
}
}
else
RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, s.Tell());
// Parse 64bit int
bool useDouble = false;
if (use64bit) {
if (minus)
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
if (RAPIDJSON_UNLIKELY(i64 >= RAPIDJSON_UINT64_C2(0x0CCCCCCC, 0xCCCCCCCC))) // 2^63 = 9223372036854775808
if (RAPIDJSON_LIKELY(i64 != RAPIDJSON_UINT64_C2(0x0CCCCCCC, 0xCCCCCCCC) || s.Peek() > '8')) {
d = static_cast<double>(i64);
useDouble = true;
break;
}
i64 = i64 * 10 + static_cast<unsigned>(s.TakePush() - '0');
significandDigit++;
}
else
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
if (RAPIDJSON_UNLIKELY(i64 >= RAPIDJSON_UINT64_C2(0x19999999, 0x99999999))) // 2^64 - 1 = 18446744073709551615
if (RAPIDJSON_LIKELY(i64 != RAPIDJSON_UINT64_C2(0x19999999, 0x99999999) || s.Peek() > '5')) {
d = static_cast<double>(i64);
useDouble = true;
break;
}
i64 = i64 * 10 + static_cast<unsigned>(s.TakePush() - '0');
significandDigit++;
}
}
// Force double for big integer
if (useDouble) {
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
d = d * 10 + (s.TakePush() - '0');
}
}
// Parse frac = decimal-point 1*DIGIT
int expFrac = 0;
size_t decimalPosition;
if (Consume(s, '.')) {
decimalPosition = s.Length();
if (RAPIDJSON_UNLIKELY(!(s.Peek() >= '0' && s.Peek() <= '9')))
RAPIDJSON_PARSE_ERROR(kParseErrorNumberMissFraction, s.Tell());
if (!useDouble) {
#if RAPIDJSON_64BIT
// Use i64 to store significand in 64-bit architecture
if (!use64bit)
i64 = i;
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
if (i64 > RAPIDJSON_UINT64_C2(0x1FFFFF, 0xFFFFFFFF)) // 2^53 - 1 for fast path
break;
else {
i64 = i64 * 10 + static_cast<unsigned>(s.TakePush() - '0');
--expFrac;
if (i64 != 0)
significandDigit++;
}
}
d = static_cast<double>(i64);
#else
// Use double to store significand in 32-bit architecture
d = static_cast<double>(use64bit ? i64 : i);
#endif
useDouble = true;
}
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
if (significandDigit < 17) {
d = d * 10.0 + (s.TakePush() - '0');
--expFrac;
if (RAPIDJSON_LIKELY(d > 0.0))
significandDigit++;
}
else
s.TakePush();
}
}
else
decimalPosition = s.Length(); // decimal position at the end of integer.
// Parse exp = e [ minus / plus ] 1*DIGIT
int exp = 0;
if (Consume(s, 'e') || Consume(s, 'E')) {
if (!useDouble) {
d = static_cast<double>(use64bit ? i64 : i);
useDouble = true;
}
bool expMinus = false;
if (Consume(s, '+'))
;
else if (Consume(s, '-'))
expMinus = true;
if (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
exp = static_cast<int>(s.Take() - '0');
if (expMinus) {
// (exp + expFrac) must not underflow int => we're detecting when -exp gets
// dangerously close to INT_MIN (a pessimistic next digit 9 would push it into
// underflow territory):
//
// -(exp * 10 + 9) + expFrac >= INT_MIN
// <=> exp <= (expFrac - INT_MIN - 9) / 10
RAPIDJSON_ASSERT(expFrac <= 0);
int maxExp = (expFrac + 2147483639) / 10;
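// 2147483639 == -(INT_MIN + 9), i.e. this is the (expFrac - INT_MIN - 9) / 10 bound derived above.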
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
exp = exp * 10 + static_cast<int>(s.Take() - '0');
if (RAPIDJSON_UNLIKELY(exp > maxExp)) {
while (RAPIDJSON_UNLIKELY(s.Peek() >= '0' && s.Peek() <= '9')) // Consume the rest of exponent
s.Take();
}
}
}
else { // positive exp
int maxExp = 308 - expFrac;
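// 308 is the largest decimal exponent a double can represent (DBL_MAX is about 1.8e308).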
while (RAPIDJSON_LIKELY(s.Peek() >= '0' && s.Peek() <= '9')) {
exp = exp * 10 + static_cast<int>(s.Take() - '0');
if (RAPIDJSON_UNLIKELY(exp > maxExp))
RAPIDJSON_PARSE_ERROR(kParseErrorNumberTooBig, startOffset);
}
}
}
else
RAPIDJSON_PARSE_ERROR(kParseErrorNumberMissExponent, s.Tell());
if (expMinus)
exp = -exp;
}
// Finish parsing, call event according to the type of number.
bool cont = true;
if (parseFlags & kParseNumbersAsStringsFlag) {
if (parseFlags & kParseInsituFlag) {
s.Pop(); // Pop stack no matter if it will be used or not.
typename InputStream::Ch* head = is.PutBegin();
const size_t length = s.Tell() - startOffset;
RAPIDJSON_ASSERT(length <= 0xFFFFFFFF);
// We cannot insert the '\0' terminator here: it would erase the character (e.g. the comma) that follows this number
const typename TargetEncoding::Ch* const str = reinterpret_cast<typename TargetEncoding::Ch*>(head);
cont = handler.RawNumber(str, SizeType(length), false);
}
else {
SizeType numCharsToCopy = static_cast<SizeType>(s.Length());
StringStream srcStream(s.Pop());
StackStream<typename TargetEncoding::Ch> dstStream(stack_);
while (numCharsToCopy--) {
Transcoder<UTF8<>, TargetEncoding>::Transcode(srcStream, dstStream);
}
dstStream.Put('\0');
const typename TargetEncoding::Ch* str = dstStream.Pop();
const SizeType length = static_cast<SizeType>(dstStream.Length()) - 1;
cont = handler.RawNumber(str, SizeType(length), true);
}
}
else {
size_t length = s.Length();
const char* decimal = s.Pop(); // Pop stack no matter if it will be used or not.
if (useDouble) {
int p = exp + expFrac;
if (parseFlags & kParseFullPrecisionFlag)
d = internal::StrtodFullPrecision(d, p, decimal, length, decimalPosition, exp);
else
d = internal::StrtodNormalPrecision(d, p);
// Use > max, instead of == inf, to fix bogus warning -Wfloat-equal
if (d > (std::numeric_limits<double>::max)()) {
// Overflow
// TODO: internal::StrtodX should report overflow (or underflow)
RAPIDJSON_PARSE_ERROR(kParseErrorNumberTooBig, startOffset);
}
cont = handler.Double(minus ? -d : d);
}
else if (useNanOrInf) {
cont = handler.Double(d);
}
else {
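// Negation is done as ~u + 1 (two's complement) instead of -static_cast<int64_t>(u), which avoids the
// signed overflow a plain negation would hit for INT64_MIN / INT_MIN.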
if (use64bit) {
if (minus)
cont = handler.Int64(static_cast<int64_t>(~i64 + 1));
else
cont = handler.Uint64(i64);
}
else {
if (minus)
cont = handler.Int(static_cast<int32_t>(~i + 1));
else
cont = handler.Uint(i);
}
}
}
if (RAPIDJSON_UNLIKELY(!cont))
RAPIDJSON_PARSE_ERROR(kParseErrorTermination, startOffset);
}
// Parse any JSON value
template<unsigned parseFlags, typename InputStream, typename Handler>
void ParseValue(InputStream& is, Handler& handler) {
switch (is.Peek()) {
case 'n': ParseNull <parseFlags>(is, handler); break;
case 't': ParseTrue <parseFlags>(is, handler); break;
case 'f': ParseFalse <parseFlags>(is, handler); break;
case '"': ParseString<parseFlags>(is, handler); break;
case '{': ParseObject<parseFlags>(is, handler); break;
case '[': ParseArray <parseFlags>(is, handler); break;
default :
ParseNumber<parseFlags>(is, handler);
break;
}
}
// Iterative Parsing
// States
enum IterativeParsingState {
IterativeParsingFinishState = 0, // sink states at top
IterativeParsingErrorState, // sink states at top
IterativeParsingStartState,
// Object states
IterativeParsingObjectInitialState,
IterativeParsingMemberKeyState,
IterativeParsingMemberValueState,
IterativeParsingObjectFinishState,
// Array states
IterativeParsingArrayInitialState,
IterativeParsingElementState,
IterativeParsingArrayFinishState,
// Single value state
IterativeParsingValueState,
// Delimiter states (at bottom)
IterativeParsingElementDelimiterState,
IterativeParsingMemberDelimiterState,
IterativeParsingKeyValueDelimiterState,
cIterativeParsingStateCount
};
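// Keeping Finish/Error at the smallest enum values and the delimiter states at the largest presumably lets
// completion and delimiter checks elsewhere in this header be done with a single integer comparison.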
// Tokens
enum Token {
LeftBracketToken = 0,
RightBracketToken,
LeftCurlyBracketToken,
RightCurlyBracketToken,
CommaToken,
ColonToken,
StringToken,
FalseToken,
TrueToken,
NullToken,
NumberToken,
kTokenCount
};
RAPIDJSON_FORCEINLINE Token Tokenize(Ch c) const {
//!@cond RAPIDJSON_HIDDEN_FROM_DOXYGEN
#define N NumberToken
#define N16 N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N
// Maps from ASCII to Token
static const unsigned char tokenMap[256] = {
N16, // 00~0F
N16, // 10~1F
N, N, StringToken, N, N, N, N, N, N, N, N, N, CommaToken, N, N, N, // 20~2F
N, N, N, N, N, N, N, N, N, N, ColonToken, N, N, N, N, N, // 30~3F
N16, // 40~4F
N, N, N, N, N, N, N, N, N, N, N, LeftBracketToken, N, RightBracketToken, N, N, // 50~5F
N, N, N, N, N, N, FalseToken, N, N, N, N, N, N, N, NullToken, N, // 60~6F
N, N, N, N, TrueToken, N, N, N, N, N, N, LeftCurlyBracketToken, N, RightCurlyBracketToken, N, N, // 70~7F
N16, N16, N16, N16, N16, N16, N16, N16 // 80~FF
};
#undef N
#undef N16
//!@endcond
if (sizeof(Ch) == 1 || static_cast<unsigned>(c) < 256)
return static_cast<Token>(tokenMap[static_cast<unsigned char>(c)]);
else
return NumberToken;
}
RAPIDJSON_FORCEINLINE IterativeParsingState Predict(IterativeParsingState state, Token token) const {
// current state x one lookahead token -> new state
static const char G[cIterativeParsingStateCount][kTokenCount] = {
// Finish(sink state)
{
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState
},
// Error(sink state)
{
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState
},
// Start
{
IterativeParsingArrayInitialState, // Left bracket
IterativeParsingErrorState, // Right bracket
IterativeParsingObjectInitialState, // Left curly bracket
IterativeParsingErrorState, // Right curly bracket
IterativeParsingErrorState, // Comma
IterativeParsingErrorState, // Colon
IterativeParsingValueState, // String
IterativeParsingValueState, // False
IterativeParsingValueState, // True
IterativeParsingValueState, // Null
IterativeParsingValueState // Number
},
// ObjectInitial
{
IterativeParsingErrorState, // Left bracket
IterativeParsingErrorState, // Right bracket
IterativeParsingErrorState, // Left curly bracket
IterativeParsingObjectFinishState, // Right curly bracket
IterativeParsingErrorState, // Comma
IterativeParsingErrorState, // Colon
IterativeParsingMemberKeyState, // String
IterativeParsingErrorState, // False
IterativeParsingErrorState, // True
IterativeParsingErrorState, // Null
IterativeParsingErrorState // Number
},
// MemberKey
{
IterativeParsingErrorState, // Left bracket
IterativeParsingErrorState, // Right bracket
IterativeParsingErrorState, // Left curly bracket
IterativeParsingErrorState, // Right curly bracket
IterativeParsingErrorState, // Comma
IterativeParsingKeyValueDelimiterState, // Colon
IterativeParsingErrorState, // String
IterativeParsingErrorState, // False
IterativeParsingErrorState, // True
IterativeParsingErrorState, // Null
IterativeParsingErrorState // Number
},
// MemberValue
{
IterativeParsingErrorState, // Left bracket
IterativeParsingErrorState, // Right bracket
IterativeParsingErrorState, // Left curly bracket
IterativeParsingObjectFinishState, // Right curly bracket
IterativeParsingMemberDelimiterState, // Comma
IterativeParsingErrorState, // Colon
IterativeParsingErrorState, // String
IterativeParsingErrorState, // False
IterativeParsingErrorState, // True
IterativeParsingErrorState, // Null
IterativeParsingErrorState // Number
},
// ObjectFinish(sink state)
{
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState
},
// ArrayInitial
{
IterativeParsingArrayInitialState, // Left bracket(push Element state)
IterativeParsingArrayFinishState, // Right bracket
IterativeParsingObjectInitialState, // Left curly bracket(push Element state)
IterativeParsingErrorState, // Right curly bracket
IterativeParsingErrorState, // Comma
IterativeParsingErrorState, // Colon
IterativeParsingElementState, // String
IterativeParsingElementState, // False
IterativeParsingElementState, // True
IterativeParsingElementState, // Null
IterativeParsingElementState // Number
},
// Element
{
IterativeParsingErrorState, // Left bracket
IterativeParsingArrayFinishState, // Right bracket
IterativeParsingErrorState, // Left curly bracket
IterativeParsingErrorState, // Right curly bracket
IterativeParsingElementDelimiterState, // Comma
IterativeParsingErrorState, // Colon
IterativeParsingErrorState, // String
IterativeParsingErrorState, // False
IterativeParsingErrorState, // True
IterativeParsingErrorState, // Null
IterativeParsingErrorState // Number
},
// ArrayFinish(sink state)
{
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState
},
// Single Value (sink state)
{
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState, IterativeParsingErrorState,
IterativeParsingErrorState
},
// ElementDelimiter
{
IterativeParsingArrayInitialState, // Left bracket(push Element state)
IterativeParsingArrayFinishState, // Right bracket
IterativeParsingObjectInitialState, // Left curly bracket(push Element state)
IterativeParsingErrorState, // Right curly bracket
IterativeParsingErrorState, // Comma
IterativeParsingErrorState, // Colon
IterativeParsingElementState, // String
IterativeParsingElementState, // False
IterativeParsingElementState, // True
IterativeParsingElementState, // Null
IterativeParsingElementState // Number
},
// MemberDelimiter
{
IterativeParsingErrorState, // Left bracket
IterativeParsingErrorState, // Right bracket
IterativeParsingErrorState, // Left curly bracket
IterativeParsingObjectFinishState, // Right curly bracket
IterativeParsingErrorState, // Comma
IterativeParsingErrorState, // Colon
IterativeParsingMemberKeyState, // String
IterativeParsingErrorState, // False
IterativeParsingErrorState, // True
IterativeParsingErrorState, // Null
IterativeParsingErrorState // Number
},
// KeyValueDelimiter
{
IterativeParsingArrayInitialState, // Left bracket(push MemberValue state)
IterativeParsingErrorState, // Right bracket
IterativeParsingObjectInitialState, // Left curly bracket(push MemberValue state)
IterativeParsingErrorState, // Right curly bracket
IterativeParsingErrorState, // Comma
IterativeParsingErrorState, // Colon
IterativeParsingMemberValueState, // String
IterativeParsingMemberValueState, // False
IterativeParsingMemberValueState, // True
IterativeParsingMemberValueState, // Null
IterativeParsingMemberValueState // Number
},
}; // End of G
return static_cast<IterativeParsingState>(G[state][token]);
}
// Advance the token stream and the parser state, based on the candidate destination state returned by Predict().
// May return a new state on state pop.
template <unsigned parseFlags, typename InputStream, typename Handler>
RAPIDJSON_FORCEINLINE IterativeParsingState Transit(IterativeParsingState src, Token token, IterativeParsingState dst, InputStream& is, Handler& handler) {
(void)token;
switch (dst) {
case IterativeParsingErrorState:
return dst;
case IterativeParsingObjectInitialState:
case IterativeParsingArrayInitialState:
{
// Push the state (Element or MemberValue) if we are nested in another array or in a member value.
// In this way we can get the correct state on ObjectFinish or ArrayFinish by frame pop.
IterativeParsingState n = src;
if (src == IterativeParsingArrayInitialState || src == IterativeParsingElementDelimiterState)
n = IterativeParsingElementState;
else if (src == IterativeParsingKeyValueDelimiterState)
n = IterativeParsingMemberValueState;
// Push current state.
*stack_.template Push<SizeType>(1) = n;
// Initialize and push the member/element count.
*stack_.template Push<SizeType>(1) = 0;
// Call handler
bool hr = (dst == IterativeParsingObjectInitialState) ? handler.StartObject() : handler.StartArray();
// If the handler short-circuits the parsing:
if (!hr) {
RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorTermination, is.Tell());
return IterativeParsingErrorState;
}
else {
is.Take();
return dst;
}
}
case IterativeParsingMemberKeyState:
ParseString<parseFlags>(is, handler, true);
if (HasParseError())
return IterativeParsingErrorState;
else
return dst;
case IterativeParsingKeyValueDelimiterState:
RAPIDJSON_ASSERT(token == ColonToken);
is.Take();
return dst;
case IterativeParsingMemberValueState:
// Must be a non-compound value; otherwise the destination would be the ObjectInitial or ArrayInitial state.
ParseValue<parseFlags>(is, handler);
if (HasParseError()) {
return IterativeParsingErrorState;
}
return dst;
case IterativeParsingElementState:
// Must be a non-compound value; otherwise the destination would be the ObjectInitial or ArrayInitial state.
ParseValue<parseFlags>(is, handler);
if (HasParseError()) {
return IterativeParsingErrorState;
}
return dst;
case IterativeParsingMemberDelimiterState:
case IterativeParsingElementDelimiterState:
is.Take();
// Update member/element count.
*stack_.template Top<SizeType>() = *stack_.template Top<SizeType>() + 1;
return dst;
case IterativeParsingObjectFinishState:
{
// Transitioning from a delimiter is only allowed when trailing commas are enabled.
if (!(parseFlags & kParseTrailingCommasFlag) && src == IterativeParsingMemberDelimiterState) {
RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorObjectMissName, is.Tell());
return IterativeParsingErrorState;
}
// Get member count.
SizeType c = *stack_.template Pop<SizeType>(1);
// If the object is not empty, count the last member.
if (src == IterativeParsingMemberValueState)
++c;
// Restore the state.
IterativeParsingState n = static_cast<IterativeParsingState>(*stack_.template Pop<SizeType>(1));
// Transit to Finish state if this is the topmost scope.
if (n == IterativeParsingStartState)
n = IterativeParsingFinishState;
// Call handler
bool hr = handler.EndObject(c);
// If the handler short-circuits the parsing:
if (!hr) {
RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorTermination, is.Tell());
return IterativeParsingErrorState;
}
else {
is.Take();
return n;
}
}
case IterativeParsingArrayFinishState:
{
// Transitioning from a delimiter is only allowed when trailing commas are enabled.
if (!(parseFlags & kParseTrailingCommasFlag) && src == IterativeParsingElementDelimiterState) {
RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorValueInvalid, is.Tell());
return IterativeParsingErrorState;
}
// Get element count.
SizeType c = *stack_.template Pop<SizeType>(1);
// If the array is not empty, count the last element.
if (src == IterativeParsingElementState)
++c;
// Restore the state.
IterativeParsingState n = static_cast<IterativeParsingState>(*stack_.template Pop<SizeType>(1));
// Transit to Finish state if this is the topmost scope.
if (n == IterativeParsingStartState)
n = IterativeParsingFinishState;
// Call handler
bool hr = handler.EndArray(c);
// If the handler short-circuits the parsing:
if (!hr) {
RAPIDJSON_PARSE_ERROR_NORETURN(kParseErrorTermination, is.Tell());
return IterativeParsingErrorState;
}
else {
is.Take();
return n;
}
}
default:
// This branch is actually for IterativeParsingValueState.
// Using `default:` rather than `case IterativeParsingValueState:` is for code coverage.
// The IterativeParsingStartState is not enumerated in this switch-case.
// That case is impossible here and is caught by the assertion below.
// The IterativeParsingFinishState is not enumerated in this switch-case either.
// It is a "derivative" state which cannot be triggered by Predict() directly,
// so it cannot happen here; the assertion below catches it as well.
RAPIDJSON_ASSERT(dst == IterativeParsingValueState);
// Must be a non-compound value; otherwise the destination would be the ObjectInitial or ArrayInitial state.
ParseValue<parseFlags>(is, handler);
if (HasParseError()) {
return IterativeParsingErrorState;
}
return IterativeParsingFinishState;
}
}
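// A rough sketch of how the state stack above evolves (two SizeType slots are pushed per
// nesting level: first the state to resume after the matching Finish, then a running
// member/element count). For the input {"a":[1,2]}:
//   '{'  push resume=Start,       push count=0
//   '['  push resume=MemberValue, push count=0   (src was KeyValueDelimiter)
//   ','  top count becomes 1
//   ']'  pop count (1+1 = 2 elements), pop resume state -> MemberValue
//   '}'  pop count (0+1 = 1 member),   pop resume state -> Start, reported as Finish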
template <typename InputStream>
void HandleError(IterativeParsingState src, InputStream& is) {
if (HasParseError()) {
// Error flag has been set.
return;
}
switch (src) {
case IterativeParsingStartState: RAPIDJSON_PARSE_ERROR(kParseErrorDocumentEmpty, is.Tell()); return;
case IterativeParsingFinishState: RAPIDJSON_PARSE_ERROR(kParseErrorDocumentRootNotSingular, is.Tell()); return;
case IterativeParsingObjectInitialState:
case IterativeParsingMemberDelimiterState: RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissName, is.Tell()); return;
case IterativeParsingMemberKeyState: RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissColon, is.Tell()); return;
case IterativeParsingMemberValueState: RAPIDJSON_PARSE_ERROR(kParseErrorObjectMissCommaOrCurlyBracket, is.Tell()); return;
case IterativeParsingKeyValueDelimiterState:
case IterativeParsingArrayInitialState:
case IterativeParsingElementDelimiterState: RAPIDJSON_PARSE_ERROR(kParseErrorValueInvalid, is.Tell()); return;
default: RAPIDJSON_ASSERT(src == IterativeParsingElementState); RAPIDJSON_PARSE_ERROR(kParseErrorArrayMissCommaOrSquareBracket, is.Tell()); return;
}
}
RAPIDJSON_FORCEINLINE bool IsIterativeParsingDelimiterState(IterativeParsingState s) const {
return s >= IterativeParsingElementDelimiterState;
}
RAPIDJSON_FORCEINLINE bool IsIterativeParsingCompleteState(IterativeParsingState s) const {
return s <= IterativeParsingErrorState;
}
template <unsigned parseFlags, typename InputStream, typename Handler>
ParseResult IterativeParse(InputStream& is, Handler& handler) {
parseResult_.Clear();
ClearStackOnExit scope(*this);
IterativeParsingState state = IterativeParsingStartState;
SkipWhitespaceAndComments<parseFlags>(is);
RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_);
while (is.Peek() != '\0') {
Token t = Tokenize(is.Peek());
IterativeParsingState n = Predict(state, t);
IterativeParsingState d = Transit<parseFlags>(state, t, n, is, handler);
if (d == IterativeParsingErrorState) {
HandleError(state, is);
break;
}
state = d;
// Do not consume the stream any further once a root JSON value has been parsed.
if ((parseFlags & kParseStopWhenDoneFlag) && state == IterativeParsingFinishState)
break;
SkipWhitespaceAndComments<parseFlags>(is);
RAPIDJSON_PARSE_ERROR_EARLY_RETURN(parseResult_);
}
// Handle the end of file.
if (state != IterativeParsingFinishState)
HandleError(state, is);
return parseResult_;
}
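// The loop in IterativeParse() above is a classic table-driven parser: peek one character,
// map it to a Token, look the (state, token) pair up via Predict(), and let Transit()
// perform the side effects (handler calls, stack push/pop, stream consumption). A sketch of
// the first iteration for the input "[1]":
//   Tokenize('[')                     -> LeftBracketToken
//   Predict(Start, LeftBracketToken)  -> ArrayInitial
//   Transit(...)                      -> calls handler.StartArray(), consumes '[', pushes the resume state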
static const size_t kDefaultStackCapacity = 256; //!< Default stack capacity in bytes for storing a single decoded string.
internal::Stack<StackAllocator> stack_; //!< A stack for storing decoded string temporarily during non-destructive parsing.
ParseResult parseResult_;
IterativeParsingState state_;
}; // class GenericReader
//! Reader with UTF8 encoding and default allocator.
typedef GenericReader<UTF8<>, UTF8<> > Reader;
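/* A minimal SAX usage sketch for Reader (MyHandler is illustrative, not part of the library):

       struct MyHandler : BaseReaderHandler<UTF8<>, MyHandler> {
           bool Int(int) { return true; }          // invoked for each integer value
           bool Default() { return true; }         // fallback for all other events
       };
       Reader reader;
       StringStream ss("[1, 2, 3]");
       MyHandler handler;
       ParseResult ok = reader.Parse(ss, handler); // evaluates to false if parsing failed
*/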
RAPIDJSON_NAMESPACE_END
#if defined(__clang__) || defined(_MSC_VER)
RAPIDJSON_DIAG_POP
#endif
#ifdef __GNUC__
RAPIDJSON_DIAG_POP
#endif
#endif // RAPIDJSON_READER_H_
// Tencent is pleased to support the open source community by making RapidJSON available.
//
// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef RAPIDJSON_SCHEMA_H_
#define RAPIDJSON_SCHEMA_H_
#include "document.h"
#include "pointer.h"
#include "stringbuffer.h"
#include "error/en.h"
#include "uri.h"
#include <cmath> // abs, floor
#if !defined(RAPIDJSON_SCHEMA_USE_INTERNALREGEX)
#define RAPIDJSON_SCHEMA_USE_INTERNALREGEX 1
#else
#define RAPIDJSON_SCHEMA_USE_INTERNALREGEX 0
#endif
#if !RAPIDJSON_SCHEMA_USE_INTERNALREGEX && defined(RAPIDJSON_SCHEMA_USE_STDREGEX) && (__cplusplus >=201103L || (defined(_MSC_VER) && _MSC_VER >= 1800))
#define RAPIDJSON_SCHEMA_USE_STDREGEX 1
#else
#define RAPIDJSON_SCHEMA_USE_STDREGEX 0
#endif
#if RAPIDJSON_SCHEMA_USE_INTERNALREGEX
#include "internal/regex.h"
#elif RAPIDJSON_SCHEMA_USE_STDREGEX
#include <regex>
#endif
#if RAPIDJSON_SCHEMA_USE_INTERNALREGEX || RAPIDJSON_SCHEMA_USE_STDREGEX
#define RAPIDJSON_SCHEMA_HAS_REGEX 1
#else
#define RAPIDJSON_SCHEMA_HAS_REGEX 0
#endif
#ifndef RAPIDJSON_SCHEMA_VERBOSE
#define RAPIDJSON_SCHEMA_VERBOSE 0
#endif
#if RAPIDJSON_SCHEMA_VERBOSE
#include "stringbuffer.h"
#endif
RAPIDJSON_DIAG_PUSH
#if defined(__GNUC__)
RAPIDJSON_DIAG_OFF(effc++)
#endif
#ifdef __clang__
RAPIDJSON_DIAG_OFF(weak-vtables)
RAPIDJSON_DIAG_OFF(exit-time-destructors)
RAPIDJSON_DIAG_OFF(c++98-compat-pedantic)
RAPIDJSON_DIAG_OFF(variadic-macros)
#elif defined(_MSC_VER)
RAPIDJSON_DIAG_OFF(4512) // assignment operator could not be generated
#endif
RAPIDJSON_NAMESPACE_BEGIN
///////////////////////////////////////////////////////////////////////////////
// Verbose Utilities
#if RAPIDJSON_SCHEMA_VERBOSE
namespace internal {
inline void PrintInvalidKeyword(const char* keyword) {
printf("Fail keyword: %s\n", keyword);
}
inline void PrintInvalidKeyword(const wchar_t* keyword) {
wprintf(L"Fail keyword: %ls\n", keyword);
}
inline void PrintInvalidDocument(const char* document) {
printf("Fail document: %s\n\n", document);
}
inline void PrintInvalidDocument(const wchar_t* document) {
wprintf(L"Fail document: %ls\n\n", document);
}
inline void PrintValidatorPointers(unsigned depth, const char* s, const char* d) {
printf("S: %*s%s\nD: %*s%s\n\n", depth * 4, " ", s, depth * 4, " ", d);
}
inline void PrintValidatorPointers(unsigned depth, const wchar_t* s, const wchar_t* d) {
wprintf(L"S: %*ls%ls\nD: %*ls%ls\n\n", depth * 4, L" ", s, depth * 4, L" ", d);
}
} // namespace internal
#endif // RAPIDJSON_SCHEMA_VERBOSE
///////////////////////////////////////////////////////////////////////////////
// RAPIDJSON_INVALID_KEYWORD_RETURN
#if RAPIDJSON_SCHEMA_VERBOSE
#define RAPIDJSON_INVALID_KEYWORD_VERBOSE(keyword) internal::PrintInvalidKeyword(keyword)
#else
#define RAPIDJSON_INVALID_KEYWORD_VERBOSE(keyword)
#endif
#define RAPIDJSON_INVALID_KEYWORD_RETURN(code)\
RAPIDJSON_MULTILINEMACRO_BEGIN\
context.invalidCode = code;\
context.invalidKeyword = SchemaType::GetValidateErrorKeyword(code).GetString();\
RAPIDJSON_INVALID_KEYWORD_VERBOSE(context.invalidKeyword);\
return false;\
RAPIDJSON_MULTILINEMACRO_END
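// For reference, RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorType) expands (roughly) to a
// single-statement block that records the failure on the current context and bails out:
//   context.invalidCode = kValidateErrorType;
//   context.invalidKeyword = SchemaType::GetValidateErrorKeyword(kValidateErrorType).GetString();
//   return false;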
///////////////////////////////////////////////////////////////////////////////
// ValidateFlag
/*! \def RAPIDJSON_VALIDATE_DEFAULT_FLAGS
\ingroup RAPIDJSON_CONFIG
\brief User-defined kValidateDefaultFlags definition.
Users can define this as any combination of \c ValidateFlag values.
*/
#ifndef RAPIDJSON_VALIDATE_DEFAULT_FLAGS
#define RAPIDJSON_VALIDATE_DEFAULT_FLAGS kValidateNoFlags
#endif
//! Combination of validate flags
/*! \see
*/
enum ValidateFlag {
kValidateNoFlags = 0, //!< No flags are set.
kValidateContinueOnErrorFlag = 1, //!< Don't stop after first validation error.
kValidateDefaultFlags = RAPIDJSON_VALIDATE_DEFAULT_FLAGS //!< Default validate flags. Can be customized by defining RAPIDJSON_VALIDATE_DEFAULT_FLAGS
};
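// Note: these flags are typically handed to a validator through ISchemaValidator::SetValidateFlags()
// (declared below); with kValidateContinueOnErrorFlag set, validation keeps collecting errors
// instead of stopping at the first failed keyword.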
///////////////////////////////////////////////////////////////////////////////
// Forward declarations
template <typename ValueType, typename Allocator>
class GenericSchemaDocument;
namespace internal {
template <typename SchemaDocumentType>
class Schema;
///////////////////////////////////////////////////////////////////////////////
// ISchemaValidator
class ISchemaValidator {
public:
virtual ~ISchemaValidator() {}
virtual bool IsValid() const = 0;
virtual void SetValidateFlags(unsigned flags) = 0;
virtual unsigned GetValidateFlags() const = 0;
};
///////////////////////////////////////////////////////////////////////////////
// ISchemaStateFactory
template <typename SchemaType>
class ISchemaStateFactory {
public:
virtual ~ISchemaStateFactory() {}
virtual ISchemaValidator* CreateSchemaValidator(const SchemaType&, const bool inheritContinueOnErrors) = 0;
virtual void DestroySchemaValidator(ISchemaValidator* validator) = 0;
virtual void* CreateHasher() = 0;
virtual uint64_t GetHashCode(void* hasher) = 0;
virtual void DestroryHasher(void* hasher) = 0;
virtual void* MallocState(size_t size) = 0;
virtual void FreeState(void* p) = 0;
};
///////////////////////////////////////////////////////////////////////////////
// IValidationErrorHandler
template <typename SchemaType>
class IValidationErrorHandler {
public:
typedef typename SchemaType::Ch Ch;
typedef typename SchemaType::SValue SValue;
virtual ~IValidationErrorHandler() {}
virtual void NotMultipleOf(int64_t actual, const SValue& expected) = 0;
virtual void NotMultipleOf(uint64_t actual, const SValue& expected) = 0;
virtual void NotMultipleOf(double actual, const SValue& expected) = 0;
virtual void AboveMaximum(int64_t actual, const SValue& expected, bool exclusive) = 0;
virtual void AboveMaximum(uint64_t actual, const SValue& expected, bool exclusive) = 0;
virtual void AboveMaximum(double actual, const SValue& expected, bool exclusive) = 0;
virtual void BelowMinimum(int64_t actual, const SValue& expected, bool exclusive) = 0;
virtual void BelowMinimum(uint64_t actual, const SValue& expected, bool exclusive) = 0;
virtual void BelowMinimum(double actual, const SValue& expected, bool exclusive) = 0;
virtual void TooLong(const Ch* str, SizeType length, SizeType expected) = 0;
virtual void TooShort(const Ch* str, SizeType length, SizeType expected) = 0;
virtual void DoesNotMatch(const Ch* str, SizeType length) = 0;
virtual void DisallowedItem(SizeType index) = 0;
virtual void TooFewItems(SizeType actualCount, SizeType expectedCount) = 0;
virtual void TooManyItems(SizeType actualCount, SizeType expectedCount) = 0;
virtual void DuplicateItems(SizeType index1, SizeType index2) = 0;
virtual void TooManyProperties(SizeType actualCount, SizeType expectedCount) = 0;
virtual void TooFewProperties(SizeType actualCount, SizeType expectedCount) = 0;
virtual void StartMissingProperties() = 0;
virtual void AddMissingProperty(const SValue& name) = 0;
virtual bool EndMissingProperties() = 0;
virtual void PropertyViolations(ISchemaValidator** subvalidators, SizeType count) = 0;
virtual void DisallowedProperty(const Ch* name, SizeType length) = 0;
virtual void StartDependencyErrors() = 0;
virtual void StartMissingDependentProperties() = 0;
virtual void AddMissingDependentProperty(const SValue& targetName) = 0;
virtual void EndMissingDependentProperties(const SValue& sourceName) = 0;
virtual void AddDependencySchemaError(const SValue& sourceName, ISchemaValidator* subvalidator) = 0;
virtual bool EndDependencyErrors() = 0;
virtual void DisallowedValue(const ValidateErrorCode code) = 0;
virtual void StartDisallowedType() = 0;
virtual void AddExpectedType(const typename SchemaType::ValueType& expectedType) = 0;
virtual void EndDisallowedType(const typename SchemaType::ValueType& actualType) = 0;
virtual void NotAllOf(ISchemaValidator** subvalidators, SizeType count) = 0;
virtual void NoneOf(ISchemaValidator** subvalidators, SizeType count) = 0;
virtual void NotOneOf(ISchemaValidator** subvalidators, SizeType count, bool matched) = 0;
virtual void Disallowed() = 0;
};
///////////////////////////////////////////////////////////////////////////////
// Hasher
// For comparison of compound values
template<typename Encoding, typename Allocator>
class Hasher {
public:
typedef typename Encoding::Ch Ch;
Hasher(Allocator* allocator = 0, size_t stackCapacity = kDefaultSize) : stack_(allocator, stackCapacity) {}
bool Null() { return WriteType(kNullType); }
bool Bool(bool b) { return WriteType(b ? kTrueType : kFalseType); }
bool Int(int i) { Number n; n.u.i = i; n.d = static_cast<double>(i); return WriteNumber(n); }
bool Uint(unsigned u) { Number n; n.u.u = u; n.d = static_cast<double>(u); return WriteNumber(n); }
bool Int64(int64_t i) { Number n; n.u.i = i; n.d = static_cast<double>(i); return WriteNumber(n); }
bool Uint64(uint64_t u) { Number n; n.u.u = u; n.d = static_cast<double>(u); return WriteNumber(n); }
bool Double(double d) {
Number n;
if (d < 0) n.u.i = static_cast<int64_t>(d);
else n.u.u = static_cast<uint64_t>(d);
n.d = d;
return WriteNumber(n);
}
bool RawNumber(const Ch* str, SizeType len, bool) {
WriteBuffer(kNumberType, str, len * sizeof(Ch));
return true;
}
bool String(const Ch* str, SizeType len, bool) {
WriteBuffer(kStringType, str, len * sizeof(Ch));
return true;
}
bool StartObject() { return true; }
bool Key(const Ch* str, SizeType len, bool copy) { return String(str, len, copy); }
bool EndObject(SizeType memberCount) {
uint64_t h = Hash(0, kObjectType);
uint64_t* kv = stack_.template Pop<uint64_t>(memberCount * 2);
for (SizeType i = 0; i < memberCount; i++)
h ^= Hash(kv[i * 2], kv[i * 2 + 1]); // XOR the member hashes so the result is insensitive to member order
*stack_.template Push<uint64_t>() = h;
return true;
}
bool StartArray() { return true; }
bool EndArray(SizeType elementCount) {
uint64_t h = Hash(0, kArrayType);
uint64_t* e = stack_.template Pop<uint64_t>(elementCount);
for (SizeType i = 0; i < elementCount; i++)
h = Hash(h, e[i]); // Chain the hashes so the result is sensitive to element order
*stack_.template Push<uint64_t>() = h;
return true;
}
bool IsValid() const { return stack_.GetSize() == sizeof(uint64_t); }
uint64_t GetHashCode() const {
RAPIDJSON_ASSERT(IsValid());
return *stack_.template Top<uint64_t>();
}
private:
static const size_t kDefaultSize = 256;
struct Number {
union U {
uint64_t u;
int64_t i;
}u;
double d;
};
bool WriteType(Type type) { return WriteBuffer(type, 0, 0); }
bool WriteNumber(const Number& n) { return WriteBuffer(kNumberType, &n, sizeof(n)); }
bool WriteBuffer(Type type, const void* data, size_t len) {
// FNV-1a from http://isthe.com/chongo/tech/comp/fnv/
uint64_t h = Hash(RAPIDJSON_UINT64_C2(0x84222325, 0xcbf29ce4), type);
const unsigned char* d = static_cast<const unsigned char*>(data);
for (size_t i = 0; i < len; i++)
h = Hash(h, d[i]);
*stack_.template Push<uint64_t>() = h;
return true;
}
static uint64_t Hash(uint64_t h, uint64_t d) {
static const uint64_t kPrime = RAPIDJSON_UINT64_C2(0x00000100, 0x000001b3);
h ^= d;
h *= kPrime;
return h;
}
Stack<Allocator> stack_;
};
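// A rough usage sketch for the Hasher above (this is what the validator does internally when it
// needs to compare compound values, e.g. for "enum" and "uniqueItems"):
//   Hasher<UTF8<>, CrtAllocator> h;
//   someValue.Accept(h);                 // someValue: any GenericValue; replays it as SAX events
//   if (h.IsValid())
//       uint64_t code = h.GetHashCode(); // FNV-1a based; object members are XORed together
//                                        // (order-insensitive), array elements are chained
//                                        // (order-sensitive)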
///////////////////////////////////////////////////////////////////////////////
// SchemaValidationContext
template <typename SchemaDocumentType>
struct SchemaValidationContext {
typedef Schema<SchemaDocumentType> SchemaType;
typedef ISchemaStateFactory<SchemaType> SchemaValidatorFactoryType;
typedef IValidationErrorHandler<SchemaType> ErrorHandlerType;
typedef typename SchemaType::ValueType ValueType;
typedef typename ValueType::Ch Ch;
enum PatternValidatorType {
kPatternValidatorOnly,
kPatternValidatorWithProperty,
kPatternValidatorWithAdditionalProperty
};
SchemaValidationContext(SchemaValidatorFactoryType& f, ErrorHandlerType& eh, const SchemaType* s) :
factory(f),
error_handler(eh),
schema(s),
valueSchema(),
invalidKeyword(),
invalidCode(),
hasher(),
arrayElementHashCodes(),
validators(),
validatorCount(),
patternPropertiesValidators(),
patternPropertiesValidatorCount(),
patternPropertiesSchemas(),
patternPropertiesSchemaCount(),
valuePatternValidatorType(kPatternValidatorOnly),
propertyExist(),
inArray(false),
valueUniqueness(false),
arrayUniqueness(false)
{
}
~SchemaValidationContext() {
if (hasher)
factory.DestroryHasher(hasher);
if (validators) {
for (SizeType i = 0; i < validatorCount; i++)
factory.DestroySchemaValidator(validators[i]);
factory.FreeState(validators);
}
if (patternPropertiesValidators) {
for (SizeType i = 0; i < patternPropertiesValidatorCount; i++)
factory.DestroySchemaValidator(patternPropertiesValidators[i]);
factory.FreeState(patternPropertiesValidators);
}
if (patternPropertiesSchemas)
factory.FreeState(patternPropertiesSchemas);
if (propertyExist)
factory.FreeState(propertyExist);
}
SchemaValidatorFactoryType& factory;
ErrorHandlerType& error_handler;
const SchemaType* schema;
const SchemaType* valueSchema;
const Ch* invalidKeyword;
ValidateErrorCode invalidCode;
void* hasher; // Only validators access this
void* arrayElementHashCodes; // Only validators access this
ISchemaValidator** validators;
SizeType validatorCount;
ISchemaValidator** patternPropertiesValidators;
SizeType patternPropertiesValidatorCount;
const SchemaType** patternPropertiesSchemas;
SizeType patternPropertiesSchemaCount;
PatternValidatorType valuePatternValidatorType;
PatternValidatorType objectPatternValidatorType;
SizeType arrayElementIndex;
bool* propertyExist;
bool inArray;
bool valueUniqueness;
bool arrayUniqueness;
};
///////////////////////////////////////////////////////////////////////////////
// Schema
template <typename SchemaDocumentType>
class Schema {
public:
typedef typename SchemaDocumentType::ValueType ValueType;
typedef typename SchemaDocumentType::AllocatorType AllocatorType;
typedef typename SchemaDocumentType::PointerType PointerType;
typedef typename ValueType::EncodingType EncodingType;
typedef typename EncodingType::Ch Ch;
typedef SchemaValidationContext<SchemaDocumentType> Context;
typedef Schema<SchemaDocumentType> SchemaType;
typedef GenericValue<EncodingType, AllocatorType> SValue;
typedef IValidationErrorHandler<Schema> ErrorHandler;
typedef GenericUri<ValueType, AllocatorType> UriType;
friend class GenericSchemaDocument<ValueType, AllocatorType>;
Schema(SchemaDocumentType* schemaDocument, const PointerType& p, const ValueType& value, const ValueType& document, AllocatorType* allocator, const UriType& id = UriType()) :
allocator_(allocator),
uri_(schemaDocument->GetURI(), *allocator),
id_(id),
pointer_(p, allocator),
typeless_(schemaDocument->GetTypeless()),
enum_(),
enumCount_(),
not_(),
type_((1 << kTotalSchemaType) - 1), // typeless
validatorCount_(),
notValidatorIndex_(),
properties_(),
additionalPropertiesSchema_(),
patternProperties_(),
patternPropertyCount_(),
propertyCount_(),
minProperties_(),
maxProperties_(SizeType(~0)),
additionalProperties_(true),
hasDependencies_(),
hasRequired_(),
hasSchemaDependencies_(),
additionalItemsSchema_(),
itemsList_(),
itemsTuple_(),
itemsTupleCount_(),
minItems_(),
maxItems_(SizeType(~0)),
additionalItems_(true),
uniqueItems_(false),
pattern_(),
minLength_(0),
maxLength_(~SizeType(0)),
exclusiveMinimum_(false),
exclusiveMaximum_(false),
defaultValueLength_(0)
{
typedef typename ValueType::ConstValueIterator ConstValueIterator;
typedef typename ValueType::ConstMemberIterator ConstMemberIterator;
// PR #1393
// Add this Schema and its $ref(s) to schemaDocument's map early, to avoid infinite
// recursion with recursive schemas, since schemaDocument->getSchema() is always
// checked before a new schema is created. Don't cache typeless_, though.
if (this != typeless_) {
typedef typename SchemaDocumentType::SchemaEntry SchemaEntry;
SchemaEntry *entry = schemaDocument->schemaMap_.template Push<SchemaEntry>();
new (entry) SchemaEntry(pointer_, this, true, allocator_);
schemaDocument->AddSchemaRefs(this);
}
if (!value.IsObject())
return;
// If we have an id property, resolve it with the in-scope id
if (const ValueType* v = GetMember(value, GetIdString())) {
if (v->IsString()) {
UriType local(*v, allocator);
id_ = local.Resolve(id_, allocator);
}
}
if (const ValueType* v = GetMember(value, GetTypeString())) {
type_ = 0;
if (v->IsString())
AddType(*v);
else if (v->IsArray())
for (ConstValueIterator itr = v->Begin(); itr != v->End(); ++itr)
AddType(*itr);
}
if (const ValueType* v = GetMember(value, GetEnumString())) {
if (v->IsArray() && v->Size() > 0) {
enum_ = static_cast<uint64_t*>(allocator_->Malloc(sizeof(uint64_t) * v->Size()));
for (ConstValueIterator itr = v->Begin(); itr != v->End(); ++itr) {
typedef Hasher<EncodingType, MemoryPoolAllocator<> > EnumHasherType;
char buffer[256u + 24];
MemoryPoolAllocator<> hasherAllocator(buffer, sizeof(buffer));
EnumHasherType h(&hasherAllocator, 256);
itr->Accept(h);
enum_[enumCount_++] = h.GetHashCode();
}
}
}
if (schemaDocument) {
AssignIfExist(allOf_, *schemaDocument, p, value, GetAllOfString(), document);
AssignIfExist(anyOf_, *schemaDocument, p, value, GetAnyOfString(), document);
AssignIfExist(oneOf_, *schemaDocument, p, value, GetOneOfString(), document);
if (const ValueType* v = GetMember(value, GetNotString())) {
schemaDocument->CreateSchema(&not_, p.Append(GetNotString(), allocator_), *v, document, id_);
notValidatorIndex_ = validatorCount_;
validatorCount_++;
}
}
// Object
const ValueType* properties = GetMember(value, GetPropertiesString());
const ValueType* required = GetMember(value, GetRequiredString());
const ValueType* dependencies = GetMember(value, GetDependenciesString());
{
// Gather properties from properties/required/dependencies
SValue allProperties(kArrayType);
if (properties && properties->IsObject())
for (ConstMemberIterator itr = properties->MemberBegin(); itr != properties->MemberEnd(); ++itr)
AddUniqueElement(allProperties, itr->name);
if (required && required->IsArray())
for (ConstValueIterator itr = required->Begin(); itr != required->End(); ++itr)
if (itr->IsString())
AddUniqueElement(allProperties, *itr);
if (dependencies && dependencies->IsObject())
for (ConstMemberIterator itr = dependencies->MemberBegin(); itr != dependencies->MemberEnd(); ++itr) {
AddUniqueElement(allProperties, itr->name);
if (itr->value.IsArray())
for (ConstValueIterator i = itr->value.Begin(); i != itr->value.End(); ++i)
if (i->IsString())
AddUniqueElement(allProperties, *i);
}
if (allProperties.Size() > 0) {
propertyCount_ = allProperties.Size();
properties_ = static_cast<Property*>(allocator_->Malloc(sizeof(Property) * propertyCount_));
for (SizeType i = 0; i < propertyCount_; i++) {
new (&properties_[i]) Property();
properties_[i].name = allProperties[i];
properties_[i].schema = typeless_;
}
}
}
if (properties && properties->IsObject()) {
PointerType q = p.Append(GetPropertiesString(), allocator_);
for (ConstMemberIterator itr = properties->MemberBegin(); itr != properties->MemberEnd(); ++itr) {
SizeType index;
if (FindPropertyIndex(itr->name, &index))
schemaDocument->CreateSchema(&properties_[index].schema, q.Append(itr->name, allocator_), itr->value, document, id_);
}
}
if (const ValueType* v = GetMember(value, GetPatternPropertiesString())) {
PointerType q = p.Append(GetPatternPropertiesString(), allocator_);
patternProperties_ = static_cast<PatternProperty*>(allocator_->Malloc(sizeof(PatternProperty) * v->MemberCount()));
patternPropertyCount_ = 0;
for (ConstMemberIterator itr = v->MemberBegin(); itr != v->MemberEnd(); ++itr) {
new (&patternProperties_[patternPropertyCount_]) PatternProperty();
patternProperties_[patternPropertyCount_].pattern = CreatePattern(itr->name);
schemaDocument->CreateSchema(&patternProperties_[patternPropertyCount_].schema, q.Append(itr->name, allocator_), itr->value, document, id_);
patternPropertyCount_++;
}
}
if (required && required->IsArray())
for (ConstValueIterator itr = required->Begin(); itr != required->End(); ++itr)
if (itr->IsString()) {
SizeType index;
if (FindPropertyIndex(*itr, &index)) {
properties_[index].required = true;
hasRequired_ = true;
}
}
if (dependencies && dependencies->IsObject()) {
PointerType q = p.Append(GetDependenciesString(), allocator_);
hasDependencies_ = true;
for (ConstMemberIterator itr = dependencies->MemberBegin(); itr != dependencies->MemberEnd(); ++itr) {
SizeType sourceIndex;
if (FindPropertyIndex(itr->name, &sourceIndex)) {
if (itr->value.IsArray()) {
properties_[sourceIndex].dependencies = static_cast<bool*>(allocator_->Malloc(sizeof(bool) * propertyCount_));
std::memset(properties_[sourceIndex].dependencies, 0, sizeof(bool)* propertyCount_);
for (ConstValueIterator targetItr = itr->value.Begin(); targetItr != itr->value.End(); ++targetItr) {
SizeType targetIndex;
if (FindPropertyIndex(*targetItr, &targetIndex))
properties_[sourceIndex].dependencies[targetIndex] = true;
}
}
else if (itr->value.IsObject()) {
hasSchemaDependencies_ = true;
schemaDocument->CreateSchema(&properties_[sourceIndex].dependenciesSchema, q.Append(itr->name, allocator_), itr->value, document, id_);
properties_[sourceIndex].dependenciesValidatorIndex = validatorCount_;
validatorCount_++;
}
}
}
}
if (const ValueType* v = GetMember(value, GetAdditionalPropertiesString())) {
if (v->IsBool())
additionalProperties_ = v->GetBool();
else if (v->IsObject())
schemaDocument->CreateSchema(&additionalPropertiesSchema_, p.Append(GetAdditionalPropertiesString(), allocator_), *v, document, id_);
}
AssignIfExist(minProperties_, value, GetMinPropertiesString());
AssignIfExist(maxProperties_, value, GetMaxPropertiesString());
// Array
if (const ValueType* v = GetMember(value, GetItemsString())) {
PointerType q = p.Append(GetItemsString(), allocator_);
if (v->IsObject()) // List validation
schemaDocument->CreateSchema(&itemsList_, q, *v, document, id_);
else if (v->IsArray()) { // Tuple validation
itemsTuple_ = static_cast<const Schema**>(allocator_->Malloc(sizeof(const Schema*) * v->Size()));
SizeType index = 0;
for (ConstValueIterator itr = v->Begin(); itr != v->End(); ++itr, index++)
schemaDocument->CreateSchema(&itemsTuple_[itemsTupleCount_++], q.Append(index, allocator_), *itr, document, id_);
}
}
AssignIfExist(minItems_, value, GetMinItemsString());
AssignIfExist(maxItems_, value, GetMaxItemsString());
if (const ValueType* v = GetMember(value, GetAdditionalItemsString())) {
if (v->IsBool())
additionalItems_ = v->GetBool();
else if (v->IsObject())
schemaDocument->CreateSchema(&additionalItemsSchema_, p.Append(GetAdditionalItemsString(), allocator_), *v, document, id_);
}
AssignIfExist(uniqueItems_, value, GetUniqueItemsString());
// String
AssignIfExist(minLength_, value, GetMinLengthString());
AssignIfExist(maxLength_, value, GetMaxLengthString());
if (const ValueType* v = GetMember(value, GetPatternString()))
pattern_ = CreatePattern(*v);
// Number
if (const ValueType* v = GetMember(value, GetMinimumString()))
if (v->IsNumber())
minimum_.CopyFrom(*v, *allocator_);
if (const ValueType* v = GetMember(value, GetMaximumString()))
if (v->IsNumber())
maximum_.CopyFrom(*v, *allocator_);
AssignIfExist(exclusiveMinimum_, value, GetExclusiveMinimumString());
AssignIfExist(exclusiveMaximum_, value, GetExclusiveMaximumString());
if (const ValueType* v = GetMember(value, GetMultipleOfString()))
if (v->IsNumber() && v->GetDouble() > 0.0)
multipleOf_.CopyFrom(*v, *allocator_);
// Default
if (const ValueType* v = GetMember(value, GetDefaultValueString()))
if (v->IsString())
defaultValueLength_ = v->GetStringLength();
}
~Schema() {
AllocatorType::Free(enum_);
if (properties_) {
for (SizeType i = 0; i < propertyCount_; i++)
properties_[i].~Property();
AllocatorType::Free(properties_);
}
if (patternProperties_) {
for (SizeType i = 0; i < patternPropertyCount_; i++)
patternProperties_[i].~PatternProperty();
AllocatorType::Free(patternProperties_);
}
AllocatorType::Free(itemsTuple_);
#if RAPIDJSON_SCHEMA_HAS_REGEX
if (pattern_) {
pattern_->~RegexType();
AllocatorType::Free(pattern_);
}
#endif
}
const SValue& GetURI() const {
return uri_;
}
const UriType& GetId() const {
return id_;
}
const PointerType& GetPointer() const {
return pointer_;
}
bool BeginValue(Context& context) const {
if (context.inArray) {
if (uniqueItems_)
context.valueUniqueness = true;
if (itemsList_)
context.valueSchema = itemsList_;
else if (itemsTuple_) {
if (context.arrayElementIndex < itemsTupleCount_)
context.valueSchema = itemsTuple_[context.arrayElementIndex];
else if (additionalItemsSchema_)
context.valueSchema = additionalItemsSchema_;
else if (additionalItems_)
context.valueSchema = typeless_;
else {
context.error_handler.DisallowedItem(context.arrayElementIndex);
// valueSchema must be set for when kValidateContinueOnErrorFlag is set; otherwise a spurious type error is reported
context.valueSchema = typeless_;
// Must bump arrayElementIndex for when kValidateContinueOnErrorFlag is set
context.arrayElementIndex++;
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorAdditionalItems);
}
}
else
context.valueSchema = typeless_;
context.arrayElementIndex++;
}
return true;
}
RAPIDJSON_FORCEINLINE bool EndValue(Context& context) const {
// Only check pattern properties if we have validators
if (context.patternPropertiesValidatorCount > 0) {
bool otherValid = false;
SizeType count = context.patternPropertiesValidatorCount;
if (context.objectPatternValidatorType != Context::kPatternValidatorOnly)
otherValid = context.patternPropertiesValidators[--count]->IsValid();
bool patternValid = true;
for (SizeType i = 0; i < count; i++)
if (!context.patternPropertiesValidators[i]->IsValid()) {
patternValid = false;
break;
}
if (context.objectPatternValidatorType == Context::kPatternValidatorOnly) {
if (!patternValid) {
context.error_handler.PropertyViolations(context.patternPropertiesValidators, count);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorPatternProperties);
}
}
else if (context.objectPatternValidatorType == Context::kPatternValidatorWithProperty) {
if (!patternValid || !otherValid) {
context.error_handler.PropertyViolations(context.patternPropertiesValidators, count + 1);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorPatternProperties);
}
}
else if (!patternValid && !otherValid) { // kPatternValidatorWithAdditionalProperty)
context.error_handler.PropertyViolations(context.patternPropertiesValidators, count + 1);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorPatternProperties);
}
}
// For enums only check if we have a hasher
if (enum_ && context.hasher) {
const uint64_t h = context.factory.GetHashCode(context.hasher);
for (SizeType i = 0; i < enumCount_; i++)
if (enum_[i] == h)
goto foundEnum;
context.error_handler.DisallowedValue(kValidateErrorEnum);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorEnum);
foundEnum:;
}
// Only check allOf etc if we have validators
if (context.validatorCount > 0) {
if (allOf_.schemas)
for (SizeType i = allOf_.begin; i < allOf_.begin + allOf_.count; i++)
if (!context.validators[i]->IsValid()) {
context.error_handler.NotAllOf(&context.validators[allOf_.begin], allOf_.count);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorAllOf);
}
if (anyOf_.schemas) {
for (SizeType i = anyOf_.begin; i < anyOf_.begin + anyOf_.count; i++)
if (context.validators[i]->IsValid())
goto foundAny;
context.error_handler.NoneOf(&context.validators[anyOf_.begin], anyOf_.count);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorAnyOf);
foundAny:;
}
if (oneOf_.schemas) {
bool oneValid = false;
for (SizeType i = oneOf_.begin; i < oneOf_.begin + oneOf_.count; i++)
if (context.validators[i]->IsValid()) {
if (oneValid) {
context.error_handler.NotOneOf(&context.validators[oneOf_.begin], oneOf_.count, true);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorOneOfMatch);
} else
oneValid = true;
}
if (!oneValid) {
context.error_handler.NotOneOf(&context.validators[oneOf_.begin], oneOf_.count, false);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorOneOf);
}
}
if (not_ && context.validators[notValidatorIndex_]->IsValid()) {
context.error_handler.Disallowed();
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorNot);
}
}
return true;
}
bool Null(Context& context) const {
if (!(type_ & (1 << kNullSchemaType))) {
DisallowedType(context, GetNullString());
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorType);
}
return CreateParallelValidator(context);
}
bool Bool(Context& context, bool) const {
if (!(type_ & (1 << kBooleanSchemaType))) {
DisallowedType(context, GetBooleanString());
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorType);
}
return CreateParallelValidator(context);
}
bool Int(Context& context, int i) const {
if (!CheckInt(context, i))
return false;
return CreateParallelValidator(context);
}
bool Uint(Context& context, unsigned u) const {
if (!CheckUint(context, u))
return false;
return CreateParallelValidator(context);
}
bool Int64(Context& context, int64_t i) const {
if (!CheckInt(context, i))
return false;
return CreateParallelValidator(context);
}
bool Uint64(Context& context, uint64_t u) const {
if (!CheckUint(context, u))
return false;
return CreateParallelValidator(context);
}
bool Double(Context& context, double d) const {
if (!(type_ & (1 << kNumberSchemaType))) {
DisallowedType(context, GetNumberString());
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorType);
}
if (!minimum_.IsNull() && !CheckDoubleMinimum(context, d))
return false;
if (!maximum_.IsNull() && !CheckDoubleMaximum(context, d))
return false;
if (!multipleOf_.IsNull() && !CheckDoubleMultipleOf(context, d))
return false;
return CreateParallelValidator(context);
}
bool String(Context& context, const Ch* str, SizeType length, bool) const {
if (!(type_ & (1 << kStringSchemaType))) {
DisallowedType(context, GetStringString());
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorType);
}
if (minLength_ != 0 || maxLength_ != SizeType(~0)) {
SizeType count;
if (internal::CountStringCodePoint<EncodingType>(str, length, &count)) {
if (count < minLength_) {
context.error_handler.TooShort(str, length, minLength_);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorMinLength);
}
if (count > maxLength_) {
context.error_handler.TooLong(str, length, maxLength_);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorMaxLength);
}
}
}
if (pattern_ && !IsPatternMatch(pattern_, str, length)) {
context.error_handler.DoesNotMatch(str, length);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorPattern);
}
return CreateParallelValidator(context);
}
bool StartObject(Context& context) const {
if (!(type_ & (1 << kObjectSchemaType))) {
DisallowedType(context, GetObjectString());
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorType);
}
if (hasDependencies_ || hasRequired_) {
context.propertyExist = static_cast<bool*>(context.factory.MallocState(sizeof(bool) * propertyCount_));
std::memset(context.propertyExist, 0, sizeof(bool) * propertyCount_);
}
if (patternProperties_) { // pre-allocate schema array
SizeType count = patternPropertyCount_ + 1; // extra for valuePatternValidatorType
context.patternPropertiesSchemas = static_cast<const SchemaType**>(context.factory.MallocState(sizeof(const SchemaType*) * count));
context.patternPropertiesSchemaCount = 0;
std::memset(context.patternPropertiesSchemas, 0, sizeof(SchemaType*) * count);
}
return CreateParallelValidator(context);
}
bool Key(Context& context, const Ch* str, SizeType len, bool) const {
if (patternProperties_) {
context.patternPropertiesSchemaCount = 0;
for (SizeType i = 0; i < patternPropertyCount_; i++)
if (patternProperties_[i].pattern && IsPatternMatch(patternProperties_[i].pattern, str, len)) {
context.patternPropertiesSchemas[context.patternPropertiesSchemaCount++] = patternProperties_[i].schema;
context.valueSchema = typeless_;
}
}
SizeType index = 0;
if (FindPropertyIndex(ValueType(str, len).Move(), &index)) {
if (context.patternPropertiesSchemaCount > 0) {
context.patternPropertiesSchemas[context.patternPropertiesSchemaCount++] = properties_[index].schema;
context.valueSchema = typeless_;
context.valuePatternValidatorType = Context::kPatternValidatorWithProperty;
}
else
context.valueSchema = properties_[index].schema;
if (context.propertyExist)
context.propertyExist[index] = true;
return true;
}
if (additionalPropertiesSchema_) {
if (context.patternPropertiesSchemaCount > 0) {
context.patternPropertiesSchemas[context.patternPropertiesSchemaCount++] = additionalPropertiesSchema_;
context.valueSchema = typeless_;
context.valuePatternValidatorType = Context::kPatternValidatorWithAdditionalProperty;
}
else
context.valueSchema = additionalPropertiesSchema_;
return true;
}
else if (additionalProperties_) {
context.valueSchema = typeless_;
return true;
}
if (context.patternPropertiesSchemaCount == 0) { // patternProperties are not additional properties
// valueSchema must be set for when kValidateContinueOnErrorFlag is set; otherwise a spurious type error is reported
context.valueSchema = typeless_;
context.error_handler.DisallowedProperty(str, len);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorAdditionalProperties);
}
return true;
}
bool EndObject(Context& context, SizeType memberCount) const {
if (hasRequired_) {
context.error_handler.StartMissingProperties();
for (SizeType index = 0; index < propertyCount_; index++)
if (properties_[index].required && !context.propertyExist[index])
if (properties_[index].schema->defaultValueLength_ == 0 )
context.error_handler.AddMissingProperty(properties_[index].name);
if (context.error_handler.EndMissingProperties())
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorRequired);
}
if (memberCount < minProperties_) {
context.error_handler.TooFewProperties(memberCount, minProperties_);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorMinProperties);
}
if (memberCount > maxProperties_) {
context.error_handler.TooManyProperties(memberCount, maxProperties_);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorMaxProperties);
}
if (hasDependencies_) {
context.error_handler.StartDependencyErrors();
for (SizeType sourceIndex = 0; sourceIndex < propertyCount_; sourceIndex++) {
const Property& source = properties_[sourceIndex];
if (context.propertyExist[sourceIndex]) {
if (source.dependencies) {
context.error_handler.StartMissingDependentProperties();
for (SizeType targetIndex = 0; targetIndex < propertyCount_; targetIndex++)
if (source.dependencies[targetIndex] && !context.propertyExist[targetIndex])
context.error_handler.AddMissingDependentProperty(properties_[targetIndex].name);
context.error_handler.EndMissingDependentProperties(source.name);
}
else if (source.dependenciesSchema) {
ISchemaValidator* dependenciesValidator = context.validators[source.dependenciesValidatorIndex];
if (!dependenciesValidator->IsValid())
context.error_handler.AddDependencySchemaError(source.name, dependenciesValidator);
}
}
}
if (context.error_handler.EndDependencyErrors())
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorDependencies);
}
return true;
}
bool StartArray(Context& context) const {
context.arrayElementIndex = 0;
context.inArray = true; // Ensure we note that we are in an array
if (!(type_ & (1 << kArraySchemaType))) {
DisallowedType(context, GetArrayString());
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorType);
}
return CreateParallelValidator(context);
}
bool EndArray(Context& context, SizeType elementCount) const {
context.inArray = false;
if (elementCount < minItems_) {
context.error_handler.TooFewItems(elementCount, minItems_);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorMinItems);
}
if (elementCount > maxItems_) {
context.error_handler.TooManyItems(elementCount, maxItems_);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorMaxItems);
}
return true;
}
static const ValueType& GetValidateErrorKeyword(ValidateErrorCode validateErrorCode) {
switch (validateErrorCode) {
case kValidateErrorMultipleOf: return GetMultipleOfString();
case kValidateErrorMaximum: return GetMaximumString();
case kValidateErrorExclusiveMaximum: return GetMaximumString(); // Same
case kValidateErrorMinimum: return GetMinimumString();
case kValidateErrorExclusiveMinimum: return GetMinimumString(); // Same
case kValidateErrorMaxLength: return GetMaxLengthString();
case kValidateErrorMinLength: return GetMinLengthString();
case kValidateErrorPattern: return GetPatternString();
case kValidateErrorMaxItems: return GetMaxItemsString();
case kValidateErrorMinItems: return GetMinItemsString();
case kValidateErrorUniqueItems: return GetUniqueItemsString();
case kValidateErrorAdditionalItems: return GetAdditionalItemsString();
case kValidateErrorMaxProperties: return GetMaxPropertiesString();
case kValidateErrorMinProperties: return GetMinPropertiesString();
case kValidateErrorRequired: return GetRequiredString();
case kValidateErrorAdditionalProperties: return GetAdditionalPropertiesString();
case kValidateErrorPatternProperties: return GetPatternPropertiesString();
case kValidateErrorDependencies: return GetDependenciesString();
case kValidateErrorEnum: return GetEnumString();
case kValidateErrorType: return GetTypeString();
case kValidateErrorOneOf: return GetOneOfString();
case kValidateErrorOneOfMatch: return GetOneOfString(); // Same
case kValidateErrorAllOf: return GetAllOfString();
case kValidateErrorAnyOf: return GetAnyOfString();
case kValidateErrorNot: return GetNotString();
default: return GetNullString();
}
}
// Generate functions that return keyword string literals of the appropriate Ch type
#define RAPIDJSON_STRING_(name, ...) \
static const ValueType& Get##name##String() {\
static const Ch s[] = { __VA_ARGS__, '\0' };\
static const ValueType v(s, static_cast<SizeType>(sizeof(s) / sizeof(Ch) - 1));\
return v;\
}
RAPIDJSON_STRING_(Null, 'n', 'u', 'l', 'l')
RAPIDJSON_STRING_(Boolean, 'b', 'o', 'o', 'l', 'e', 'a', 'n')
RAPIDJSON_STRING_(Object, 'o', 'b', 'j', 'e', 'c', 't')
RAPIDJSON_STRING_(Array, 'a', 'r', 'r', 'a', 'y')
RAPIDJSON_STRING_(String, 's', 't', 'r', 'i', 'n', 'g')
RAPIDJSON_STRING_(Number, 'n', 'u', 'm', 'b', 'e', 'r')
RAPIDJSON_STRING_(Integer, 'i', 'n', 't', 'e', 'g', 'e', 'r')
RAPIDJSON_STRING_(Type, 't', 'y', 'p', 'e')
RAPIDJSON_STRING_(Enum, 'e', 'n', 'u', 'm')
RAPIDJSON_STRING_(AllOf, 'a', 'l', 'l', 'O', 'f')
RAPIDJSON_STRING_(AnyOf, 'a', 'n', 'y', 'O', 'f')
RAPIDJSON_STRING_(OneOf, 'o', 'n', 'e', 'O', 'f')
RAPIDJSON_STRING_(Not, 'n', 'o', 't')
RAPIDJSON_STRING_(Properties, 'p', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's')
RAPIDJSON_STRING_(Required, 'r', 'e', 'q', 'u', 'i', 'r', 'e', 'd')
RAPIDJSON_STRING_(Dependencies, 'd', 'e', 'p', 'e', 'n', 'd', 'e', 'n', 'c', 'i', 'e', 's')
RAPIDJSON_STRING_(PatternProperties, 'p', 'a', 't', 't', 'e', 'r', 'n', 'P', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's')
RAPIDJSON_STRING_(AdditionalProperties, 'a', 'd', 'd', 'i', 't', 'i', 'o', 'n', 'a', 'l', 'P', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's')
RAPIDJSON_STRING_(MinProperties, 'm', 'i', 'n', 'P', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's')
RAPIDJSON_STRING_(MaxProperties, 'm', 'a', 'x', 'P', 'r', 'o', 'p', 'e', 'r', 't', 'i', 'e', 's')
RAPIDJSON_STRING_(Items, 'i', 't', 'e', 'm', 's')
RAPIDJSON_STRING_(MinItems, 'm', 'i', 'n', 'I', 't', 'e', 'm', 's')
RAPIDJSON_STRING_(MaxItems, 'm', 'a', 'x', 'I', 't', 'e', 'm', 's')
RAPIDJSON_STRING_(AdditionalItems, 'a', 'd', 'd', 'i', 't', 'i', 'o', 'n', 'a', 'l', 'I', 't', 'e', 'm', 's')
RAPIDJSON_STRING_(UniqueItems, 'u', 'n', 'i', 'q', 'u', 'e', 'I', 't', 'e', 'm', 's')
RAPIDJSON_STRING_(MinLength, 'm', 'i', 'n', 'L', 'e', 'n', 'g', 't', 'h')
RAPIDJSON_STRING_(MaxLength, 'm', 'a', 'x', 'L', 'e', 'n', 'g', 't', 'h')
RAPIDJSON_STRING_(Pattern, 'p', 'a', 't', 't', 'e', 'r', 'n')
RAPIDJSON_STRING_(Minimum, 'm', 'i', 'n', 'i', 'm', 'u', 'm')
RAPIDJSON_STRING_(Maximum, 'm', 'a', 'x', 'i', 'm', 'u', 'm')
RAPIDJSON_STRING_(ExclusiveMinimum, 'e', 'x', 'c', 'l', 'u', 's', 'i', 'v', 'e', 'M', 'i', 'n', 'i', 'm', 'u', 'm')
RAPIDJSON_STRING_(ExclusiveMaximum, 'e', 'x', 'c', 'l', 'u', 's', 'i', 'v', 'e', 'M', 'a', 'x', 'i', 'm', 'u', 'm')
RAPIDJSON_STRING_(MultipleOf, 'm', 'u', 'l', 't', 'i', 'p', 'l', 'e', 'O', 'f')
RAPIDJSON_STRING_(DefaultValue, 'd', 'e', 'f', 'a', 'u', 'l', 't')
RAPIDJSON_STRING_(Ref, '$', 'r', 'e', 'f')
RAPIDJSON_STRING_(Id, 'i', 'd')
RAPIDJSON_STRING_(SchemeEnd, ':')
RAPIDJSON_STRING_(AuthStart, '/', '/')
RAPIDJSON_STRING_(QueryStart, '?')
RAPIDJSON_STRING_(FragStart, '#')
RAPIDJSON_STRING_(Slash, '/')
RAPIDJSON_STRING_(Dot, '.')
#undef RAPIDJSON_STRING_
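// For reference, RAPIDJSON_STRING_(Type, 't', 'y', 'p', 'e') above expands to roughly:
//   static const ValueType& GetTypeString() {
//       static const Ch s[] = { 't', 'y', 'p', 'e', '\0' };
//       static const ValueType v(s, 4);
//       return v;
//   }
// Building the keyword literals character by character keeps them valid for any Ch
// (char, wchar_t, ...) instead of hard-coding narrow string literals.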
private:
enum SchemaValueType {
kNullSchemaType,
kBooleanSchemaType,
kObjectSchemaType,
kArraySchemaType,
kStringSchemaType,
kNumberSchemaType,
kIntegerSchemaType,
kTotalSchemaType
};
#if RAPIDJSON_SCHEMA_USE_INTERNALREGEX
typedef internal::GenericRegex<EncodingType, AllocatorType> RegexType;
#elif RAPIDJSON_SCHEMA_USE_STDREGEX
typedef std::basic_regex<Ch> RegexType;
#else
typedef char RegexType;
#endif
struct SchemaArray {
SchemaArray() : schemas(), count() {}
~SchemaArray() { AllocatorType::Free(schemas); }
const SchemaType** schemas;
SizeType begin; // begin index of context.validators
SizeType count;
};
template <typename V1, typename V2>
void AddUniqueElement(V1& a, const V2& v) {
for (typename V1::ConstValueIterator itr = a.Begin(); itr != a.End(); ++itr)
if (*itr == v)
return;
V1 c(v, *allocator_);
a.PushBack(c, *allocator_);
}
static const ValueType* GetMember(const ValueType& value, const ValueType& name) {
typename ValueType::ConstMemberIterator itr = value.FindMember(name);
return itr != value.MemberEnd() ? &(itr->value) : 0;
}
static void AssignIfExist(bool& out, const ValueType& value, const ValueType& name) {
if (const ValueType* v = GetMember(value, name))
if (v->IsBool())
out = v->GetBool();
}
static void AssignIfExist(SizeType& out, const ValueType& value, const ValueType& name) {
if (const ValueType* v = GetMember(value, name))
if (v->IsUint64() && v->GetUint64() <= SizeType(~0))
out = static_cast<SizeType>(v->GetUint64());
}
void AssignIfExist(SchemaArray& out, SchemaDocumentType& schemaDocument, const PointerType& p, const ValueType& value, const ValueType& name, const ValueType& document) {
if (const ValueType* v = GetMember(value, name)) {
if (v->IsArray() && v->Size() > 0) {
PointerType q = p.Append(name, allocator_);
out.count = v->Size();
out.schemas = static_cast<const Schema**>(allocator_->Malloc(out.count * sizeof(const Schema*)));
memset(out.schemas, 0, sizeof(Schema*)* out.count);
for (SizeType i = 0; i < out.count; i++)
schemaDocument.CreateSchema(&out.schemas[i], q.Append(i, allocator_), (*v)[i], document, id_);
out.begin = validatorCount_;
validatorCount_ += out.count;
}
}
}
#if RAPIDJSON_SCHEMA_USE_INTERNALREGEX
template <typename ValueType>
RegexType* CreatePattern(const ValueType& value) {
if (value.IsString()) {
RegexType* r = new (allocator_->Malloc(sizeof(RegexType))) RegexType(value.GetString(), allocator_);
if (!r->IsValid()) {
r->~RegexType();
AllocatorType::Free(r);
r = 0;
}
return r;
}
return 0;
}
static bool IsPatternMatch(const RegexType* pattern, const Ch *str, SizeType) {
GenericRegexSearch<RegexType> rs(*pattern);
return rs.Search(str);
}
#elif RAPIDJSON_SCHEMA_USE_STDREGEX
template <typename ValueType>
RegexType* CreatePattern(const ValueType& value) {
if (value.IsString()) {
RegexType *r = static_cast<RegexType*>(allocator_->Malloc(sizeof(RegexType)));
try {
return new (r) RegexType(value.GetString(), std::size_t(value.GetStringLength()), std::regex_constants::ECMAScript);
}
catch (const std::regex_error&) {
AllocatorType::Free(r);
}
}
return 0;
}
static bool IsPatternMatch(const RegexType* pattern, const Ch *str, SizeType length) {
std::match_results<const Ch*> r;
return std::regex_search(str, str + length, r, *pattern);
}
#else
template <typename ValueType>
RegexType* CreatePattern(const ValueType&) { return 0; }
static bool IsPatternMatch(const RegexType*, const Ch *, SizeType) { return true; }
#endif // RAPIDJSON_SCHEMA_USE_STDREGEX
void AddType(const ValueType& type) {
if (type == GetNullString() ) type_ |= 1 << kNullSchemaType;
else if (type == GetBooleanString()) type_ |= 1 << kBooleanSchemaType;
else if (type == GetObjectString() ) type_ |= 1 << kObjectSchemaType;
else if (type == GetArrayString() ) type_ |= 1 << kArraySchemaType;
else if (type == GetStringString() ) type_ |= 1 << kStringSchemaType;
else if (type == GetIntegerString()) type_ |= 1 << kIntegerSchemaType;
else if (type == GetNumberString() ) type_ |= (1 << kNumberSchemaType) | (1 << kIntegerSchemaType);
}
bool CreateParallelValidator(Context& context) const {
if (enum_ || context.arrayUniqueness)
context.hasher = context.factory.CreateHasher();
if (validatorCount_) {
RAPIDJSON_ASSERT(context.validators == 0);
context.validators = static_cast<ISchemaValidator**>(context.factory.MallocState(sizeof(ISchemaValidator*) * validatorCount_));
context.validatorCount = validatorCount_;
// Always return after first failure for these sub-validators
if (allOf_.schemas)
CreateSchemaValidators(context, allOf_, false);
if (anyOf_.schemas)
CreateSchemaValidators(context, anyOf_, false);
if (oneOf_.schemas)
CreateSchemaValidators(context, oneOf_, false);
if (not_)
context.validators[notValidatorIndex_] = context.factory.CreateSchemaValidator(*not_, false);
if (hasSchemaDependencies_) {
for (SizeType i = 0; i < propertyCount_; i++)
if (properties_[i].dependenciesSchema)
context.validators[properties_[i].dependenciesValidatorIndex] = context.factory.CreateSchemaValidator(*properties_[i].dependenciesSchema, false);
}
}
return true;
}
void CreateSchemaValidators(Context& context, const SchemaArray& schemas, const bool inheritContinueOnErrors) const {
for (SizeType i = 0; i < schemas.count; i++)
context.validators[schemas.begin + i] = context.factory.CreateSchemaValidator(*schemas.schemas[i], inheritContinueOnErrors);
}
// O(n)
bool FindPropertyIndex(const ValueType& name, SizeType* outIndex) const {
SizeType len = name.GetStringLength();
const Ch* str = name.GetString();
for (SizeType index = 0; index < propertyCount_; index++)
if (properties_[index].name.GetStringLength() == len &&
(std::memcmp(properties_[index].name.GetString(), str, sizeof(Ch) * len) == 0))
{
*outIndex = index;
return true;
}
return false;
}
bool CheckInt(Context& context, int64_t i) const {
if (!(type_ & ((1 << kIntegerSchemaType) | (1 << kNumberSchemaType)))) {
DisallowedType(context, GetIntegerString());
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorType);
}
if (!minimum_.IsNull()) {
if (minimum_.IsInt64()) {
if (exclusiveMinimum_ ? i <= minimum_.GetInt64() : i < minimum_.GetInt64()) {
context.error_handler.BelowMinimum(i, minimum_, exclusiveMinimum_);
RAPIDJSON_INVALID_KEYWORD_RETURN(exclusiveMinimum_ ? kValidateErrorExclusiveMinimum : kValidateErrorMinimum);
}
}
else if (minimum_.IsUint64()) {
context.error_handler.BelowMinimum(i, minimum_, exclusiveMinimum_);
RAPIDJSON_INVALID_KEYWORD_RETURN(exclusiveMinimum_ ? kValidateErrorExclusiveMinimum : kValidateErrorMinimum); // i <= max(int64_t) < minimum.GetUint64()
}
else if (!CheckDoubleMinimum(context, static_cast<double>(i)))
return false;
}
if (!maximum_.IsNull()) {
if (maximum_.IsInt64()) {
if (exclusiveMaximum_ ? i >= maximum_.GetInt64() : i > maximum_.GetInt64()) {
context.error_handler.AboveMaximum(i, maximum_, exclusiveMaximum_);
RAPIDJSON_INVALID_KEYWORD_RETURN(exclusiveMaximum_ ? kValidateErrorExclusiveMaximum : kValidateErrorMaximum);
}
}
else if (maximum_.IsUint64()) { /* do nothing */ } // i <= max(int64_t) < maximum_.GetUint64()
else if (!CheckDoubleMaximum(context, static_cast<double>(i)))
return false;
}
if (!multipleOf_.IsNull()) {
if (multipleOf_.IsUint64()) {
if (static_cast<uint64_t>(i >= 0 ? i : -i) % multipleOf_.GetUint64() != 0) {
context.error_handler.NotMultipleOf(i, multipleOf_);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorMultipleOf);
}
}
else if (!CheckDoubleMultipleOf(context, static_cast<double>(i)))
return false;
}
return true;
}
bool CheckUint(Context& context, uint64_t i) const {
if (!(type_ & ((1 << kIntegerSchemaType) | (1 << kNumberSchemaType)))) {
DisallowedType(context, GetIntegerString());
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorType);
}
if (!minimum_.IsNull()) {
if (minimum_.IsUint64()) {
if (exclusiveMinimum_ ? i <= minimum_.GetUint64() : i < minimum_.GetUint64()) {
context.error_handler.BelowMinimum(i, minimum_, exclusiveMinimum_);
RAPIDJSON_INVALID_KEYWORD_RETURN(exclusiveMinimum_ ? kValidateErrorExclusiveMinimum : kValidateErrorMinimum);
}
}
else if (minimum_.IsInt64())
/* do nothing */; // i >= 0 > minimum.Getint64()
else if (!CheckDoubleMinimum(context, static_cast<double>(i)))
return false;
}
if (!maximum_.IsNull()) {
if (maximum_.IsUint64()) {
if (exclusiveMaximum_ ? i >= maximum_.GetUint64() : i > maximum_.GetUint64()) {
context.error_handler.AboveMaximum(i, maximum_, exclusiveMaximum_);
RAPIDJSON_INVALID_KEYWORD_RETURN(exclusiveMaximum_ ? kValidateErrorExclusiveMaximum : kValidateErrorMaximum);
}
}
else if (maximum_.IsInt64()) {
context.error_handler.AboveMaximum(i, maximum_, exclusiveMaximum_);
RAPIDJSON_INVALID_KEYWORD_RETURN(exclusiveMaximum_ ? kValidateErrorExclusiveMaximum : kValidateErrorMaximum); // i >= 0 > maximum_
}
else if (!CheckDoubleMaximum(context, static_cast<double>(i)))
return false;
}
if (!multipleOf_.IsNull()) {
if (multipleOf_.IsUint64()) {
if (i % multipleOf_.GetUint64() != 0) {
context.error_handler.NotMultipleOf(i, multipleOf_);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorMultipleOf);
}
}
else if (!CheckDoubleMultipleOf(context, static_cast<double>(i)))
return false;
}
return true;
}
bool CheckDoubleMinimum(Context& context, double d) const {
if (exclusiveMinimum_ ? d <= minimum_.GetDouble() : d < minimum_.GetDouble()) {
context.error_handler.BelowMinimum(d, minimum_, exclusiveMinimum_);
RAPIDJSON_INVALID_KEYWORD_RETURN(exclusiveMinimum_ ? kValidateErrorExclusiveMinimum : kValidateErrorMinimum);
}
return true;
}
bool CheckDoubleMaximum(Context& context, double d) const {
if (exclusiveMaximum_ ? d >= maximum_.GetDouble() : d > maximum_.GetDouble()) {
context.error_handler.AboveMaximum(d, maximum_, exclusiveMaximum_);
RAPIDJSON_INVALID_KEYWORD_RETURN(exclusiveMaximum_ ? kValidateErrorExclusiveMaximum : kValidateErrorMaximum);
}
return true;
}
bool CheckDoubleMultipleOf(Context& context, double d) const {
double a = std::abs(d), b = std::abs(multipleOf_.GetDouble());
double q = std::floor(a / b);
double r = a - q * b;
if (r > 0.0) {
context.error_handler.NotMultipleOf(d, multipleOf_);
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorMultipleOf);
}
return true;
}
void DisallowedType(Context& context, const ValueType& actualType) const {
ErrorHandler& eh = context.error_handler;
eh.StartDisallowedType();
if (type_ & (1 << kNullSchemaType)) eh.AddExpectedType(GetNullString());
if (type_ & (1 << kBooleanSchemaType)) eh.AddExpectedType(GetBooleanString());
if (type_ & (1 << kObjectSchemaType)) eh.AddExpectedType(GetObjectString());
if (type_ & (1 << kArraySchemaType)) eh.AddExpectedType(GetArrayString());
if (type_ & (1 << kStringSchemaType)) eh.AddExpectedType(GetStringString());
if (type_ & (1 << kNumberSchemaType)) eh.AddExpectedType(GetNumberString());
else if (type_ & (1 << kIntegerSchemaType)) eh.AddExpectedType(GetIntegerString());
eh.EndDisallowedType(actualType);
}
struct Property {
Property() : schema(), dependenciesSchema(), dependenciesValidatorIndex(), dependencies(), required(false) {}
~Property() { AllocatorType::Free(dependencies); }
SValue name;
const SchemaType* schema;
const SchemaType* dependenciesSchema;
SizeType dependenciesValidatorIndex;
bool* dependencies;
bool required;
};
struct PatternProperty {
PatternProperty() : schema(), pattern() {}
~PatternProperty() {
if (pattern) {
pattern->~RegexType();
AllocatorType::Free(pattern);
}
}
const SchemaType* schema;
RegexType* pattern;
};
AllocatorType* allocator_;
SValue uri_;
UriType id_;
PointerType pointer_;
const SchemaType* typeless_;
uint64_t* enum_;
SizeType enumCount_;
SchemaArray allOf_;
SchemaArray anyOf_;
SchemaArray oneOf_;
const SchemaType* not_;
unsigned type_; // bitmask of kSchemaType
SizeType validatorCount_;
SizeType notValidatorIndex_;
Property* properties_;
const SchemaType* additionalPropertiesSchema_;
PatternProperty* patternProperties_;
SizeType patternPropertyCount_;
SizeType propertyCount_;
SizeType minProperties_;
SizeType maxProperties_;
bool additionalProperties_;
bool hasDependencies_;
bool hasRequired_;
bool hasSchemaDependencies_;
const SchemaType* additionalItemsSchema_;
const SchemaType* itemsList_;
const SchemaType** itemsTuple_;
SizeType itemsTupleCount_;
SizeType minItems_;
SizeType maxItems_;
bool additionalItems_;
bool uniqueItems_;
RegexType* pattern_;
SizeType minLength_;
SizeType maxLength_;
SValue minimum_;
SValue maximum_;
SValue multipleOf_;
bool exclusiveMinimum_;
bool exclusiveMaximum_;
SizeType defaultValueLength_;
};
template<typename Stack, typename Ch>
struct TokenHelper {
RAPIDJSON_FORCEINLINE static void AppendIndexToken(Stack& documentStack, SizeType index) {
*documentStack.template Push<Ch>() = '/';
char buffer[21];
size_t length = static_cast<size_t>((sizeof(SizeType) == 4 ? u32toa(index, buffer) : u64toa(index, buffer)) - buffer);
for (size_t i = 0; i < length; i++)
*documentStack.template Push<Ch>() = static_cast<Ch>(buffer[i]);
}
};
// Partial specialized version for char to prevent buffer copying.
template <typename Stack>
struct TokenHelper<Stack, char> {
RAPIDJSON_FORCEINLINE static void AppendIndexToken(Stack& documentStack, SizeType index) {
if (sizeof(SizeType) == 4) {
char *buffer = documentStack.template Push<char>(1 + 10); // '/' + uint
*buffer++ = '/';
const char* end = internal::u32toa(index, buffer);
documentStack.template Pop<char>(static_cast<size_t>(10 - (end - buffer)));
}
else {
char *buffer = documentStack.template Push<char>(1 + 20); // '/' + uint64
*buffer++ = '/';
const char* end = internal::u64toa(index, buffer);
documentStack.template Pop<char>(static_cast<size_t>(20 - (end - buffer)));
}
}
};
} // namespace internal
///////////////////////////////////////////////////////////////////////////////
// IGenericRemoteSchemaDocumentProvider
template <typename SchemaDocumentType>
class IGenericRemoteSchemaDocumentProvider {
public:
typedef typename SchemaDocumentType::Ch Ch;
typedef typename SchemaDocumentType::ValueType ValueType;
typedef typename SchemaDocumentType::AllocatorType AllocatorType;
virtual ~IGenericRemoteSchemaDocumentProvider() {}
virtual const SchemaDocumentType* GetRemoteDocument(const Ch* uri, SizeType length) = 0;
virtual const SchemaDocumentType* GetRemoteDocument(GenericUri<ValueType, AllocatorType> uri) { return GetRemoteDocument(uri.GetBaseString(), uri.GetBaseStringLength()); }
};
///////////////////////////////////////////////////////////////////////////////
// GenericSchemaDocument
//! JSON schema document.
/*!
A JSON schema document is a compiled version of a JSON schema.
It is basically a tree of internal::Schema.
\note This is an immutable class (i.e. its instance cannot be modified after construction).
\tparam ValueT Type of JSON value (e.g. \c Value ), which also determines the encoding.
\tparam Allocator Allocator type for allocating memory of this document.
*/
template <typename ValueT, typename Allocator = CrtAllocator>
class GenericSchemaDocument {
public:
typedef ValueT ValueType;
typedef IGenericRemoteSchemaDocumentProvider<GenericSchemaDocument> IRemoteSchemaDocumentProviderType;
typedef Allocator AllocatorType;
typedef typename ValueType::EncodingType EncodingType;
typedef typename EncodingType::Ch Ch;
typedef internal::Schema<GenericSchemaDocument> SchemaType;
typedef GenericPointer<ValueType, Allocator> PointerType;
typedef GenericValue<EncodingType, AllocatorType> SValue;
typedef GenericUri<ValueType, Allocator> UriType;
friend class internal::Schema<GenericSchemaDocument>;
template <typename, typename, typename>
friend class GenericSchemaValidator;
//! Constructor.
/*!
Compile a JSON document into schema document.
\param document A JSON document as source.
\param uri The base URI of this schema document for purposes of violation reporting.
\param uriLength Length of \c uri, in code points.
\param remoteProvider An optional remote schema document provider for resolving remote reference. Can be null.
\param allocator An optional allocator instance for allocating memory. Can be null.
\param pointer An optional JSON pointer to the start of the schema document
*/
explicit GenericSchemaDocument(const ValueType& document, const Ch* uri = 0, SizeType uriLength = 0,
IRemoteSchemaDocumentProviderType* remoteProvider = 0, Allocator* allocator = 0,
const PointerType& pointer = PointerType()) : // PR #1393
remoteProvider_(remoteProvider),
allocator_(allocator),
ownAllocator_(),
root_(),
typeless_(),
schemaMap_(allocator, kInitialSchemaMapSize),
schemaRef_(allocator, kInitialSchemaRefSize)
{
if (!allocator_)
ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator)();
Ch noUri[1] = {0};
uri_.SetString(uri ? uri : noUri, uriLength, *allocator_);
docId_ = UriType(uri_, allocator_);
typeless_ = static_cast<SchemaType*>(allocator_->Malloc(sizeof(SchemaType)));
new (typeless_) SchemaType(this, PointerType(), ValueType(kObjectType).Move(), ValueType(kObjectType).Move(), allocator_, docId_);
// Generate the root schema; this calls CreateSchema() to create sub-schemas
// and HandleRefSchema() if there are $ref.
// PR #1393 use input pointer if supplied
root_ = typeless_;
if (pointer.GetTokenCount() == 0) {
CreateSchemaRecursive(&root_, pointer, document, document, docId_);
}
else if (const ValueType* v = pointer.Get(document)) {
CreateSchema(&root_, pointer, *v, document, docId_);
}
RAPIDJSON_ASSERT(root_ != 0);
schemaRef_.ShrinkToFit(); // Deallocate all memory for ref
}
#if RAPIDJSON_HAS_CXX11_RVALUE_REFS
//! Move constructor in C++11
GenericSchemaDocument(GenericSchemaDocument&& rhs) RAPIDJSON_NOEXCEPT :
remoteProvider_(rhs.remoteProvider_),
allocator_(rhs.allocator_),
ownAllocator_(rhs.ownAllocator_),
root_(rhs.root_),
typeless_(rhs.typeless_),
schemaMap_(std::move(rhs.schemaMap_)),
schemaRef_(std::move(rhs.schemaRef_)),
uri_(std::move(rhs.uri_)),
docId_(rhs.docId_)
{
rhs.remoteProvider_ = 0;
rhs.allocator_ = 0;
rhs.ownAllocator_ = 0;
rhs.typeless_ = 0;
}
#endif
//! Destructor
~GenericSchemaDocument() {
while (!schemaMap_.Empty())
schemaMap_.template Pop<SchemaEntry>(1)->~SchemaEntry();
if (typeless_) {
typeless_->~SchemaType();
Allocator::Free(typeless_);
}
RAPIDJSON_DELETE(ownAllocator_);
}
const SValue& GetURI() const { return uri_; }
//! Get the root schema.
const SchemaType& GetRoot() const { return *root_; }
private:
//! Prohibit copying
GenericSchemaDocument(const GenericSchemaDocument&);
//! Prohibit assignment
GenericSchemaDocument& operator=(const GenericSchemaDocument&);
typedef const PointerType* SchemaRefPtr; // PR #1393
struct SchemaEntry {
SchemaEntry(const PointerType& p, SchemaType* s, bool o, Allocator* allocator) : pointer(p, allocator), schema(s), owned(o) {}
~SchemaEntry() {
if (owned) {
schema->~SchemaType();
Allocator::Free(schema);
}
}
PointerType pointer;
SchemaType* schema;
bool owned;
};
// Changed by PR #1393
void CreateSchemaRecursive(const SchemaType** schema, const PointerType& pointer, const ValueType& v, const ValueType& document, const UriType& id) {
if (v.GetType() == kObjectType) {
UriType newid = UriType(CreateSchema(schema, pointer, v, document, id), allocator_);
for (typename ValueType::ConstMemberIterator itr = v.MemberBegin(); itr != v.MemberEnd(); ++itr)
CreateSchemaRecursive(0, pointer.Append(itr->name, allocator_), itr->value, document, newid);
}
else if (v.GetType() == kArrayType)
for (SizeType i = 0; i < v.Size(); i++)
CreateSchemaRecursive(0, pointer.Append(i, allocator_), v[i], document, id);
}
// Changed by PR #1393
const UriType& CreateSchema(const SchemaType** schema, const PointerType& pointer, const ValueType& v, const ValueType& document, const UriType& id) {
RAPIDJSON_ASSERT(pointer.IsValid());
if (v.IsObject()) {
if (const SchemaType* sc = GetSchema(pointer)) {
if (schema)
*schema = sc;
AddSchemaRefs(const_cast<SchemaType*>(sc));
}
else if (!HandleRefSchema(pointer, schema, v, document, id)) {
// The new schema constructor adds itself and its $ref(s) to schemaMap_
SchemaType* s = new (allocator_->Malloc(sizeof(SchemaType))) SchemaType(this, pointer, v, document, allocator_, id);
if (schema)
*schema = s;
return s->GetId();
}
}
else {
if (schema)
*schema = typeless_;
AddSchemaRefs(typeless_);
}
return id;
}
// Changed by PR #1393
// TODO should this return a UriType& ?
bool HandleRefSchema(const PointerType& source, const SchemaType** schema, const ValueType& v, const ValueType& document, const UriType& id) {
typename ValueType::ConstMemberIterator itr = v.FindMember(SchemaType::GetRefString());
if (itr == v.MemberEnd())
return false;
// Resolve the source pointer to the $ref'ed schema (finally)
new (schemaRef_.template Push<SchemaRefPtr>()) SchemaRefPtr(&source);
if (itr->value.IsString()) {
SizeType len = itr->value.GetStringLength();
if (len > 0) {
// First resolve $ref against the in-scope id
UriType scopeId = UriType(id, allocator_);
UriType ref = UriType(itr->value, allocator_).Resolve(scopeId, allocator_);
// See if the resolved $ref minus the fragment matches a resolved id in this document
// Search from the root. Returns the subschema in the document and its absolute JSON pointer.
PointerType basePointer = PointerType();
const ValueType *base = FindId(document, ref, basePointer, docId_, false);
if (!base) {
// Remote reference - call the remote document provider
if (remoteProvider_) {
if (const GenericSchemaDocument* remoteDocument = remoteProvider_->GetRemoteDocument(ref)) {
const Ch* s = ref.GetFragString();
len = ref.GetFragStringLength();
if (len <= 1 || s[1] == '/') {
// JSON pointer fragment, absolute in the remote schema
const PointerType pointer(s, len, allocator_);
if (pointer.IsValid()) {
// Get the subschema
if (const SchemaType *sc = remoteDocument->GetSchema(pointer)) {
if (schema)
*schema = sc;
AddSchemaRefs(const_cast<SchemaType *>(sc));
return true;
}
}
} else {
// Plain name fragment, not allowed
}
}
}
}
else { // Local reference
const Ch* s = ref.GetFragString();
len = ref.GetFragStringLength();
if (len <= 1 || s[1] == '/') {
// JSON pointer fragment, relative to the resolved URI
const PointerType relPointer(s, len, allocator_);
if (relPointer.IsValid()) {
// Get the subschema
if (const ValueType *pv = relPointer.Get(*base)) {
// Now get the absolute JSON pointer by adding relative to base
PointerType pointer(basePointer);
for (SizeType i = 0; i < relPointer.GetTokenCount(); i++)
pointer = pointer.Append(relPointer.GetTokens()[i], allocator_);
//GenericStringBuffer<EncodingType> sb;
//pointer.StringifyUriFragment(sb);
if (pointer.IsValid() && !IsCyclicRef(pointer)) {
// Call CreateSchema recursively, but first compute the in-scope id for the $ref target as we have jumped there
// TODO: cache pointer <-> id mapping
size_t unresolvedTokenIndex;
scopeId = pointer.GetUri(document, docId_, &unresolvedTokenIndex, allocator_);
CreateSchema(schema, pointer, *pv, document, scopeId);
return true;
}
}
}
} else {
// Plain name fragment, relative to the resolved URI
// See if the fragment matches an id in this document.
// Search from the base we just established. Returns the subschema in the document and its absolute JSON pointer.
PointerType pointer = PointerType();
if (const ValueType *pv = FindId(*base, ref, pointer, UriType(ref.GetBaseString(), ref.GetBaseStringLength(), allocator_), true, basePointer)) {
if (!IsCyclicRef(pointer)) {
//GenericStringBuffer<EncodingType> sb;
//pointer.StringifyUriFragment(sb);
// Call CreateSchema recursively, but first compute the in-scope id for the $ref target as we have jumped there
// TODO: cache pointer <-> id mapping
size_t unresolvedTokenIndex;
scopeId = pointer.GetUri(document, docId_, &unresolvedTokenIndex, allocator_);
CreateSchema(schema, pointer, *pv, document, scopeId);
return true;
}
}
}
}
}
}
// Invalid/Unknown $ref
if (schema)
*schema = typeless_;
AddSchemaRefs(typeless_);
return true;
}
//! Find the first subschema with a resolved 'id' that matches the specified URI.
// If 'full' is true, match the full URI; otherwise ignore the fragment.
// If found, return a pointer to the subschema and its JSON pointer.
// TODO cache pointer <-> id mapping
ValueType* FindId(const ValueType& doc, const UriType& finduri, PointerType& resptr, const UriType& baseuri, bool full, const PointerType& here = PointerType()) const {
SizeType i = 0;
ValueType* resval = 0;
UriType tempuri = UriType(finduri, allocator_);
UriType localuri = UriType(baseuri, allocator_);
if (doc.GetType() == kObjectType) {
// Establish the base URI of this object
typename ValueType::ConstMemberIterator m = doc.FindMember(SchemaType::GetIdString());
if (m != doc.MemberEnd() && m->value.GetType() == kStringType) {
localuri = UriType(m->value, allocator_).Resolve(baseuri, allocator_);
}
// See if it matches
if (localuri.Match(finduri, full)) {
resval = const_cast<ValueType *>(&doc);
resptr = here;
return resval;
}
// No match, continue looking
for (m = doc.MemberBegin(); m != doc.MemberEnd(); ++m) {
if (m->value.GetType() == kObjectType || m->value.GetType() == kArrayType) {
resval = FindId(m->value, finduri, resptr, localuri, full, here.Append(m->name.GetString(), m->name.GetStringLength(), allocator_));
}
if (resval) break;
}
} else if (doc.GetType() == kArrayType) {
// Continue looking
for (typename ValueType::ConstValueIterator v = doc.Begin(); v != doc.End(); ++v) {
if (v->GetType() == kObjectType || v->GetType() == kArrayType) {
resval = FindId(*v, finduri, resptr, localuri, full, here.Append(i, allocator_));
}
if (resval) break;
i++;
}
}
return resval;
}
// Added by PR #1393
void AddSchemaRefs(SchemaType* schema) {
while (!schemaRef_.Empty()) {
SchemaRefPtr *ref = schemaRef_.template Pop<SchemaRefPtr>(1);
SchemaEntry *entry = schemaMap_.template Push<SchemaEntry>();
new (entry) SchemaEntry(**ref, schema, false, allocator_);
}
}
// Added by PR #1393
bool IsCyclicRef(const PointerType& pointer) const {
for (const SchemaRefPtr* ref = schemaRef_.template Bottom<SchemaRefPtr>(); ref != schemaRef_.template End<SchemaRefPtr>(); ++ref)
if (pointer == **ref)
return true;
return false;
}
const SchemaType* GetSchema(const PointerType& pointer) const {
for (const SchemaEntry* target = schemaMap_.template Bottom<SchemaEntry>(); target != schemaMap_.template End<SchemaEntry>(); ++target)
if (pointer == target->pointer)
return target->schema;
return 0;
}
PointerType GetPointer(const SchemaType* schema) const {
for (const SchemaEntry* target = schemaMap_.template Bottom<SchemaEntry>(); target != schemaMap_.template End<SchemaEntry>(); ++target)
if (schema == target->schema)
return target->pointer;
return PointerType();
}
const SchemaType* GetTypeless() const { return typeless_; }
static const size_t kInitialSchemaMapSize = 64;
static const size_t kInitialSchemaRefSize = 64;
IRemoteSchemaDocumentProviderType* remoteProvider_;
Allocator *allocator_;
Allocator *ownAllocator_;
const SchemaType* root_; //!< Root schema.
SchemaType* typeless_;
internal::Stack<Allocator> schemaMap_; // Stores created Pointer -> Schemas
internal::Stack<Allocator> schemaRef_; // Stores Pointer(s) from $ref(s) until resolved
SValue uri_; // Schema document URI
UriType docId_;
};
//! GenericSchemaDocument using Value type.
typedef GenericSchemaDocument<Value> SchemaDocument;
//! IGenericRemoteSchemaDocumentProvider using SchemaDocument.
typedef IGenericRemoteSchemaDocumentProvider<SchemaDocument> IRemoteSchemaDocumentProvider;
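//! Illustrative sketch: implementing a remote schema document provider.
/*! Example only, not part of the library source. A provider is consulted by
GenericSchemaDocument when a schema contains a remote \c $ref; the std::map
lookup below is an assumed, application-specific strategy.
\code
#include <map>
#include <string>
class SimpleRemoteProvider : public IRemoteSchemaDocumentProvider {
public:
    // Register a pre-compiled schema document under its URI.
    void Add(const std::string& uri, const SchemaDocument* sd) { schemas_[uri] = sd; }
    virtual const SchemaDocument* GetRemoteDocument(const char* uri, SizeType length) {
        std::map<std::string, const SchemaDocument*>::const_iterator it =
            schemas_.find(std::string(uri, length));
        return it != schemas_.end() ? it->second : 0;
    }
private:
    std::map<std::string, const SchemaDocument*> schemas_;
};
// Pass the provider when compiling the referring schema:
//   SchemaDocument schema(schemaJsonDocument, 0, 0, &provider);
\endcode
*/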
///////////////////////////////////////////////////////////////////////////////
// GenericSchemaValidator
//! JSON Schema Validator.
/*!
A SAX style JSON schema validator.
It uses a \c GenericSchemaDocument to validate SAX events.
It delegates the incoming SAX events to an output handler.
The default output handler does nothing.
It can be reused multiple times by calling \c Reset().
\tparam SchemaDocumentType Type of schema document.
\tparam OutputHandler Type of output handler. Default handler does nothing.
\tparam StateAllocator Allocator for storing the internal validation states.
*/
template <
typename SchemaDocumentType,
typename OutputHandler = BaseReaderHandler<typename SchemaDocumentType::SchemaType::EncodingType>,
typename StateAllocator = CrtAllocator>
class GenericSchemaValidator :
public internal::ISchemaStateFactory<typename SchemaDocumentType::SchemaType>,
public internal::ISchemaValidator,
public internal::IValidationErrorHandler<typename SchemaDocumentType::SchemaType> {
public:
typedef typename SchemaDocumentType::SchemaType SchemaType;
typedef typename SchemaDocumentType::PointerType PointerType;
typedef typename SchemaType::EncodingType EncodingType;
typedef typename SchemaType::SValue SValue;
typedef typename EncodingType::Ch Ch;
typedef GenericStringRef<Ch> StringRefType;
typedef GenericValue<EncodingType, StateAllocator> ValueType;
//! Constructor without output handler.
/*!
\param schemaDocument The schema document to conform to.
\param allocator Optional allocator for storing internal validation states.
\param schemaStackCapacity Optional initial capacity of schema path stack.
\param documentStackCapacity Optional initial capacity of document path stack.
*/
GenericSchemaValidator(
const SchemaDocumentType& schemaDocument,
StateAllocator* allocator = 0,
size_t schemaStackCapacity = kDefaultSchemaStackCapacity,
size_t documentStackCapacity = kDefaultDocumentStackCapacity)
:
schemaDocument_(&schemaDocument),
root_(schemaDocument.GetRoot()),
stateAllocator_(allocator),
ownStateAllocator_(0),
schemaStack_(allocator, schemaStackCapacity),
documentStack_(allocator, documentStackCapacity),
outputHandler_(0),
error_(kObjectType),
currentError_(),
missingDependents_(),
valid_(true),
flags_(kValidateDefaultFlags)
#if RAPIDJSON_SCHEMA_VERBOSE
, depth_(0)
#endif
{
}
//! Constructor with output handler.
/*!
\param schemaDocument The schema document to conform to.
\param allocator Optional allocator for storing internal validation states.
\param schemaStackCapacity Optional initial capacity of schema path stack.
\param documentStackCapacity Optional initial capacity of document path stack.
*/
GenericSchemaValidator(
const SchemaDocumentType& schemaDocument,
OutputHandler& outputHandler,
StateAllocator* allocator = 0,
size_t schemaStackCapacity = kDefaultSchemaStackCapacity,
size_t documentStackCapacity = kDefaultDocumentStackCapacity)
:
schemaDocument_(&schemaDocument),
root_(schemaDocument.GetRoot()),
stateAllocator_(allocator),
ownStateAllocator_(0),
schemaStack_(allocator, schemaStackCapacity),
documentStack_(allocator, documentStackCapacity),
outputHandler_(&outputHandler),
error_(kObjectType),
currentError_(),
missingDependents_(),
valid_(true),
flags_(kValidateDefaultFlags)
#if RAPIDJSON_SCHEMA_VERBOSE
, depth_(0)
#endif
{
}
//! Destructor.
~GenericSchemaValidator() {
Reset();
RAPIDJSON_DELETE(ownStateAllocator_);
}
//! Reset the internal states.
void Reset() {
while (!schemaStack_.Empty())
PopSchema();
documentStack_.Clear();
ResetError();
}
//! Reset the error state.
void ResetError() {
error_.SetObject();
currentError_.SetNull();
missingDependents_.SetNull();
valid_ = true;
}
//! Implementation of ISchemaValidator
void SetValidateFlags(unsigned flags) {
flags_ = flags;
}
virtual unsigned GetValidateFlags() const {
return flags_;
}
//! Checks whether the current state is valid.
// Implementation of ISchemaValidator
virtual bool IsValid() const {
if (!valid_) return false;
if (GetContinueOnErrors() && !error_.ObjectEmpty()) return false;
return true;
}
//! Gets the error object.
ValueType& GetError() { return error_; }
const ValueType& GetError() const { return error_; }
//! Gets the JSON pointer pointed to the invalid schema.
// If reporting all errors, the stack will be empty.
PointerType GetInvalidSchemaPointer() const {
return schemaStack_.Empty() ? PointerType() : CurrentSchema().GetPointer();
}
//! Gets the keyword of invalid schema.
// If reporting all errors, the stack will be empty, so return "errors".
const Ch* GetInvalidSchemaKeyword() const {
if (!schemaStack_.Empty()) return CurrentContext().invalidKeyword;
if (GetContinueOnErrors() && !error_.ObjectEmpty()) return (const Ch*)GetErrorsString();
return 0;
}
//! Gets the error code of invalid schema.
// If reporting all errors, the stack will be empty, so return kValidateErrors.
ValidateErrorCode GetInvalidSchemaCode() const {
if (!schemaStack_.Empty()) return CurrentContext().invalidCode;
if (GetContinueOnErrors() && !error_.ObjectEmpty()) return kValidateErrors;
return kValidateErrorNone;
}
//! Gets the JSON pointer pointed to the invalid value.
// If reporting all errors, the stack will be empty.
PointerType GetInvalidDocumentPointer() const {
if (documentStack_.Empty()) {
return PointerType();
}
else {
return PointerType(documentStack_.template Bottom<Ch>(), documentStack_.GetSize() / sizeof(Ch));
}
}
void NotMultipleOf(int64_t actual, const SValue& expected) {
AddNumberError(kValidateErrorMultipleOf, ValueType(actual).Move(), expected);
}
void NotMultipleOf(uint64_t actual, const SValue& expected) {
AddNumberError(kValidateErrorMultipleOf, ValueType(actual).Move(), expected);
}
void NotMultipleOf(double actual, const SValue& expected) {
AddNumberError(kValidateErrorMultipleOf, ValueType(actual).Move(), expected);
}
void AboveMaximum(int64_t actual, const SValue& expected, bool exclusive) {
AddNumberError(exclusive ? kValidateErrorExclusiveMaximum : kValidateErrorMaximum, ValueType(actual).Move(), expected,
exclusive ? &SchemaType::GetExclusiveMaximumString : 0);
}
void AboveMaximum(uint64_t actual, const SValue& expected, bool exclusive) {
AddNumberError(exclusive ? kValidateErrorExclusiveMaximum : kValidateErrorMaximum, ValueType(actual).Move(), expected,
exclusive ? &SchemaType::GetExclusiveMaximumString : 0);
}
void AboveMaximum(double actual, const SValue& expected, bool exclusive) {
AddNumberError(exclusive ? kValidateErrorExclusiveMaximum : kValidateErrorMaximum, ValueType(actual).Move(), expected,
exclusive ? &SchemaType::GetExclusiveMaximumString : 0);
}
void BelowMinimum(int64_t actual, const SValue& expected, bool exclusive) {
AddNumberError(exclusive ? kValidateErrorExclusiveMinimum : kValidateErrorMinimum, ValueType(actual).Move(), expected,
exclusive ? &SchemaType::GetExclusiveMinimumString : 0);
}
void BelowMinimum(uint64_t actual, const SValue& expected, bool exclusive) {
AddNumberError(exclusive ? kValidateErrorExclusiveMinimum : kValidateErrorMinimum, ValueType(actual).Move(), expected,
exclusive ? &SchemaType::GetExclusiveMinimumString : 0);
}
void BelowMinimum(double actual, const SValue& expected, bool exclusive) {
AddNumberError(exclusive ? kValidateErrorExclusiveMinimum : kValidateErrorMinimum, ValueType(actual).Move(), expected,
exclusive ? &SchemaType::GetExclusiveMinimumString : 0);
}
void TooLong(const Ch* str, SizeType length, SizeType expected) {
AddNumberError(kValidateErrorMaxLength,
ValueType(str, length, GetStateAllocator()).Move(), SValue(expected).Move());
}
void TooShort(const Ch* str, SizeType length, SizeType expected) {
AddNumberError(kValidateErrorMinLength,
ValueType(str, length, GetStateAllocator()).Move(), SValue(expected).Move());
}
void DoesNotMatch(const Ch* str, SizeType length) {
currentError_.SetObject();
currentError_.AddMember(GetActualString(), ValueType(str, length, GetStateAllocator()).Move(), GetStateAllocator());
AddCurrentError(kValidateErrorPattern);
}
void DisallowedItem(SizeType index) {
currentError_.SetObject();
currentError_.AddMember(GetDisallowedString(), ValueType(index).Move(), GetStateAllocator());
AddCurrentError(kValidateErrorAdditionalItems, true);
}
void TooFewItems(SizeType actualCount, SizeType expectedCount) {
AddNumberError(kValidateErrorMinItems,
ValueType(actualCount).Move(), SValue(expectedCount).Move());
}
void TooManyItems(SizeType actualCount, SizeType expectedCount) {
AddNumberError(kValidateErrorMaxItems,
ValueType(actualCount).Move(), SValue(expectedCount).Move());
}
void DuplicateItems(SizeType index1, SizeType index2) {
ValueType duplicates(kArrayType);
duplicates.PushBack(index1, GetStateAllocator());
duplicates.PushBack(index2, GetStateAllocator());
currentError_.SetObject();
currentError_.AddMember(GetDuplicatesString(), duplicates, GetStateAllocator());
AddCurrentError(kValidateErrorUniqueItems, true);
}
void TooManyProperties(SizeType actualCount, SizeType expectedCount) {
AddNumberError(kValidateErrorMaxProperties,
ValueType(actualCount).Move(), SValue(expectedCount).Move());
}
void TooFewProperties(SizeType actualCount, SizeType expectedCount) {
AddNumberError(kValidateErrorMinProperties,
ValueType(actualCount).Move(), SValue(expectedCount).Move());
}
void StartMissingProperties() {
currentError_.SetArray();
}
void AddMissingProperty(const SValue& name) {
currentError_.PushBack(ValueType(name, GetStateAllocator()).Move(), GetStateAllocator());
}
bool EndMissingProperties() {
if (currentError_.Empty())
return false;
ValueType error(kObjectType);
error.AddMember(GetMissingString(), currentError_, GetStateAllocator());
currentError_ = error;
AddCurrentError(kValidateErrorRequired);
return true;
}
void PropertyViolations(ISchemaValidator** subvalidators, SizeType count) {
for (SizeType i = 0; i < count; ++i)
MergeError(static_cast<GenericSchemaValidator*>(subvalidators[i])->GetError());
}
void DisallowedProperty(const Ch* name, SizeType length) {
currentError_.SetObject();
currentError_.AddMember(GetDisallowedString(), ValueType(name, length, GetStateAllocator()).Move(), GetStateAllocator());
AddCurrentError(kValidateErrorAdditionalProperties, true);
}
void StartDependencyErrors() {
currentError_.SetObject();
}
void StartMissingDependentProperties() {
missingDependents_.SetArray();
}
void AddMissingDependentProperty(const SValue& targetName) {
missingDependents_.PushBack(ValueType(targetName, GetStateAllocator()).Move(), GetStateAllocator());
}
void EndMissingDependentProperties(const SValue& sourceName) {
if (!missingDependents_.Empty()) {
// Create equivalent 'required' error
ValueType error(kObjectType);
ValidateErrorCode code = kValidateErrorRequired;
error.AddMember(GetMissingString(), missingDependents_.Move(), GetStateAllocator());
AddErrorCode(error, code);
AddErrorInstanceLocation(error, false);
// When appending to a pointer ensure its allocator is used
PointerType schemaRef = GetInvalidSchemaPointer().Append(SchemaType::GetValidateErrorKeyword(kValidateErrorDependencies), &GetInvalidSchemaPointer().GetAllocator());
AddErrorSchemaLocation(error, schemaRef.Append(sourceName.GetString(), sourceName.GetStringLength(), &GetInvalidSchemaPointer().GetAllocator()));
ValueType wrapper(kObjectType);
wrapper.AddMember(ValueType(SchemaType::GetValidateErrorKeyword(code), GetStateAllocator()).Move(), error, GetStateAllocator());
currentError_.AddMember(ValueType(sourceName, GetStateAllocator()).Move(), wrapper, GetStateAllocator());
}
}
void AddDependencySchemaError(const SValue& sourceName, ISchemaValidator* subvalidator) {
currentError_.AddMember(ValueType(sourceName, GetStateAllocator()).Move(),
static_cast<GenericSchemaValidator*>(subvalidator)->GetError(), GetStateAllocator());
}
bool EndDependencyErrors() {
if (currentError_.ObjectEmpty())
return false;
ValueType error(kObjectType);
error.AddMember(GetErrorsString(), currentError_, GetStateAllocator());
currentError_ = error;
AddCurrentError(kValidateErrorDependencies);
return true;
}
void DisallowedValue(const ValidateErrorCode code = kValidateErrorEnum) {
currentError_.SetObject();
AddCurrentError(code);
}
void StartDisallowedType() {
currentError_.SetArray();
}
void AddExpectedType(const typename SchemaType::ValueType& expectedType) {
currentError_.PushBack(ValueType(expectedType, GetStateAllocator()).Move(), GetStateAllocator());
}
void EndDisallowedType(const typename SchemaType::ValueType& actualType) {
ValueType error(kObjectType);
error.AddMember(GetExpectedString(), currentError_, GetStateAllocator());
error.AddMember(GetActualString(), ValueType(actualType, GetStateAllocator()).Move(), GetStateAllocator());
currentError_ = error;
AddCurrentError(kValidateErrorType);
}
void NotAllOf(ISchemaValidator** subvalidators, SizeType count) {
// Treat allOf like oneOf and anyOf to match https://rapidjson.org/md_doc_schema.html#allOf-anyOf-oneOf
AddErrorArray(kValidateErrorAllOf, subvalidators, count);
//for (SizeType i = 0; i < count; ++i) {
// MergeError(static_cast<GenericSchemaValidator*>(subvalidators[i])->GetError());
//}
}
void NoneOf(ISchemaValidator** subvalidators, SizeType count) {
AddErrorArray(kValidateErrorAnyOf, subvalidators, count);
}
void NotOneOf(ISchemaValidator** subvalidators, SizeType count, bool matched = false) {
AddErrorArray(matched ? kValidateErrorOneOfMatch : kValidateErrorOneOf, subvalidators, count);
}
void Disallowed() {
currentError_.SetObject();
AddCurrentError(kValidateErrorNot);
}
#define RAPIDJSON_STRING_(name, ...) \
static const StringRefType& Get##name##String() {\
static const Ch s[] = { __VA_ARGS__, '\0' };\
static const StringRefType v(s, static_cast<SizeType>(sizeof(s) / sizeof(Ch) - 1)); \
return v;\
}
RAPIDJSON_STRING_(InstanceRef, 'i', 'n', 's', 't', 'a', 'n', 'c', 'e', 'R', 'e', 'f')
RAPIDJSON_STRING_(SchemaRef, 's', 'c', 'h', 'e', 'm', 'a', 'R', 'e', 'f')
RAPIDJSON_STRING_(Expected, 'e', 'x', 'p', 'e', 'c', 't', 'e', 'd')
RAPIDJSON_STRING_(Actual, 'a', 'c', 't', 'u', 'a', 'l')
RAPIDJSON_STRING_(Disallowed, 'd', 'i', 's', 'a', 'l', 'l', 'o', 'w', 'e', 'd')
RAPIDJSON_STRING_(Missing, 'm', 'i', 's', 's', 'i', 'n', 'g')
RAPIDJSON_STRING_(Errors, 'e', 'r', 'r', 'o', 'r', 's')
RAPIDJSON_STRING_(ErrorCode, 'e', 'r', 'r', 'o', 'r', 'C', 'o', 'd', 'e')
RAPIDJSON_STRING_(ErrorMessage, 'e', 'r', 'r', 'o', 'r', 'M', 'e', 's', 's', 'a', 'g', 'e')
RAPIDJSON_STRING_(Duplicates, 'd', 'u', 'p', 'l', 'i', 'c', 'a', 't', 'e', 's')
#undef RAPIDJSON_STRING_
#if RAPIDJSON_SCHEMA_VERBOSE
#define RAPIDJSON_SCHEMA_HANDLE_BEGIN_VERBOSE_() \
RAPIDJSON_MULTILINEMACRO_BEGIN\
*documentStack_.template Push<Ch>() = '\0';\
documentStack_.template Pop<Ch>(1);\
internal::PrintInvalidDocument(documentStack_.template Bottom<Ch>());\
RAPIDJSON_MULTILINEMACRO_END
#else
#define RAPIDJSON_SCHEMA_HANDLE_BEGIN_VERBOSE_()
#endif
#define RAPIDJSON_SCHEMA_HANDLE_BEGIN_(method, arg1)\
if (!valid_) return false; \
if ((!BeginValue() && !GetContinueOnErrors()) || (!CurrentSchema().method arg1 && !GetContinueOnErrors())) {\
RAPIDJSON_SCHEMA_HANDLE_BEGIN_VERBOSE_();\
return valid_ = false;\
}
#define RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(method, arg2)\
for (Context* context = schemaStack_.template Bottom<Context>(); context != schemaStack_.template End<Context>(); context++) {\
if (context->hasher)\
static_cast<HasherType*>(context->hasher)->method arg2;\
if (context->validators)\
for (SizeType i_ = 0; i_ < context->validatorCount; i_++)\
static_cast<GenericSchemaValidator*>(context->validators[i_])->method arg2;\
if (context->patternPropertiesValidators)\
for (SizeType i_ = 0; i_ < context->patternPropertiesValidatorCount; i_++)\
static_cast<GenericSchemaValidator*>(context->patternPropertiesValidators[i_])->method arg2;\
}
#define RAPIDJSON_SCHEMA_HANDLE_END_(method, arg2)\
valid_ = (EndValue() || GetContinueOnErrors()) && (!outputHandler_ || outputHandler_->method arg2);\
return valid_;
#define RAPIDJSON_SCHEMA_HANDLE_VALUE_(method, arg1, arg2) \
RAPIDJSON_SCHEMA_HANDLE_BEGIN_ (method, arg1);\
RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(method, arg2);\
RAPIDJSON_SCHEMA_HANDLE_END_ (method, arg2)
bool Null() { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Null, (CurrentContext()), ( )); }
bool Bool(bool b) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Bool, (CurrentContext(), b), (b)); }
bool Int(int i) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Int, (CurrentContext(), i), (i)); }
bool Uint(unsigned u) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Uint, (CurrentContext(), u), (u)); }
bool Int64(int64_t i) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Int64, (CurrentContext(), i), (i)); }
bool Uint64(uint64_t u) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Uint64, (CurrentContext(), u), (u)); }
bool Double(double d) { RAPIDJSON_SCHEMA_HANDLE_VALUE_(Double, (CurrentContext(), d), (d)); }
bool RawNumber(const Ch* str, SizeType length, bool copy)
{ RAPIDJSON_SCHEMA_HANDLE_VALUE_(String, (CurrentContext(), str, length, copy), (str, length, copy)); }
bool String(const Ch* str, SizeType length, bool copy)
{ RAPIDJSON_SCHEMA_HANDLE_VALUE_(String, (CurrentContext(), str, length, copy), (str, length, copy)); }
bool StartObject() {
RAPIDJSON_SCHEMA_HANDLE_BEGIN_(StartObject, (CurrentContext()));
RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(StartObject, ());
return valid_ = !outputHandler_ || outputHandler_->StartObject();
}
bool Key(const Ch* str, SizeType len, bool copy) {
if (!valid_) return false;
AppendToken(str, len);
if (!CurrentSchema().Key(CurrentContext(), str, len, copy) && !GetContinueOnErrors()) return valid_ = false;
RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(Key, (str, len, copy));
return valid_ = !outputHandler_ || outputHandler_->Key(str, len, copy);
}
bool EndObject(SizeType memberCount) {
if (!valid_) return false;
RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(EndObject, (memberCount));
if (!CurrentSchema().EndObject(CurrentContext(), memberCount) && !GetContinueOnErrors()) return valid_ = false;
RAPIDJSON_SCHEMA_HANDLE_END_(EndObject, (memberCount));
}
bool StartArray() {
RAPIDJSON_SCHEMA_HANDLE_BEGIN_(StartArray, (CurrentContext()));
RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(StartArray, ());
return valid_ = !outputHandler_ || outputHandler_->StartArray();
}
bool EndArray(SizeType elementCount) {
if (!valid_) return false;
RAPIDJSON_SCHEMA_HANDLE_PARALLEL_(EndArray, (elementCount));
if (!CurrentSchema().EndArray(CurrentContext(), elementCount) && !GetContinueOnErrors()) return valid_ = false;
RAPIDJSON_SCHEMA_HANDLE_END_(EndArray, (elementCount));
}
#undef RAPIDJSON_SCHEMA_HANDLE_BEGIN_VERBOSE_
#undef RAPIDJSON_SCHEMA_HANDLE_BEGIN_
#undef RAPIDJSON_SCHEMA_HANDLE_PARALLEL_
#undef RAPIDJSON_SCHEMA_HANDLE_VALUE_
// Implementation of ISchemaStateFactory<SchemaType>
virtual ISchemaValidator* CreateSchemaValidator(const SchemaType& root, const bool inheritContinueOnErrors) {
ISchemaValidator* sv = new (GetStateAllocator().Malloc(sizeof(GenericSchemaValidator))) GenericSchemaValidator(*schemaDocument_, root, documentStack_.template Bottom<char>(), documentStack_.GetSize(),
#if RAPIDJSON_SCHEMA_VERBOSE
depth_ + 1,
#endif
&GetStateAllocator());
sv->SetValidateFlags(inheritContinueOnErrors ? GetValidateFlags() : GetValidateFlags() & ~(unsigned)kValidateContinueOnErrorFlag);
return sv;
}
virtual void DestroySchemaValidator(ISchemaValidator* validator) {
GenericSchemaValidator* v = static_cast<GenericSchemaValidator*>(validator);
v->~GenericSchemaValidator();
StateAllocator::Free(v);
}
virtual void* CreateHasher() {
return new (GetStateAllocator().Malloc(sizeof(HasherType))) HasherType(&GetStateAllocator());
}
virtual uint64_t GetHashCode(void* hasher) {
return static_cast<HasherType*>(hasher)->GetHashCode();
}
virtual void DestroryHasher(void* hasher) {
HasherType* h = static_cast<HasherType*>(hasher);
h->~HasherType();
StateAllocator::Free(h);
}
virtual void* MallocState(size_t size) {
return GetStateAllocator().Malloc(size);
}
virtual void FreeState(void* p) {
StateAllocator::Free(p);
}
private:
typedef typename SchemaType::Context Context;
typedef GenericValue<UTF8<>, StateAllocator> HashCodeArray;
typedef internal::Hasher<EncodingType, StateAllocator> HasherType;
GenericSchemaValidator(
const SchemaDocumentType& schemaDocument,
const SchemaType& root,
const char* basePath, size_t basePathSize,
#if RAPIDJSON_SCHEMA_VERBOSE
unsigned depth,
#endif
StateAllocator* allocator = 0,
size_t schemaStackCapacity = kDefaultSchemaStackCapacity,
size_t documentStackCapacity = kDefaultDocumentStackCapacity)
:
schemaDocument_(&schemaDocument),
root_(root),
stateAllocator_(allocator),
ownStateAllocator_(0),
schemaStack_(allocator, schemaStackCapacity),
documentStack_(allocator, documentStackCapacity),
outputHandler_(0),
error_(kObjectType),
currentError_(),
missingDependents_(),
valid_(true),
flags_(kValidateDefaultFlags)
#if RAPIDJSON_SCHEMA_VERBOSE
, depth_(depth)
#endif
{
if (basePath && basePathSize)
memcpy(documentStack_.template Push<char>(basePathSize), basePath, basePathSize);
}
StateAllocator& GetStateAllocator() {
if (!stateAllocator_)
stateAllocator_ = ownStateAllocator_ = RAPIDJSON_NEW(StateAllocator)();
return *stateAllocator_;
}
bool GetContinueOnErrors() const {
return flags_ & kValidateContinueOnErrorFlag;
}
bool BeginValue() {
if (schemaStack_.Empty())
PushSchema(root_);
else {
if (CurrentContext().inArray)
internal::TokenHelper<internal::Stack<StateAllocator>, Ch>::AppendIndexToken(documentStack_, CurrentContext().arrayElementIndex);
if (!CurrentSchema().BeginValue(CurrentContext()) && !GetContinueOnErrors())
return false;
SizeType count = CurrentContext().patternPropertiesSchemaCount;
const SchemaType** sa = CurrentContext().patternPropertiesSchemas;
typename Context::PatternValidatorType patternValidatorType = CurrentContext().valuePatternValidatorType;
bool valueUniqueness = CurrentContext().valueUniqueness;
RAPIDJSON_ASSERT(CurrentContext().valueSchema);
PushSchema(*CurrentContext().valueSchema);
if (count > 0) {
CurrentContext().objectPatternValidatorType = patternValidatorType;
ISchemaValidator**& va = CurrentContext().patternPropertiesValidators;
SizeType& validatorCount = CurrentContext().patternPropertiesValidatorCount;
va = static_cast<ISchemaValidator**>(MallocState(sizeof(ISchemaValidator*) * count));
for (SizeType i = 0; i < count; i++)
va[validatorCount++] = CreateSchemaValidator(*sa[i], true); // Inherit continueOnError
}
CurrentContext().arrayUniqueness = valueUniqueness;
}
return true;
}
bool EndValue() {
if (!CurrentSchema().EndValue(CurrentContext()) && !GetContinueOnErrors())
return false;
#if RAPIDJSON_SCHEMA_VERBOSE
GenericStringBuffer<EncodingType> sb;
schemaDocument_->GetPointer(&CurrentSchema()).Stringify(sb);
*documentStack_.template Push<Ch>() = '\0';
documentStack_.template Pop<Ch>(1);
internal::PrintValidatorPointers(depth_, sb.GetString(), documentStack_.template Bottom<Ch>());
#endif
void* hasher = CurrentContext().hasher;
uint64_t h = hasher && CurrentContext().arrayUniqueness ? static_cast<HasherType*>(hasher)->GetHashCode() : 0;
PopSchema();
if (!schemaStack_.Empty()) {
Context& context = CurrentContext();
// Only check uniqueness if there is a hasher
if (hasher && context.valueUniqueness) {
HashCodeArray* a = static_cast<HashCodeArray*>(context.arrayElementHashCodes);
if (!a)
CurrentContext().arrayElementHashCodes = a = new (GetStateAllocator().Malloc(sizeof(HashCodeArray))) HashCodeArray(kArrayType);
for (typename HashCodeArray::ConstValueIterator itr = a->Begin(); itr != a->End(); ++itr)
if (itr->GetUint64() == h) {
DuplicateItems(static_cast<SizeType>(itr - a->Begin()), a->Size());
// Cleanup before returning if continuing
if (GetContinueOnErrors()) {
a->PushBack(h, GetStateAllocator());
while (!documentStack_.Empty() && *documentStack_.template Pop<Ch>(1) != '/');
}
RAPIDJSON_INVALID_KEYWORD_RETURN(kValidateErrorUniqueItems);
}
a->PushBack(h, GetStateAllocator());
}
}
// Remove the last token of document pointer
while (!documentStack_.Empty() && *documentStack_.template Pop<Ch>(1) != '/')
;
return true;
}
void AppendToken(const Ch* str, SizeType len) {
documentStack_.template Reserve<Ch>(1 + len * 2); // worst case all characters are escaped as two characters
*documentStack_.template PushUnsafe<Ch>() = '/';
for (SizeType i = 0; i < len; i++) {
if (str[i] == '~') {
*documentStack_.template PushUnsafe<Ch>() = '~';
*documentStack_.template PushUnsafe<Ch>() = '0';
}
else if (str[i] == '/') {
*documentStack_.template PushUnsafe<Ch>() = '~';
*documentStack_.template PushUnsafe<Ch>() = '1';
}
else
*documentStack_.template PushUnsafe<Ch>() = str[i];
}
}
RAPIDJSON_FORCEINLINE void PushSchema(const SchemaType& schema) { new (schemaStack_.template Push<Context>()) Context(*this, *this, &schema); }
RAPIDJSON_FORCEINLINE void PopSchema() {
Context* c = schemaStack_.template Pop<Context>(1);
if (HashCodeArray* a = static_cast<HashCodeArray*>(c->arrayElementHashCodes)) {
a->~HashCodeArray();
StateAllocator::Free(a);
}
c->~Context();
}
void AddErrorInstanceLocation(ValueType& result, bool parent) {
GenericStringBuffer<EncodingType> sb;
PointerType instancePointer = GetInvalidDocumentPointer();
((parent && instancePointer.GetTokenCount() > 0)
? PointerType(instancePointer.GetTokens(), instancePointer.GetTokenCount() - 1)
: instancePointer).StringifyUriFragment(sb);
ValueType instanceRef(sb.GetString(), static_cast<SizeType>(sb.GetSize() / sizeof(Ch)),
GetStateAllocator());
result.AddMember(GetInstanceRefString(), instanceRef, GetStateAllocator());
}
void AddErrorSchemaLocation(ValueType& result, PointerType schema = PointerType()) {
GenericStringBuffer<EncodingType> sb;
SizeType len = CurrentSchema().GetURI().GetStringLength();
if (len) memcpy(sb.Push(len), CurrentSchema().GetURI().GetString(), len * sizeof(Ch));
if (schema.GetTokenCount()) schema.StringifyUriFragment(sb);
else GetInvalidSchemaPointer().StringifyUriFragment(sb);
ValueType schemaRef(sb.GetString(), static_cast<SizeType>(sb.GetSize() / sizeof(Ch)),
GetStateAllocator());
result.AddMember(GetSchemaRefString(), schemaRef, GetStateAllocator());
}
void AddErrorCode(ValueType& result, const ValidateErrorCode code) {
result.AddMember(GetErrorCodeString(), code, GetStateAllocator());
}
void AddError(ValueType& keyword, ValueType& error) {
typename ValueType::MemberIterator member = error_.FindMember(keyword);
if (member == error_.MemberEnd())
error_.AddMember(keyword, error, GetStateAllocator());
else {
if (member->value.IsObject()) {
ValueType errors(kArrayType);
errors.PushBack(member->value, GetStateAllocator());
member->value = errors;
}
member->value.PushBack(error, GetStateAllocator());
}
}
void AddCurrentError(const ValidateErrorCode code, bool parent = false) {
AddErrorCode(currentError_, code);
AddErrorInstanceLocation(currentError_, parent);
AddErrorSchemaLocation(currentError_);
AddError(ValueType(SchemaType::GetValidateErrorKeyword(code), GetStateAllocator(), false).Move(), currentError_);
}
void MergeError(ValueType& other) {
for (typename ValueType::MemberIterator it = other.MemberBegin(), end = other.MemberEnd(); it != end; ++it) {
AddError(it->name, it->value);
}
}
void AddNumberError(const ValidateErrorCode code, ValueType& actual, const SValue& expected,
const typename SchemaType::ValueType& (*exclusive)() = 0) {
currentError_.SetObject();
currentError_.AddMember(GetActualString(), actual, GetStateAllocator());
currentError_.AddMember(GetExpectedString(), ValueType(expected, GetStateAllocator()).Move(), GetStateAllocator());
if (exclusive)
currentError_.AddMember(ValueType(exclusive(), GetStateAllocator()).Move(), true, GetStateAllocator());
AddCurrentError(code);
}
void AddErrorArray(const ValidateErrorCode code,
ISchemaValidator** subvalidators, SizeType count) {
ValueType errors(kArrayType);
for (SizeType i = 0; i < count; ++i)
errors.PushBack(static_cast<GenericSchemaValidator*>(subvalidators[i])->GetError(), GetStateAllocator());
currentError_.SetObject();
currentError_.AddMember(GetErrorsString(), errors, GetStateAllocator());
AddCurrentError(code);
}
const SchemaType& CurrentSchema() const { return *schemaStack_.template Top<Context>()->schema; }
Context& CurrentContext() { return *schemaStack_.template Top<Context>(); }
const Context& CurrentContext() const { return *schemaStack_.template Top<Context>(); }
static const size_t kDefaultSchemaStackCapacity = 1024;
static const size_t kDefaultDocumentStackCapacity = 256;
const SchemaDocumentType* schemaDocument_;
const SchemaType& root_;
StateAllocator* stateAllocator_;
StateAllocator* ownStateAllocator_;
internal::Stack<StateAllocator> schemaStack_; //!< stack to store the current path of schema (BaseSchemaType *)
internal::Stack<StateAllocator> documentStack_; //!< stack to store the current path of validating document (Ch)
OutputHandler* outputHandler_;
ValueType error_;
ValueType currentError_;
ValueType missingDependents_;
bool valid_;
unsigned flags_;
#if RAPIDJSON_SCHEMA_VERBOSE
unsigned depth_;
#endif
};
typedef GenericSchemaValidator<SchemaDocument> SchemaValidator;
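//! Illustrative sketch: validating a parsed document against a compiled schema.
/*! Example only, not part of the library source. Assumes \c Document and
\c StringBuffer from "document.h" and "stringbuffer.h"; the JSON literals are
placeholders.
\code
#include "rapidjson/document.h"
#include "rapidjson/schema.h"
#include "rapidjson/stringbuffer.h"
using namespace rapidjson;

Document sd;
sd.Parse("{\"type\":\"object\",\"required\":[\"name\"]}");
SchemaDocument schema(sd);          // compile the schema once, reuse many times

Document d;
d.Parse("{\"name\":\"rapidjson\"}");

SchemaValidator validator(schema);
if (!d.Accept(validator)) {         // traverse the document through the validator
    StringBuffer sb;
    validator.GetInvalidSchemaPointer().StringifyUriFragment(sb);
    // sb.GetString() now holds the JSON pointer of the violated subschema;
    // GetInvalidSchemaKeyword() / GetInvalidDocumentPointer() give more detail.
}
\endcode
*/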
///////////////////////////////////////////////////////////////////////////////
// SchemaValidatingReader
//! A helper class for parsing with validation.
/*!
This helper class is a functor, designed as a parameter of \ref GenericDocument::Populate().
\tparam parseFlags Combination of \ref ParseFlag.
\tparam InputStream Type of input stream, implementing Stream concept.
\tparam SourceEncoding Encoding of the input stream.
\tparam SchemaDocumentType Type of schema document.
\tparam StackAllocator Allocator type for stack.
*/
template <
unsigned parseFlags,
typename InputStream,
typename SourceEncoding,
typename SchemaDocumentType = SchemaDocument,
typename StackAllocator = CrtAllocator>
class SchemaValidatingReader {
public:
typedef typename SchemaDocumentType::PointerType PointerType;
typedef typename InputStream::Ch Ch;
typedef GenericValue<SourceEncoding, StackAllocator> ValueType;
//! Constructor
/*!
\param is Input stream.
\param sd Schema document.
*/
SchemaValidatingReader(InputStream& is, const SchemaDocumentType& sd) : is_(is), sd_(sd), invalidSchemaKeyword_(), invalidSchemaCode_(kValidateErrorNone), error_(kObjectType), isValid_(true) {}
template <typename Handler>
bool operator()(Handler& handler) {
GenericReader<SourceEncoding, typename SchemaDocumentType::EncodingType, StackAllocator> reader;
GenericSchemaValidator<SchemaDocumentType, Handler> validator(sd_, handler);
parseResult_ = reader.template Parse<parseFlags>(is_, validator);
isValid_ = validator.IsValid();
if (isValid_) {
invalidSchemaPointer_ = PointerType();
invalidSchemaKeyword_ = 0;
invalidDocumentPointer_ = PointerType();
error_.SetObject();
}
else {
invalidSchemaPointer_ = validator.GetInvalidSchemaPointer();
invalidSchemaKeyword_ = validator.GetInvalidSchemaKeyword();
invalidSchemaCode_ = validator.GetInvalidSchemaCode();
invalidDocumentPointer_ = validator.GetInvalidDocumentPointer();
error_.CopyFrom(validator.GetError(), allocator_);
}
return parseResult_;
}
const ParseResult& GetParseResult() const { return parseResult_; }
bool IsValid() const { return isValid_; }
const PointerType& GetInvalidSchemaPointer() const { return invalidSchemaPointer_; }
const Ch* GetInvalidSchemaKeyword() const { return invalidSchemaKeyword_; }
const PointerType& GetInvalidDocumentPointer() const { return invalidDocumentPointer_; }
const ValueType& GetError() const { return error_; }
ValidateErrorCode GetInvalidSchemaCode() const { return invalidSchemaCode_; }
private:
InputStream& is_;
const SchemaDocumentType& sd_;
ParseResult parseResult_;
PointerType invalidSchemaPointer_;
const Ch* invalidSchemaKeyword_;
PointerType invalidDocumentPointer_;
ValidateErrorCode invalidSchemaCode_;
StackAllocator allocator_;
ValueType error_;
bool isValid_;
};
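//! Illustrative sketch: parsing and validating in one pass with SchemaValidatingReader.
/*! Example only, not part of the library source. Assumes \c Document from
"document.h" and a previously compiled SchemaDocument \c schema; the input JSON
is a placeholder.
\code
#include "rapidjson/document.h"
#include "rapidjson/schema.h"
using namespace rapidjson;

StringStream is("{\"name\":\"rapidjson\"}");
SchemaValidatingReader<kParseDefaultFlags, StringStream, UTF8<> > reader(is, schema);

Document d;
d.Populate(reader);                 // parse + validate, then build the DOM

if (!reader.GetParseResult()) {
    // Either a JSON syntax error or a schema violation stopped the parse.
    if (!reader.IsValid()) {
        // Schema violation: query reader.GetInvalidSchemaPointer(),
        // reader.GetInvalidSchemaKeyword(), reader.GetInvalidDocumentPointer().
    }
}
\endcode
*/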
RAPIDJSON_NAMESPACE_END
RAPIDJSON_DIAG_POP
#endif // RAPIDJSON_SCHEMA_H_
// Tencent is pleased to support the open source community by making RapidJSON available.
//
// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
//
// Licensed under the MIT License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#include "rapidjson.h"
#ifndef RAPIDJSON_STREAM_H_
#define RAPIDJSON_STREAM_H_
#include "encodings.h"
RAPIDJSON_NAMESPACE_BEGIN
///////////////////////////////////////////////////////////////////////////////
// Stream
/*! \class rapidjson::Stream
\brief Concept for reading and writing characters.
For a read-only stream, there is no need to implement PutBegin(), Put(), Flush() and PutEnd().
For a write-only stream, only Put() and Flush() need to be implemented.
\code
concept Stream {
typename Ch; //!< Character type of the stream.
//! Read the current character from stream without moving the read cursor.
Ch Peek() const;
//! Read the current character from stream and move the read cursor to the next character.
Ch Take();
//! Get the current read cursor.
//! \return Number of characters read from start.
size_t Tell();
//! Begin writing operation at the current read pointer.
//! \return The begin writer pointer.
Ch* PutBegin();
//! Write a character.
void Put(Ch c);
//! Flush the buffer.
void Flush();
//! End the writing operation.
//! \param begin The begin write pointer returned by PutBegin().
//! \return Number of characters written.
size_t PutEnd(Ch* begin);
}
\endcode
*/
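//! Illustrative sketch: a minimal write-only stream satisfying the concept above.
/*! Example only, not part of the library source. It merely counts the characters
written; the read-side functions are omitted because a write-only stream only
needs Put() and Flush(). Such a stream should be usable wherever an output
stream is expected (e.g. with Writer), though this sketch is untested.
\code
struct CountingStream {
    typedef char Ch;
    CountingStream() : count_(0) {}
    void Put(Ch) { ++count_; }   // count instead of storing
    void Flush() {}
    size_t count_;
};
\endcode
*/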
//! Provides additional information for stream.
/*!
By using traits pattern, this type provides a default configuration for stream.
For custom stream, this type can be specialized for other configuration.
See TEST(Reader, CustomStringStream) in readertest.cpp for example.
*/
template<typename Stream>
struct StreamTraits {
//! Whether to make local copy of stream for optimization during parsing.
/*!
By default, for safety, streams do not use local copy optimization.
Stream that can be copied fast should specialize this, like StreamTraits<StringStream>.
*/
enum { copyOptimization = 0 };
};
//! Reserve n characters for writing to a stream.
template<typename Stream>
inline void PutReserve(Stream& stream, size_t count) {
(void)stream;
(void)count;
}
//! Write character to a stream, presuming buffer is reserved.
template<typename Stream>
inline void PutUnsafe(Stream& stream, typename Stream::Ch c) {
stream.Put(c);
}
//! Put N copies of a character to a stream.
template<typename Stream, typename Ch>
inline void PutN(Stream& stream, Ch c, size_t n) {
PutReserve(stream, n);
for (size_t i = 0; i < n; i++)
PutUnsafe(stream, c);
}
///////////////////////////////////////////////////////////////////////////////
// GenericStreamWrapper
//! A Stream Wrapper
/*! This stream wrapper forwards every received message to the wrapped
(origin) stream.
\note implements Stream concept
*/
#if defined(_MSC_VER) && _MSC_VER <= 1800
RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_OFF(4702) // unreachable code
RAPIDJSON_DIAG_OFF(4512) // assignment operator could not be generated
#endif
template <typename InputStream, typename Encoding = UTF8<> >
class GenericStreamWrapper {
public:
typedef typename Encoding::Ch Ch;
GenericStreamWrapper(InputStream& is): is_(is) {}
Ch Peek() const { return is_.Peek(); }
Ch Take() { return is_.Take(); }
size_t Tell() { return is_.Tell(); }
Ch* PutBegin() { return is_.PutBegin(); }
void Put(Ch ch) { is_.Put(ch); }
void Flush() { is_.Flush(); }
size_t PutEnd(Ch* ch) { return is_.PutEnd(ch); }
// wrapper for MemoryStream
const Ch* Peek4() const { return is_.Peek4(); }
// wrapper for AutoUTFInputStream
UTFType GetType() const { return is_.GetType(); }
bool HasBOM() const { return is_.HasBOM(); }
protected:
InputStream& is_;
};
#if defined(_MSC_VER) && _MSC_VER <= 1800
RAPIDJSON_DIAG_POP
#endif
///////////////////////////////////////////////////////////////////////////////
// StringStream
//! Read-only string stream.
/*! \note implements Stream concept
*/
template <typename Encoding>
struct GenericStringStream {
typedef typename Encoding::Ch Ch;
GenericStringStream(const Ch *src) : src_(src), head_(src) {}
Ch Peek() const { return *src_; }
Ch Take() { return *src_++; }
size_t Tell() const { return static_cast<size_t>(src_ - head_); }
Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; }
void Put(Ch) { RAPIDJSON_ASSERT(false); }
void Flush() { RAPIDJSON_ASSERT(false); }
size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; }
const Ch* src_; //!< Current read position.
const Ch* head_; //!< Original head of the string.
};
template <typename Encoding>
struct StreamTraits<GenericStringStream<Encoding> > {
enum { copyOptimization = 1 };
};
//! String stream with UTF8 encoding.
typedef GenericStringStream<UTF8<> > StringStream;
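//! Illustrative sketch: reading from an in-memory string with StringStream.
/*! Example only, not part of the library source. Assumes \c Document from
"document.h".
\code
#include "rapidjson/document.h"
using namespace rapidjson;

StringStream ss("{\"n\":1}");   // the string must stay alive while parsing
Document d;
d.ParseStream(ss);              // equivalent to d.Parse("{\"n\":1}")
\endcode
*/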
///////////////////////////////////////////////////////////////////////////////
// InsituStringStream
//! A read-write string stream.
/*! This string stream is particularly designed for in-situ parsing.
\note implements Stream concept
*/
template <typename Encoding>
struct GenericInsituStringStream {
typedef typename Encoding::Ch Ch;
GenericInsituStringStream(Ch *src) : src_(src), dst_(0), head_(src) {}
// Read
Ch Peek() { return *src_; }
Ch Take() { return *src_++; }
size_t Tell() { return static_cast<size_t>(src_ - head_); }
// Write
void Put(Ch c) { RAPIDJSON_ASSERT(dst_ != 0); *dst_++ = c; }
Ch* PutBegin() { return dst_ = src_; }
size_t PutEnd(Ch* begin) { return static_cast<size_t>(dst_ - begin); }
void Flush() {}
Ch* Push(size_t count) { Ch* begin = dst_; dst_ += count; return begin; }
void Pop(size_t count) { dst_ -= count; }
Ch* src_;
Ch* dst_;
Ch* head_;
};
template <typename Encoding>
struct StreamTraits<GenericInsituStringStream<Encoding> > {
enum { copyOptimization = 1 };
};
//! Insitu string stream with UTF8 encoding.
typedef GenericInsituStringStream<UTF8<> > InsituStringStream;
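//! Illustrative sketch: in-situ parsing with a writable buffer.
/*! Example only, not part of the library source. Assumes \c Document from
"document.h". In-situ parsing decodes strings directly inside the source buffer,
so the buffer must be mutable and must outlive the document.
\code
#include "rapidjson/document.h"
using namespace rapidjson;

char buffer[] = "{\"name\":\"rapidjson\"}";  // writable copy of the JSON text
Document d;
d.ParseInsitu(buffer);                       // uses InsituStringStream internally
\endcode
*/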
RAPIDJSON_NAMESPACE_END
#endif // RAPIDJSON_STREAM_H_
// Tencent is pleased to support the open source community by making RapidJSON available.
//
// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
//
// Licensed under the MIT License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef RAPIDJSON_STRINGBUFFER_H_
#define RAPIDJSON_STRINGBUFFER_H_
#include "stream.h"
#include "internal/stack.h"
#if RAPIDJSON_HAS_CXX11_RVALUE_REFS
#include <utility> // std::move
#endif
#if defined(__clang__)
RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_OFF(c++98-compat)
#endif
RAPIDJSON_NAMESPACE_BEGIN
//! Represents an in-memory output stream.
/*!
\tparam Encoding Encoding of the stream.
\tparam Allocator type for allocating memory buffer.
\note implements Stream concept
*/
template <typename Encoding, typename Allocator = CrtAllocator>
class GenericStringBuffer {
public:
typedef typename Encoding::Ch Ch;
GenericStringBuffer(Allocator* allocator = 0, size_t capacity = kDefaultCapacity) : stack_(allocator, capacity) {}
#if RAPIDJSON_HAS_CXX11_RVALUE_REFS
GenericStringBuffer(GenericStringBuffer&& rhs) : stack_(std::move(rhs.stack_)) {}
GenericStringBuffer& operator=(GenericStringBuffer&& rhs) {
if (&rhs != this)
stack_ = std::move(rhs.stack_);
return *this;
}
#endif
void Put(Ch c) { *stack_.template Push<Ch>() = c; }
void PutUnsafe(Ch c) { *stack_.template PushUnsafe<Ch>() = c; }
void Flush() {}
void Clear() { stack_.Clear(); }
void ShrinkToFit() {
// Push and pop a null terminator. This is safe.
*stack_.template Push<Ch>() = '\0';
stack_.ShrinkToFit();
stack_.template Pop<Ch>(1);
}
void Reserve(size_t count) { stack_.template Reserve<Ch>(count); }
Ch* Push(size_t count) { return stack_.template Push<Ch>(count); }
Ch* PushUnsafe(size_t count) { return stack_.template PushUnsafe<Ch>(count); }
void Pop(size_t count) { stack_.template Pop<Ch>(count); }
const Ch* GetString() const {
// Push and pop a null terminator. This is safe.
*stack_.template Push<Ch>() = '\0';
stack_.template Pop<Ch>(1);
return stack_.template Bottom<Ch>();
}
//! Get the size of string in bytes in the string buffer.
size_t GetSize() const { return stack_.GetSize(); }
//! Get the length of string in Ch in the string buffer.
size_t GetLength() const { return stack_.GetSize() / sizeof(Ch); }
static const size_t kDefaultCapacity = 256;
mutable internal::Stack<Allocator> stack_;
private:
// Prohibit copy constructor & assignment operator.
GenericStringBuffer(const GenericStringBuffer&);
GenericStringBuffer& operator=(const GenericStringBuffer&);
};
//! String buffer with UTF8 encoding
typedef GenericStringBuffer<UTF8<> > StringBuffer;
template<typename Encoding, typename Allocator>
inline void PutReserve(GenericStringBuffer<Encoding, Allocator>& stream, size_t count) {
stream.Reserve(count);
}
template<typename Encoding, typename Allocator>
inline void PutUnsafe(GenericStringBuffer<Encoding, Allocator>& stream, typename Encoding::Ch c) {
stream.PutUnsafe(c);
}
//! Implement specialized version of PutN() with memset() for better performance.
template<>
inline void PutN(GenericStringBuffer<UTF8<> >& stream, char c, size_t n) {
std::memset(stream.stack_.Push<char>(n), c, n * sizeof(c));
}
RAPIDJSON_NAMESPACE_END
#if defined(__clang__)
RAPIDJSON_DIAG_POP
#endif
#endif // RAPIDJSON_STRINGBUFFER_H_
// Tencent is pleased to support the open source community by making RapidJSON available.
//
// (C) Copyright IBM Corporation 2021
//
// Licensed under the MIT License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef RAPIDJSON_URI_H_
#define RAPIDJSON_URI_H_
#include "internal/strfunc.h"
#if defined(__clang__)
RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_OFF(c++98-compat)
#elif defined(_MSC_VER)
RAPIDJSON_DIAG_OFF(4512) // assignment operator could not be generated
#endif
RAPIDJSON_NAMESPACE_BEGIN
///////////////////////////////////////////////////////////////////////////////
// GenericUri
template <typename ValueType, typename Allocator=CrtAllocator>
class GenericUri {
public:
typedef typename ValueType::Ch Ch;
#if RAPIDJSON_HAS_STDSTRING
typedef std::basic_string<Ch> String;
#endif
//! Constructors
GenericUri(Allocator* allocator = 0) : uri_(), base_(), scheme_(), auth_(), path_(), query_(), frag_(), allocator_(allocator), ownAllocator_() {
}
GenericUri(const Ch* uri, SizeType len, Allocator* allocator = 0) : uri_(), base_(), scheme_(), auth_(), path_(), query_(), frag_(), allocator_(allocator), ownAllocator_() {
Parse(uri, len);
}
GenericUri(const Ch* uri, Allocator* allocator = 0) : uri_(), base_(), scheme_(), auth_(), path_(), query_(), frag_(), allocator_(allocator), ownAllocator_() {
Parse(uri, internal::StrLen<Ch>(uri));
}
// Use with specializations of GenericValue
template<typename T> GenericUri(const T& uri, Allocator* allocator = 0) : uri_(), base_(), scheme_(), auth_(), path_(), query_(), frag_(), allocator_(allocator), ownAllocator_() {
const Ch* u = uri.template Get<const Ch*>(); // TypeHelper from document.h
Parse(u, internal::StrLen<Ch>(u));
}
#if RAPIDJSON_HAS_STDSTRING
GenericUri(const String& uri, Allocator* allocator = 0) : uri_(), base_(), scheme_(), auth_(), path_(), query_(), frag_(), allocator_(allocator), ownAllocator_() {
Parse(uri.c_str(), internal::StrLen<Ch>(uri.c_str()));
}
#endif
//! Copy constructor
GenericUri(const GenericUri& rhs) : uri_(), base_(), scheme_(), auth_(), path_(), query_(), frag_(), allocator_(), ownAllocator_() {
*this = rhs;
}
//! Copy constructor
GenericUri(const GenericUri& rhs, Allocator* allocator) : uri_(), base_(), scheme_(), auth_(), path_(), query_(), frag_(), allocator_(allocator), ownAllocator_() {
*this = rhs;
}
//! Destructor.
~GenericUri() {
Free();
RAPIDJSON_DELETE(ownAllocator_);
}
//! Assignment operator
GenericUri& operator=(const GenericUri& rhs) {
if (this != &rhs) {
// Do not delete ownAllocator
Free();
Allocate(rhs.GetStringLength());
auth_ = CopyPart(scheme_, rhs.scheme_, rhs.GetSchemeStringLength());
path_ = CopyPart(auth_, rhs.auth_, rhs.GetAuthStringLength());
query_ = CopyPart(path_, rhs.path_, rhs.GetPathStringLength());
frag_ = CopyPart(query_, rhs.query_, rhs.GetQueryStringLength());
base_ = CopyPart(frag_, rhs.frag_, rhs.GetFragStringLength());
uri_ = CopyPart(base_, rhs.base_, rhs.GetBaseStringLength());
CopyPart(uri_, rhs.uri_, rhs.GetStringLength());
}
return *this;
}
//! Getters
// Use with specializations of GenericValue
template<typename T> void Get(T& uri, Allocator& allocator) {
uri.template Set<const Ch*>(this->GetString(), allocator); // TypeHelper from document.h
}
const Ch* GetString() const { return uri_; }
SizeType GetStringLength() const { return uri_ == 0 ? 0 : internal::StrLen<Ch>(uri_); }
const Ch* GetBaseString() const { return base_; }
SizeType GetBaseStringLength() const { return base_ == 0 ? 0 : internal::StrLen<Ch>(base_); }
const Ch* GetSchemeString() const { return scheme_; }
SizeType GetSchemeStringLength() const { return scheme_ == 0 ? 0 : internal::StrLen<Ch>(scheme_); }
const Ch* GetAuthString() const { return auth_; }
SizeType GetAuthStringLength() const { return auth_ == 0 ? 0 : internal::StrLen<Ch>(auth_); }
const Ch* GetPathString() const { return path_; }
SizeType GetPathStringLength() const { return path_ == 0 ? 0 : internal::StrLen<Ch>(path_); }
const Ch* GetQueryString() const { return query_; }
SizeType GetQueryStringLength() const { return query_ == 0 ? 0 : internal::StrLen<Ch>(query_); }
const Ch* GetFragString() const { return frag_; }
SizeType GetFragStringLength() const { return frag_ == 0 ? 0 : internal::StrLen<Ch>(frag_); }
#if RAPIDJSON_HAS_STDSTRING
static String Get(const GenericUri& uri) { return String(uri.GetString(), uri.GetStringLength()); }
static String GetBase(const GenericUri& uri) { return String(uri.GetBaseString(), uri.GetBaseStringLength()); }
static String GetScheme(const GenericUri& uri) { return String(uri.GetSchemeString(), uri.GetSchemeStringLength()); }
static String GetAuth(const GenericUri& uri) { return String(uri.GetAuthString(), uri.GetAuthStringLength()); }
static String GetPath(const GenericUri& uri) { return String(uri.GetPathString(), uri.GetPathStringLength()); }
static String GetQuery(const GenericUri& uri) { return String(uri.GetQueryString(), uri.GetQueryStringLength()); }
static String GetFrag(const GenericUri& uri) { return String(uri.GetFragString(), uri.GetFragStringLength()); }
#endif
//! Equality operators
bool operator==(const GenericUri& rhs) const {
return Match(rhs, true);
}
bool operator!=(const GenericUri& rhs) const {
return !Match(rhs, true);
}
bool Match(const GenericUri& uri, bool full = true) const {
Ch* s1;
Ch* s2;
if (full) {
s1 = uri_;
s2 = uri.uri_;
} else {
s1 = base_;
s2 = uri.base_;
}
if (s1 == s2) return true;
if (s1 == 0 || s2 == 0) return false;
return internal::StrCmp<Ch>(s1, s2) == 0;
}
//! Resolve this URI against another (base) URI in accordance with URI resolution rules.
// See https://tools.ietf.org/html/rfc3986
// Use for resolving an id or $ref with an in-scope id.
// Returns a new GenericUri for the resolved URI.
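// Worked example (RFC 3986, section 5.4; illustrative only): resolving the reference
// "../g" against the base "http://a/b/c/d;p?q" yields "http://a/b/g".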
GenericUri Resolve(const GenericUri& baseuri, Allocator* allocator = 0) {
GenericUri resuri;
resuri.allocator_ = allocator;
// Ensure enough space for combining paths
resuri.Allocate(GetStringLength() + baseuri.GetStringLength() + 1); // + 1 for joining slash
if (!(GetSchemeStringLength() == 0)) {
// Use all of this URI
resuri.auth_ = CopyPart(resuri.scheme_, scheme_, GetSchemeStringLength());
resuri.path_ = CopyPart(resuri.auth_, auth_, GetAuthStringLength());
resuri.query_ = CopyPart(resuri.path_, path_, GetPathStringLength());
resuri.frag_ = CopyPart(resuri.query_, query_, GetQueryStringLength());
resuri.RemoveDotSegments();
} else {
// Use the base scheme
resuri.auth_ = CopyPart(resuri.scheme_, baseuri.scheme_, baseuri.GetSchemeStringLength());
if (!(GetAuthStringLength() == 0)) {
// Use this auth, path, query
resuri.path_ = CopyPart(resuri.auth_, auth_, GetAuthStringLength());
resuri.query_ = CopyPart(resuri.path_, path_, GetPathStringLength());
resuri.frag_ = CopyPart(resuri.query_, query_, GetQueryStringLength());
resuri.RemoveDotSegments();
} else {
// Use the base auth
resuri.path_ = CopyPart(resuri.auth_, baseuri.auth_, baseuri.GetAuthStringLength());
if (GetPathStringLength() == 0) {
// Use the base path
resuri.query_ = CopyPart(resuri.path_, baseuri.path_, baseuri.GetPathStringLength());
if (GetQueryStringLength() == 0) {
// Use the base query
resuri.frag_ = CopyPart(resuri.query_, baseuri.query_, baseuri.GetQueryStringLength());
} else {
// Use this query
resuri.frag_ = CopyPart(resuri.query_, query_, GetQueryStringLength());
}
} else {
if (path_[0] == '/') {
// Absolute path - use all of this path
resuri.query_ = CopyPart(resuri.path_, path_, GetPathStringLength());
resuri.RemoveDotSegments();
} else {
// Relative path - append this path to base path after base path's last slash
size_t pos = 0;
if (!(baseuri.GetAuthStringLength() == 0) && baseuri.GetPathStringLength() == 0) {
resuri.path_[pos] = '/';
pos++;
}
size_t lastslashpos = baseuri.GetPathStringLength();
while (lastslashpos > 0) {
if (baseuri.path_[lastslashpos - 1] == '/') break;
lastslashpos--;
}
std::memcpy(&resuri.path_[pos], baseuri.path_, lastslashpos * sizeof(Ch));
pos += lastslashpos;
resuri.query_ = CopyPart(&resuri.path_[pos], path_, GetPathStringLength());
resuri.RemoveDotSegments();
}
// Use this query
resuri.frag_ = CopyPart(resuri.query_, query_, GetQueryStringLength());
}
}
}
// Always use this frag
resuri.base_ = CopyPart(resuri.frag_, frag_, GetFragStringLength());
// Re-constitute base_ and uri_
resuri.SetBase();
resuri.uri_ = resuri.base_ + resuri.GetBaseStringLength() + 1;
resuri.SetUri();
return resuri;
}
//! Get the allocator of this GenericUri.
Allocator& GetAllocator() { return *allocator_; }
private:
// Allocate memory for a URI
// Returns total amount allocated
std::size_t Allocate(std::size_t len) {
// Create own allocator if user did not supply.
if (!allocator_)
ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator)();
// Allocate one block containing each part of the URI (5) plus base plus full URI, all null terminated.
// Order: scheme, auth, path, query, frag, base, uri
size_t total = (3 * len + 7) * sizeof(Ch);
scheme_ = static_cast<Ch*>(allocator_->Malloc(total));
*scheme_ = '\0';
auth_ = scheme_ + 1;
*auth_ = '\0';
path_ = auth_ + 1;
*path_ = '\0';
query_ = path_ + 1;
*query_ = '\0';
frag_ = query_ + 1;
*frag_ = '\0';
base_ = frag_ + 1;
*base_ = '\0';
uri_ = base_ + 1;
*uri_ = '\0';
return total;
}
// Free memory for a URI
void Free() {
if (scheme_) {
Allocator::Free(scheme_);
scheme_ = 0;
}
}
// Parse a URI into constituent scheme, authority, path, query, & fragment parts
// Supports URIs that match regex ^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))? as per
// https://tools.ietf.org/html/rfc3986
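// Worked example (illustrative): "http://user@host:8080/a/b?x=1#top" is split into
// scheme_ = "http:", auth_ = "//user@host:8080", path_ = "/a/b", query_ = "?x=1", frag_ = "#top"
// (each delimiter stays with its part, matching the member comments at the end of this class).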
void Parse(const Ch* uri, std::size_t len) {
std::size_t start = 0, pos1 = 0, pos2 = 0;
Allocate(len);
// Look for scheme ([^:/?#]+):)?
if (start < len) {
while (pos1 < len) {
if (uri[pos1] == ':') break;
pos1++;
}
if (pos1 != len) {
while (pos2 < len) {
if (uri[pos2] == '/') break;
if (uri[pos2] == '?') break;
if (uri[pos2] == '#') break;
pos2++;
}
if (pos1 < pos2) {
pos1++;
std::memcpy(scheme_, &uri[start], pos1 * sizeof(Ch));
scheme_[pos1] = '\0';
start = pos1;
}
}
}
// Look for auth (//([^/?#]*))?
auth_ = scheme_ + GetSchemeStringLength() + 1;
*auth_ = '\0';
if (start < len - 1 && uri[start] == '/' && uri[start + 1] == '/') {
pos2 = start + 2;
while (pos2 < len) {
if (uri[pos2] == '/') break;
if (uri[pos2] == '?') break;
if (uri[pos2] == '#') break;
pos2++;
}
std::memcpy(auth_, &uri[start], (pos2 - start) * sizeof(Ch));
auth_[pos2 - start] = '\0';
start = pos2;
}
// Look for path ([^?#]*)
path_ = auth_ + GetAuthStringLength() + 1;
*path_ = '\0';
if (start < len) {
pos2 = start;
while (pos2 < len) {
if (uri[pos2] == '?') break;
if (uri[pos2] == '#') break;
pos2++;
}
if (start != pos2) {
std::memcpy(path_, &uri[start], (pos2 - start) * sizeof(Ch));
path_[pos2 - start] = '\0';
if (path_[0] == '/')
RemoveDotSegments(); // absolute path - normalize
start = pos2;
}
}
// Look for query (\?([^#]*))?
query_ = path_ + GetPathStringLength() + 1;
*query_ = '\0';
if (start < len && uri[start] == '?') {
pos2 = start + 1;
while (pos2 < len) {
if (uri[pos2] == '#') break;
pos2++;
}
if (start != pos2) {
std::memcpy(query_, &uri[start], (pos2 - start) * sizeof(Ch));
query_[pos2 - start] = '\0';
start = pos2;
}
}
// Look for fragment (#(.*))?
frag_ = query_ + GetQueryStringLength() + 1;
*frag_ = '\0';
if (start < len && uri[start] == '#') {
std::memcpy(frag_, &uri[start], (len - start) * sizeof(Ch));
frag_[len - start] = '\0';
}
// Re-constitute base_ and uri_
base_ = frag_ + GetFragStringLength() + 1;
SetBase();
uri_ = base_ + GetBaseStringLength() + 1;
SetUri();
}
// Reconstitute base
void SetBase() {
Ch* next = base_;
std::memcpy(next, scheme_, GetSchemeStringLength() * sizeof(Ch));
next+= GetSchemeStringLength();
std::memcpy(next, auth_, GetAuthStringLength() * sizeof(Ch));
next+= GetAuthStringLength();
std::memcpy(next, path_, GetPathStringLength() * sizeof(Ch));
next+= GetPathStringLength();
std::memcpy(next, query_, GetQueryStringLength() * sizeof(Ch));
next+= GetQueryStringLength();
*next = '\0';
}
// Reconstitute uri
void SetUri() {
Ch* next = uri_;
std::memcpy(next, base_, GetBaseStringLength() * sizeof(Ch));
next+= GetBaseStringLength();
std::memcpy(next, frag_, GetFragStringLength() * sizeof(Ch));
next+= GetFragStringLength();
*next = '\0';
}
// Copy a part from one GenericUri to another
// Return the pointer to the next part to be copied to
Ch* CopyPart(Ch* to, Ch* from, std::size_t len) {
RAPIDJSON_ASSERT(to != 0);
RAPIDJSON_ASSERT(from != 0);
std::memcpy(to, from, len * sizeof(Ch));
to[len] = '\0';
Ch* next = to + len + 1;
return next;
}
// Remove . and .. segments from the path_ member.
// https://tools.ietf.org/html/rfc3986
// This is done in place as we are only removing segments.
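// Worked example (illustrative): "/a/b/c/./../../g" collapses to "/a/g", matching the
// remove_dot_segments algorithm of RFC 3986 section 5.2.4.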
void RemoveDotSegments() {
std::size_t pathlen = GetPathStringLength();
std::size_t pathpos = 0; // Position in path_
std::size_t newpos = 0; // Position in new path_
// Loop through each segment in original path_
while (pathpos < pathlen) {
// Get next segment, bounded by '/' or end
size_t slashpos = 0;
while ((pathpos + slashpos) < pathlen) {
if (path_[pathpos + slashpos] == '/') break;
slashpos++;
}
// Check for .. and . segments
if (slashpos == 2 && path_[pathpos] == '.' && path_[pathpos + 1] == '.') {
// Backup a .. segment in the new path_
// We expect to find a previously added slash at the end or nothing
RAPIDJSON_ASSERT(newpos == 0 || path_[newpos - 1] == '/');
size_t lastslashpos = newpos;
// Make sure we don't go beyond the start segment
if (lastslashpos > 1) {
// Find the next to last slash and back up to it
lastslashpos--;
while (lastslashpos > 0) {
if (path_[lastslashpos - 1] == '/') break;
lastslashpos--;
}
// Set the new path_ position
newpos = lastslashpos;
}
} else if (slashpos == 1 && path_[pathpos] == '.') {
// Discard . segment, leaves new path_ unchanged
} else {
// Move any other kind of segment to the new path_
RAPIDJSON_ASSERT(newpos <= pathpos);
std::memmove(&path_[newpos], &path_[pathpos], slashpos * sizeof(Ch));
newpos += slashpos;
// Add slash if not at end
if ((pathpos + slashpos) < pathlen) {
path_[newpos] = '/';
newpos++;
}
}
// Move to next segment
pathpos += slashpos + 1;
}
path_[newpos] = '\0';
}
Ch* uri_; // Everything
Ch* base_; // Everything except fragment
Ch* scheme_; // Includes the :
Ch* auth_; // Includes the //
Ch* path_; // Absolute if starts with /
Ch* query_; // Includes the ?
Ch* frag_; // Includes the #
Allocator* allocator_; //!< The current allocator. It is either user-supplied or equal to ownAllocator_.
Allocator* ownAllocator_; //!< Allocator owned by this Uri.
};
//! GenericUri for Value (UTF-8, default allocator).
typedef GenericUri<Value> Uri;
RAPIDJSON_NAMESPACE_END
#if defined(__clang__)
RAPIDJSON_DIAG_POP
#endif
#endif // RAPIDJSON_URI_H_
// Tencent is pleased to support the open source community by making RapidJSON available.
//
// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip.
//
// Licensed under the MIT License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef RAPIDJSON_WRITER_H_
#define RAPIDJSON_WRITER_H_
#include "stream.h"
#include "internal/clzll.h"
#include "internal/meta.h"
#include "internal/stack.h"
#include "internal/strfunc.h"
#include "internal/dtoa.h"
#include "internal/itoa.h"
#include "stringbuffer.h"
#include <new> // placement new
#if defined(RAPIDJSON_SIMD) && defined(_MSC_VER)
#include <intrin.h>
#pragma intrinsic(_BitScanForward)
#endif
#ifdef RAPIDJSON_SSE42
#include <nmmintrin.h>
#elif defined(RAPIDJSON_SSE2)
#include <emmintrin.h>
#elif defined(RAPIDJSON_NEON)
#include <arm_neon.h>
#endif
#ifdef __clang__
RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_OFF(padded)
RAPIDJSON_DIAG_OFF(unreachable-code)
RAPIDJSON_DIAG_OFF(c++98-compat)
#elif defined(_MSC_VER)
RAPIDJSON_DIAG_PUSH
RAPIDJSON_DIAG_OFF(4127) // conditional expression is constant
#endif
RAPIDJSON_NAMESPACE_BEGIN
///////////////////////////////////////////////////////////////////////////////
// WriteFlag
/*! \def RAPIDJSON_WRITE_DEFAULT_FLAGS
\ingroup RAPIDJSON_CONFIG
\brief User-defined kWriteDefaultFlags definition.
User can define this as any \c WriteFlag combinations.
*/
#ifndef RAPIDJSON_WRITE_DEFAULT_FLAGS
#define RAPIDJSON_WRITE_DEFAULT_FLAGS kWriteNoFlags
#endif
//! Combination of writeFlags
enum WriteFlag {
kWriteNoFlags = 0, //!< No flags are set.
kWriteValidateEncodingFlag = 1, //!< Validate encoding of JSON strings.
kWriteNanAndInfFlag = 2, //!< Allow writing of Infinity, -Infinity and NaN.
kWriteDefaultFlags = RAPIDJSON_WRITE_DEFAULT_FLAGS //!< Default write flags. Can be customized by defining RAPIDJSON_WRITE_DEFAULT_FLAGS
};
//! JSON writer
/*! Writer implements the concept Handler.
It generates JSON text by events to an output os.
A user may programmatically call the functions of a writer to generate JSON text.
On the other side, a writer can also be passed to objects that generate events,
for example Reader::Parse() and Document::Accept().
\tparam OutputStream Type of output stream.
\tparam SourceEncoding Encoding of source string.
\tparam TargetEncoding Encoding of output stream.
\tparam StackAllocator Type of allocator for allocating memory of stack.
\note implements Handler concept
*/
template<typename OutputStream, typename SourceEncoding = UTF8<>, typename TargetEncoding = UTF8<>, typename StackAllocator = CrtAllocator, unsigned writeFlags = kWriteDefaultFlags>
class Writer {
public:
typedef typename SourceEncoding::Ch Ch;
static const int kDefaultMaxDecimalPlaces = 324;
//! Constructor
/*! \param os Output stream.
\param stackAllocator User supplied allocator. If it is null, it will create a private one.
\param levelDepth Initial capacity of stack.
*/
explicit
Writer(OutputStream& os, StackAllocator* stackAllocator = 0, size_t levelDepth = kDefaultLevelDepth) :
os_(&os), level_stack_(stackAllocator, levelDepth * sizeof(Level)), maxDecimalPlaces_(kDefaultMaxDecimalPlaces), hasRoot_(false) {}
explicit
Writer(StackAllocator* allocator = 0, size_t levelDepth = kDefaultLevelDepth) :
os_(0), level_stack_(allocator, levelDepth * sizeof(Level)), maxDecimalPlaces_(kDefaultMaxDecimalPlaces), hasRoot_(false) {}
#if RAPIDJSON_HAS_CXX11_RVALUE_REFS
Writer(Writer&& rhs) :
os_(rhs.os_), level_stack_(std::move(rhs.level_stack_)), maxDecimalPlaces_(rhs.maxDecimalPlaces_), hasRoot_(rhs.hasRoot_) {
rhs.os_ = 0;
}
#endif
//! Reset the writer with a new stream.
/*!
This function resets the writer with a new stream and default settings,
in order to make a Writer object reusable for outputting multiple JSONs.
\param os New output stream.
\code
Writer<OutputStream> writer(os1);
writer.StartObject();
// ...
writer.EndObject();
writer.Reset(os2);
writer.StartObject();
// ...
writer.EndObject();
\endcode
*/
void Reset(OutputStream& os) {
os_ = &os;
hasRoot_ = false;
level_stack_.Clear();
}
//! Checks whether the output is a complete JSON.
/*!
A complete JSON has a complete root object or array.
*/
bool IsComplete() const {
return hasRoot_ && level_stack_.Empty();
}
int GetMaxDecimalPlaces() const {
return maxDecimalPlaces_;
}
//! Sets the maximum number of decimal places for double output.
/*!
This setting truncates the output to the specified number of decimal places.
For example,
\code
writer.SetMaxDecimalPlaces(3);
writer.StartArray();
writer.Double(0.12345); // "0.123"
writer.Double(0.0001); // "0.0"
writer.Double(1.234567890123456e30); // "1.234567890123456e30" (do not truncate significand for positive exponent)
writer.Double(1.23e-4); // "0.0" (do truncate significand for negative exponent)
writer.EndArray();
\endcode
The default setting does not truncate any decimal places. You can restore this setting by calling
\code
writer.SetMaxDecimalPlaces(Writer::kDefaultMaxDecimalPlaces);
\endcode
*/
void SetMaxDecimalPlaces(int maxDecimalPlaces) {
maxDecimalPlaces_ = maxDecimalPlaces;
}
/*!@name Implementation of Handler
\see Handler
*/
//@{
bool Null() { Prefix(kNullType); return EndValue(WriteNull()); }
bool Bool(bool b) { Prefix(b ? kTrueType : kFalseType); return EndValue(WriteBool(b)); }
bool Int(int i) { Prefix(kNumberType); return EndValue(WriteInt(i)); }
bool Uint(unsigned u) { Prefix(kNumberType); return EndValue(WriteUint(u)); }
bool Int64(int64_t i64) { Prefix(kNumberType); return EndValue(WriteInt64(i64)); }
bool Uint64(uint64_t u64) { Prefix(kNumberType); return EndValue(WriteUint64(u64)); }
//! Writes the given \c double value to the stream
/*!
\param d The value to be written.
\return Whether it succeeded.
*/
bool Double(double d) { Prefix(kNumberType); return EndValue(WriteDouble(d)); }
bool RawNumber(const Ch* str, SizeType length, bool copy = false) {
RAPIDJSON_ASSERT(str != 0);
(void)copy;
Prefix(kNumberType);
return EndValue(WriteString(str, length));
}
bool String(const Ch* str, SizeType length, bool copy = false) {
RAPIDJSON_ASSERT(str != 0);
(void)copy;
Prefix(kStringType);
return EndValue(WriteString(str, length));
}
#if RAPIDJSON_HAS_STDSTRING
bool String(const std::basic_string<Ch>& str) {
return String(str.data(), SizeType(str.size()));
}
#endif
bool StartObject() {
Prefix(kObjectType);
new (level_stack_.template Push<Level>()) Level(false);
return WriteStartObject();
}
bool Key(const Ch* str, SizeType length, bool copy = false) { return String(str, length, copy); }
#if RAPIDJSON_HAS_STDSTRING
bool Key(const std::basic_string<Ch>& str)
{
return Key(str.data(), SizeType(str.size()));
}
#endif
bool EndObject(SizeType memberCount = 0) {
(void)memberCount;
RAPIDJSON_ASSERT(level_stack_.GetSize() >= sizeof(Level)); // not inside an Object
RAPIDJSON_ASSERT(!level_stack_.template Top<Level>()->inArray); // currently inside an Array, not Object
RAPIDJSON_ASSERT(0 == level_stack_.template Top<Level>()->valueCount % 2); // Object has a Key without a Value
level_stack_.template Pop<Level>(1);
return EndValue(WriteEndObject());
}
bool StartArray() {
Prefix(kArrayType);
new (level_stack_.template Push<Level>()) Level(true);
return WriteStartArray();
}
bool EndArray(SizeType elementCount = 0) {
(void)elementCount;
RAPIDJSON_ASSERT(level_stack_.GetSize() >= sizeof(Level));
RAPIDJSON_ASSERT(level_stack_.template Top<Level>()->inArray);
level_stack_.template Pop<Level>(1);
return EndValue(WriteEndArray());
}
//@}
/*! @name Convenience extensions */
//@{
//! Simpler but slower overload.
bool String(const Ch* const& str) { return String(str, internal::StrLen(str)); }
bool Key(const Ch* const& str) { return Key(str, internal::StrLen(str)); }
//@}
//! Write a raw JSON value.
/*!
For user to write a stringified JSON as a value.
\param json A well-formed JSON value. It should not contain null character within [0, length - 1] range.
\param length Length of the json.
\param type Type of the root of json.
*/
bool RawValue(const Ch* json, size_t length, Type type) {
RAPIDJSON_ASSERT(json != 0);
Prefix(type);
return EndValue(WriteRawValue(json, length));
}
//! Flush the output stream.
/*!
Allows the user to flush the output stream immediately.
*/
void Flush() {
os_->Flush();
}
static const size_t kDefaultLevelDepth = 32;
protected:
//! Information for each nested level
struct Level {
Level(bool inArray_) : valueCount(0), inArray(inArray_) {}
size_t valueCount; //!< number of values in this level
bool inArray; //!< true if in array, otherwise in object
};
bool WriteNull() {
PutReserve(*os_, 4);
PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'u'); PutUnsafe(*os_, 'l'); PutUnsafe(*os_, 'l'); return true;
}
bool WriteBool(bool b) {
if (b) {
PutReserve(*os_, 4);
PutUnsafe(*os_, 't'); PutUnsafe(*os_, 'r'); PutUnsafe(*os_, 'u'); PutUnsafe(*os_, 'e');
}
else {
PutReserve(*os_, 5);
PutUnsafe(*os_, 'f'); PutUnsafe(*os_, 'a'); PutUnsafe(*os_, 'l'); PutUnsafe(*os_, 's'); PutUnsafe(*os_, 'e');
}
return true;
}
bool WriteInt(int i) {
char buffer[11];
const char* end = internal::i32toa(i, buffer);
PutReserve(*os_, static_cast<size_t>(end - buffer));
for (const char* p = buffer; p != end; ++p)
PutUnsafe(*os_, static_cast<typename OutputStream::Ch>(*p));
return true;
}
bool WriteUint(unsigned u) {
char buffer[10];
const char* end = internal::u32toa(u, buffer);
PutReserve(*os_, static_cast<size_t>(end - buffer));
for (const char* p = buffer; p != end; ++p)
PutUnsafe(*os_, static_cast<typename OutputStream::Ch>(*p));
return true;
}
bool WriteInt64(int64_t i64) {
char buffer[21];
const char* end = internal::i64toa(i64, buffer);
PutReserve(*os_, static_cast<size_t>(end - buffer));
for (const char* p = buffer; p != end; ++p)
PutUnsafe(*os_, static_cast<typename OutputStream::Ch>(*p));
return true;
}
bool WriteUint64(uint64_t u64) {
char buffer[20];
char* end = internal::u64toa(u64, buffer);
PutReserve(*os_, static_cast<size_t>(end - buffer));
for (char* p = buffer; p != end; ++p)
PutUnsafe(*os_, static_cast<typename OutputStream::Ch>(*p));
return true;
}
bool WriteDouble(double d) {
if (internal::Double(d).IsNanOrInf()) {
if (!(writeFlags & kWriteNanAndInfFlag))
return false;
if (internal::Double(d).IsNan()) {
PutReserve(*os_, 3);
PutUnsafe(*os_, 'N'); PutUnsafe(*os_, 'a'); PutUnsafe(*os_, 'N');
return true;
}
if (internal::Double(d).Sign()) {
PutReserve(*os_, 9);
PutUnsafe(*os_, '-');
}
else
PutReserve(*os_, 8);
PutUnsafe(*os_, 'I'); PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'f');
PutUnsafe(*os_, 'i'); PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'i'); PutUnsafe(*os_, 't'); PutUnsafe(*os_, 'y');
return true;
}
char buffer[25];
char* end = internal::dtoa(d, buffer, maxDecimalPlaces_);
PutReserve(*os_, static_cast<size_t>(end - buffer));
for (char* p = buffer; p != end; ++p)
PutUnsafe(*os_, static_cast<typename OutputStream::Ch>(*p));
return true;
}
bool WriteString(const Ch* str, SizeType length) {
static const typename OutputStream::Ch hexDigits[16] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };
static const char escape[256] = {
#define Z16 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
//0 1 2 3 4 5 6 7 8 9 A B C D E F
'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'b', 't', 'n', 'u', 'f', 'r', 'u', 'u', // 00
'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u', // 10
0, 0, '"', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 20
Z16, Z16, // 30~4F
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,'\\', 0, 0, 0, // 50
Z16, Z16, Z16, Z16, Z16, Z16, Z16, Z16, Z16, Z16 // 60~FF
#undef Z16
};
if (TargetEncoding::supportUnicode)
PutReserve(*os_, 2 + length * 6); // "\uxxxx..."
else
PutReserve(*os_, 2 + length * 12); // "\uxxxx\uyyyy..."
PutUnsafe(*os_, '\"');
GenericStringStream<SourceEncoding> is(str);
while (ScanWriteUnescapedString(is, length)) {
const Ch c = is.Peek();
if (!TargetEncoding::supportUnicode && static_cast<unsigned>(c) >= 0x80) {
// Unicode escaping
unsigned codepoint;
if (RAPIDJSON_UNLIKELY(!SourceEncoding::Decode(is, &codepoint)))
return false;
PutUnsafe(*os_, '\\');
PutUnsafe(*os_, 'u');
if (codepoint <= 0xD7FF || (codepoint >= 0xE000 && codepoint <= 0xFFFF)) {
PutUnsafe(*os_, hexDigits[(codepoint >> 12) & 15]);
PutUnsafe(*os_, hexDigits[(codepoint >> 8) & 15]);
PutUnsafe(*os_, hexDigits[(codepoint >> 4) & 15]);
PutUnsafe(*os_, hexDigits[(codepoint ) & 15]);
}
else {
RAPIDJSON_ASSERT(codepoint >= 0x010000 && codepoint <= 0x10FFFF);
// Surrogate pair
unsigned s = codepoint - 0x010000;
unsigned lead = (s >> 10) + 0xD800;
unsigned trail = (s & 0x3FF) + 0xDC00;
PutUnsafe(*os_, hexDigits[(lead >> 12) & 15]);
PutUnsafe(*os_, hexDigits[(lead >> 8) & 15]);
PutUnsafe(*os_, hexDigits[(lead >> 4) & 15]);
PutUnsafe(*os_, hexDigits[(lead ) & 15]);
PutUnsafe(*os_, '\\');
PutUnsafe(*os_, 'u');
PutUnsafe(*os_, hexDigits[(trail >> 12) & 15]);
PutUnsafe(*os_, hexDigits[(trail >> 8) & 15]);
PutUnsafe(*os_, hexDigits[(trail >> 4) & 15]);
PutUnsafe(*os_, hexDigits[(trail ) & 15]);
}
}
else if ((sizeof(Ch) == 1 || static_cast<unsigned>(c) < 256) && RAPIDJSON_UNLIKELY(escape[static_cast<unsigned char>(c)])) {
is.Take();
PutUnsafe(*os_, '\\');
PutUnsafe(*os_, static_cast<typename OutputStream::Ch>(escape[static_cast<unsigned char>(c)]));
if (escape[static_cast<unsigned char>(c)] == 'u') {
PutUnsafe(*os_, '0');
PutUnsafe(*os_, '0');
PutUnsafe(*os_, hexDigits[static_cast<unsigned char>(c) >> 4]);
PutUnsafe(*os_, hexDigits[static_cast<unsigned char>(c) & 0xF]);
}
}
else if (RAPIDJSON_UNLIKELY(!(writeFlags & kWriteValidateEncodingFlag ?
Transcoder<SourceEncoding, TargetEncoding>::Validate(is, *os_) :
Transcoder<SourceEncoding, TargetEncoding>::TranscodeUnsafe(is, *os_))))
return false;
}
PutUnsafe(*os_, '\"');
return true;
}
bool ScanWriteUnescapedString(GenericStringStream<SourceEncoding>& is, size_t length) {
return RAPIDJSON_LIKELY(is.Tell() < length);
}
bool WriteStartObject() { os_->Put('{'); return true; }
bool WriteEndObject() { os_->Put('}'); return true; }
bool WriteStartArray() { os_->Put('['); return true; }
bool WriteEndArray() { os_->Put(']'); return true; }
bool WriteRawValue(const Ch* json, size_t length) {
PutReserve(*os_, length);
GenericStringStream<SourceEncoding> is(json);
while (RAPIDJSON_LIKELY(is.Tell() < length)) {
RAPIDJSON_ASSERT(is.Peek() != '\0');
if (RAPIDJSON_UNLIKELY(!(writeFlags & kWriteValidateEncodingFlag ?
Transcoder<SourceEncoding, TargetEncoding>::Validate(is, *os_) :
Transcoder<SourceEncoding, TargetEncoding>::TranscodeUnsafe(is, *os_))))
return false;
}
return true;
}
void Prefix(Type type) {
(void)type;
if (RAPIDJSON_LIKELY(level_stack_.GetSize() != 0)) { // this value is not at root
Level* level = level_stack_.template Top<Level>();
if (level->valueCount > 0) {
if (level->inArray)
os_->Put(','); // add comma if it is not the first element in array
else // in object
os_->Put((level->valueCount % 2 == 0) ? ',' : ':');
}
if (!level->inArray && level->valueCount % 2 == 0)
RAPIDJSON_ASSERT(type == kStringType); // if it's in object, then even number should be a name
level->valueCount++;
}
else {
RAPIDJSON_ASSERT(!hasRoot_); // There should be one and only one root.
hasRoot_ = true;
}
}
// Flush the value if it is the top level one.
bool EndValue(bool ret) {
if (RAPIDJSON_UNLIKELY(level_stack_.Empty())) // end of json text
Flush();
return ret;
}
OutputStream* os_;
internal::Stack<StackAllocator> level_stack_;
int maxDecimalPlaces_;
bool hasRoot_;
private:
// Prohibit copy constructor & assignment operator.
Writer(const Writer&);
Writer& operator=(const Writer&);
};
// Full specialization for StringBuffer to prevent memory copying
template<>
inline bool Writer<StringBuffer>::WriteInt(int i) {
char *buffer = os_->Push(11);
const char* end = internal::i32toa(i, buffer);
os_->Pop(static_cast<size_t>(11 - (end - buffer)));
return true;
}
template<>
inline bool Writer<StringBuffer>::WriteUint(unsigned u) {
char *buffer = os_->Push(10);
const char* end = internal::u32toa(u, buffer);
os_->Pop(static_cast<size_t>(10 - (end - buffer)));
return true;
}
template<>
inline bool Writer<StringBuffer>::WriteInt64(int64_t i64) {
char *buffer = os_->Push(21);
const char* end = internal::i64toa(i64, buffer);
os_->Pop(static_cast<size_t>(21 - (end - buffer)));
return true;
}
template<>
inline bool Writer<StringBuffer>::WriteUint64(uint64_t u) {
char *buffer = os_->Push(20);
const char* end = internal::u64toa(u, buffer);
os_->Pop(static_cast<size_t>(20 - (end - buffer)));
return true;
}
template<>
inline bool Writer<StringBuffer>::WriteDouble(double d) {
if (internal::Double(d).IsNanOrInf()) {
// Note: This code path can only be reached if (RAPIDJSON_WRITE_DEFAULT_FLAGS & kWriteNanAndInfFlag).
if (!(kWriteDefaultFlags & kWriteNanAndInfFlag))
return false;
if (internal::Double(d).IsNan()) {
PutReserve(*os_, 3);
PutUnsafe(*os_, 'N'); PutUnsafe(*os_, 'a'); PutUnsafe(*os_, 'N');
return true;
}
if (internal::Double(d).Sign()) {
PutReserve(*os_, 9);
PutUnsafe(*os_, '-');
}
else
PutReserve(*os_, 8);
PutUnsafe(*os_, 'I'); PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'f');
PutUnsafe(*os_, 'i'); PutUnsafe(*os_, 'n'); PutUnsafe(*os_, 'i'); PutUnsafe(*os_, 't'); PutUnsafe(*os_, 'y');
return true;
}
char *buffer = os_->Push(25);
char* end = internal::dtoa(d, buffer, maxDecimalPlaces_);
os_->Pop(static_cast<size_t>(25 - (end - buffer)));
return true;
}
#if defined(RAPIDJSON_SSE2) || defined(RAPIDJSON_SSE42)
template<>
inline bool Writer<StringBuffer>::ScanWriteUnescapedString(StringStream& is, size_t length) {
if (length < 16)
return RAPIDJSON_LIKELY(is.Tell() < length);
if (!RAPIDJSON_LIKELY(is.Tell() < length))
return false;
const char* p = is.src_;
const char* end = is.head_ + length;
const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
const char* endAligned = reinterpret_cast<const char*>(reinterpret_cast<size_t>(end) & static_cast<size_t>(~15));
if (nextAligned > end)
return true;
while (p != nextAligned)
if (*p < 0x20 || *p == '\"' || *p == '\\') {
is.src_ = p;
return RAPIDJSON_LIKELY(is.Tell() < length);
}
else
os_->PutUnsafe(*p++);
// The rest of string using SIMD
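// (Each 16-byte block is compared against '"', '\\' and the 0x00..0x1F control range;
// _mm_movemask_epi8 turns the comparison result into a bitmask whose lowest set bit
// marks the first character that still needs escaping.)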
static const char dquote[16] = { '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"', '\"' };
static const char bslash[16] = { '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\', '\\' };
static const char space[16] = { 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F, 0x1F };
const __m128i dq = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&dquote[0]));
const __m128i bs = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&bslash[0]));
const __m128i sp = _mm_loadu_si128(reinterpret_cast<const __m128i *>(&space[0]));
for (; p != endAligned; p += 16) {
const __m128i s = _mm_load_si128(reinterpret_cast<const __m128i *>(p));
const __m128i t1 = _mm_cmpeq_epi8(s, dq);
const __m128i t2 = _mm_cmpeq_epi8(s, bs);
const __m128i t3 = _mm_cmpeq_epi8(_mm_max_epu8(s, sp), sp); // s < 0x20 <=> max(s, 0x1F) == 0x1F
const __m128i x = _mm_or_si128(_mm_or_si128(t1, t2), t3);
unsigned short r = static_cast<unsigned short>(_mm_movemask_epi8(x));
if (RAPIDJSON_UNLIKELY(r != 0)) { // some of the characters need escaping
SizeType len;
#ifdef _MSC_VER // Find the index of first escaped
unsigned long offset;
_BitScanForward(&offset, r);
len = offset;
#else
len = static_cast<SizeType>(__builtin_ffs(r) - 1);
#endif
char* q = reinterpret_cast<char*>(os_->PushUnsafe(len));
for (size_t i = 0; i < len; i++)
q[i] = p[i];
p += len;
break;
}
_mm_storeu_si128(reinterpret_cast<__m128i *>(os_->PushUnsafe(16)), s);
}
is.src_ = p;
return RAPIDJSON_LIKELY(is.Tell() < length);
}
#elif defined(RAPIDJSON_NEON)
template<>
inline bool Writer<StringBuffer>::ScanWriteUnescapedString(StringStream& is, size_t length) {
if (length < 16)
return RAPIDJSON_LIKELY(is.Tell() < length);
if (!RAPIDJSON_LIKELY(is.Tell() < length))
return false;
const char* p = is.src_;
const char* end = is.head_ + length;
const char* nextAligned = reinterpret_cast<const char*>((reinterpret_cast<size_t>(p) + 15) & static_cast<size_t>(~15));
const char* endAligned = reinterpret_cast<const char*>(reinterpret_cast<size_t>(end) & static_cast<size_t>(~15));
if (nextAligned > end)
return true;
while (p != nextAligned)
if (*p < 0x20 || *p == '\"' || *p == '\\') {
is.src_ = p;
return RAPIDJSON_LIKELY(is.Tell() < length);
}
else
os_->PutUnsafe(*p++);
// The rest of string using SIMD
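// (Same idea as the SSE path above: vceqq_u8/vcltq_u8 flag the bytes that need escaping;
// the lanes are byte-reversed so that clzll on each 64-bit half locates the first flagged byte.)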
const uint8x16_t s0 = vmovq_n_u8('"');
const uint8x16_t s1 = vmovq_n_u8('\\');
const uint8x16_t s2 = vmovq_n_u8('\b');
const uint8x16_t s3 = vmovq_n_u8(32);
for (; p != endAligned; p += 16) {
const uint8x16_t s = vld1q_u8(reinterpret_cast<const uint8_t *>(p));
uint8x16_t x = vceqq_u8(s, s0);
x = vorrq_u8(x, vceqq_u8(s, s1));
x = vorrq_u8(x, vceqq_u8(s, s2));
x = vorrq_u8(x, vcltq_u8(s, s3));
x = vrev64q_u8(x); // Rev in 64
uint64_t low = vgetq_lane_u64(vreinterpretq_u64_u8(x), 0); // extract
uint64_t high = vgetq_lane_u64(vreinterpretq_u64_u8(x), 1); // extract
SizeType len = 0;
bool escaped = false;
if (low == 0) {
if (high != 0) {
uint32_t lz = internal::clzll(high);
len = 8 + (lz >> 3);
escaped = true;
}
} else {
uint32_t lz = internal::clzll(low);
len = lz >> 3;
escaped = true;
}
if (RAPIDJSON_UNLIKELY(escaped)) { // some of the characters need escaping
char* q = reinterpret_cast<char*>(os_->PushUnsafe(len));
for (size_t i = 0; i < len; i++)
q[i] = p[i];
p += len;
break;
}
vst1q_u8(reinterpret_cast<uint8_t *>(os_->PushUnsafe(16)), s);
}
is.src_ = p;
return RAPIDJSON_LIKELY(is.Tell() < length);
}
#endif // RAPIDJSON_NEON
RAPIDJSON_NAMESPACE_END
#if defined(_MSC_VER) || defined(__clang__)
RAPIDJSON_DIAG_POP
#endif
#endif // RAPIDJSON_WRITER_H_
/**
* `decode.c' - b64
*
* copyright (c) 2014 joseph werle
*
*
* + 'encode.c' - b64
* (RYT: This source file has been joined from encode.c and decode.c)
*/
/*
From https://github.com/littlstar/b64.c/blob/master/LICENSE :
The MIT License (MIT)
Copyright (c) 2014 Little Star Media, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#include <stdio.h>
#include <stdlib.h>
#include <ctype.h>
#include <string> // for the std::string convenience wrappers below (RYT)
#include "b64/b64.h"
#ifdef b64_USE_CUSTOM_MALLOC
extern void* b64_malloc(size_t);
#endif
#ifdef b64_USE_CUSTOM_REALLOC
extern void* b64_realloc(void*, size_t);
#endif
// RYT start
std::string base64_encode(const std::string& bin)
{
char* enc = b64_encode((const unsigned char*) bin.c_str(), bin.length());
std::string tmp(enc);
free(enc);
return tmp;
}
std::string base64_decode(const std::string& b64)
{
size_t len;
unsigned char* chbin = b64_decode_ex(b64.c_str(), b64.length(), &len);
std::string bin((char*) chbin, len);
free(chbin);
return bin;
}
// RYT end
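// Usage sketch for the two wrappers above (illustration only, not called anywhere
// in this file; the payload bytes are made-up values):
//
//     std::string wire = base64_encode(std::string("\x01\x02payload", 9));
//     std::string back = base64_decode(wire);   // 'back' equals the original 9 bytes
//
// Both wrappers carry an explicit length alongside the data, so arbitrary binary
// content (including embedded NUL bytes) survives the round trip. Note that they
// release the C buffers with free(), which matches the default allocator; with
// b64_USE_CUSTOM_MALLOC defined, the matching custom deallocator would be needed.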
unsigned char *
b64_decode (const char *src, size_t len) {
return b64_decode_ex(src, len, NULL);
}
unsigned char *
b64_decode_ex (const char *src, size_t len, size_t *decsize) {
int i = 0;
int j = 0;
int l = 0;
size_t size = 0;
unsigned char *dec = NULL;
unsigned char buf[3];
unsigned char tmp[4];
// alloc
dec = (unsigned char *) b64_malloc(1);
if (NULL == dec) { return NULL; }
// parse until end of source
while (len--) {
// break if char is `=' or not base64 char
if ('=' == src[j]) { break; }
// RYT start
// (... but silently skip whitespace / line breaks ...)
if ((' ' == src[j]) || ('\t' == src[j]) || ('\n' == src[j]) || ('\r' == src[j])) { j++; continue; }
// RYT end
if (!(isalnum(src[j]) || '+' == src[j] || '/' == src[j])) { break; }
// read up to 4 bytes at a time into `tmp'
tmp[i++] = src[j++];
// if 4 bytes read then decode into `buf'
if (4 == i) {
// translate values in `tmp' from table
for (i = 0; i < 4; ++i) {
// find translation char in `b64_table'
for (l = 0; l < 64; ++l) {
if (tmp[i] == b64_table[l]) {
tmp[i] = l;
break;
}
}
}
// decode
buf[0] = (tmp[0] << 2) + ((tmp[1] & 0x30) >> 4);
buf[1] = ((tmp[1] & 0xf) << 4) + ((tmp[2] & 0x3c) >> 2);
buf[2] = ((tmp[2] & 0x3) << 6) + tmp[3];
// write decoded buffer to `dec'
dec = (unsigned char *) b64_realloc(dec, size + 3);
if (dec != NULL){
for (i = 0; i < 3; ++i) {
dec[size++] = buf[i];
}
} else {
return NULL;
}
// reset
i = 0;
}
}
// remainder
if (i > 0) {
// fill `tmp' with `\0' at most 4 times
for (j = i; j < 4; ++j) {
tmp[j] = '\0';
}
// translate remainder
for (j = 0; j < 4; ++j) {
// find translation char in `b64_table'
for (l = 0; l < 64; ++l) {
if (tmp[j] == b64_table[l]) {
tmp[j] = l;
break;
}
}
}
// decode remainder
buf[0] = (tmp[0] << 2) + ((tmp[1] & 0x30) >> 4);
buf[1] = ((tmp[1] & 0xf) << 4) + ((tmp[2] & 0x3c) >> 2);
buf[2] = ((tmp[2] & 0x3) << 6) + tmp[3];
    // write the remaining decoded bytes to `dec'
dec = (unsigned char *) b64_realloc(dec, size + (i - 1));
if (dec != NULL){
for (j = 0; (j < i - 1); ++j) {
dec[size++] = buf[j];
}
} else {
return NULL;
}
}
  // Make sure we have enough space to add the terminating '\0' character.
dec = (unsigned char *) b64_realloc(dec, size + 1);
if (dec != NULL){
dec[size] = '\0';
} else {
return NULL;
}
  // Return the size of the decoded data if requested.
if (decsize != NULL) {
*decsize = size;
}
return dec;
}
char *
b64_encode (const unsigned char *src, size_t len) {
int i = 0;
int j = 0;
char *enc = NULL;
size_t size = 0;
unsigned char buf[4];
unsigned char tmp[3];
// alloc
enc = (char *) b64_malloc(1);
if (NULL == enc) { return NULL; }
// parse until end of source
while (len--) {
// read up to 3 bytes at a time into `tmp'
tmp[i++] = *(src++);
// if 3 bytes read then encode into `buf'
if (3 == i) {
buf[0] = (tmp[0] & 0xfc) >> 2;
buf[1] = ((tmp[0] & 0x03) << 4) + ((tmp[1] & 0xf0) >> 4);
buf[2] = ((tmp[1] & 0x0f) << 2) + ((tmp[2] & 0xc0) >> 6);
buf[3] = tmp[2] & 0x3f;
      // allocate 4 new bytes for `enc' and
// then translate each encoded buffer
// part by index from the base 64 index table
// into `enc' unsigned char array
enc = (char *) b64_realloc(enc, size + 4);
for (i = 0; i < 4; ++i) {
enc[size++] = b64_table[buf[i]];
}
// reset index
i = 0;
}
}
// remainder
if (i > 0) {
// fill `tmp' with `\0' at most 3 times
for (j = i; j < 3; ++j) {
tmp[j] = '\0';
}
// perform same codec as above
buf[0] = (tmp[0] & 0xfc) >> 2;
buf[1] = ((tmp[0] & 0x03) << 4) + ((tmp[1] & 0xf0) >> 4);
buf[2] = ((tmp[1] & 0x0f) << 2) + ((tmp[2] & 0xc0) >> 6);
buf[3] = tmp[2] & 0x3f;
// perform same write to `enc` with new allocation
for (j = 0; (j < i + 1); ++j) {
enc = (char *) b64_realloc(enc, size + 1);
enc[size++] = b64_table[buf[j]];
}
// while there is still a remainder
// append `=' to `enc'
while ((i++ < 3)) {
enc = (char *) b64_realloc(enc, size + 1);
enc[size++] = '=';
}
}
  // Make sure we have enough space to add the terminating '\0' character.
enc = (char *) b64_realloc(enc, size + 1);
enc[size] = '\0';
return enc;
}
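// Size relationship, for orientation: every full 3-byte input group becomes 4 output
// characters, and a trailing remainder is padded with '='. For example, a 5-byte input
// yields ceil(5/3) * 4 = 8 characters, the last one being a single '='. b64_decode_ex()
// above reverses this, so *decsize ends up at the original byte count again.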
/*
* This file is part of IPAACA, the
* "Incremental Processing Architecture
* for Artificial Conversational Agents".
*
* Copyright (c) 2009-2022 Social Cognitive Systems Group
* (formerly the Sociable Agents Group)
* CITEC, Bielefeld University
*
* http://opensource.cit-ec.de/projects/ipaaca/
* http://purl.org/net/ipaaca
*
* This file may be licensed under the terms of of the
* GNU Lesser General Public License Version 3 (the ``LGPL''),
* or (at your option) any later version.
*
* Software distributed under the License is distributed
* on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
* express or implied. See the LGPL for the specific language
* governing rights and limitations.
*
* You should have received a copy of the LGPL along with this
* program. If not, go to http://www.gnu.org/licenses/lgpl.html
* or write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The development of this software was supported by the
* Excellence Cluster EXC 277 Cognitive Interaction Technology.
* The Excellence Cluster EXC 277 is a grant of the Deutsche
* Forschungsgemeinschaft (DFG) in the context of the German
* Excellence Initiative.
*/
/**
* \file ipaaca-backend-mqtt.cc
*
* \brief Source file for the MQTT backend
*
* \author Ramin Yaghoubzadeh Torky (ryaghoubzadeh@uni-bielefeld.de)
* \date January, 2019
*/
#include <ipaaca/ipaaca.h>
#include <thread>
#include <chrono>
namespace ipaaca {
#include <ipaaca/ipaaca-backend-mqtt.h>
namespace backend {
namespace mqtt {
// The following is a required static library initialization hook.
// This way, the backend registers itself in the global store simply by being
// selectively compiled in (i.e. without any changes to the core code and without
// a plugin loader).
// The back-end name is taken from the one provided in the BackEnd constructor.
IPAACA_EXPORT static bool __initialize_mqtt_backend = BackEndLibrary::get()->register_backend(MQTTBackEnd::get());
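// For illustration only (the names below are hypothetical and do not exist in this
// code base): an additional backend would make itself available with the very same
// one-liner, e.g.
//
//     IPAACA_EXPORT static bool __initialize_foo_backend =
//             BackEndLibrary::get()->register_backend(FooBackEnd::get());
//
// provided FooBackEnd derives from BackEnd and passes its unique name ("foo") to the
// BackEnd constructor, mirroring MQTTBackEnd below.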
// MQTTBackEnd{{{
// The backend class is the interface to the rest of the IPAACA library,
// which does not know any of the implementation details here.
// It is available via its (unique) given name (here "mqtt", just below)
IPAACA_EXPORT MQTTBackEnd::MQTTBackEnd()
: BackEnd("mqtt") {
}
IPAACA_EXPORT BackEnd::ptr MQTTBackEnd::get() {
static ptr backend_singleton;
if (!backend_singleton) {
::mosqpp::lib_init();
backend_singleton = std::shared_ptr<MQTTBackEnd>(new MQTTBackEnd());
}
return backend_singleton;
}
IPAACA_EXPORT Informer::ptr MQTTBackEnd::createInformer(const std::string& scope)
{
auto res = std::make_shared<MQTTInformer>(generate_client_id(), scope, get_global_config());
res->wait_live();
// TODO wait for it to come live?
return res;
}
IPAACA_EXPORT Listener::ptr MQTTBackEnd::createListener(const std::string& scope, InputBuffer* buf)
{
auto res = std::make_shared<MQTTListener>(generate_client_id(), scope, buf, get_global_config());
res->wait_live();
// TODO wait for it to come live?
return res;
}
IPAACA_EXPORT LocalServer::ptr MQTTBackEnd::createLocalServer(const std::string& scope, OutputBuffer* buf)
{
auto res = std::make_shared<MQTTLocalServer>(generate_client_id(), scope, buf, get_global_config());
res->wait_live();
// TODO wait for it to come live?
return res;
}
IPAACA_EXPORT RemoteServer::ptr MQTTBackEnd::createRemoteServer(const std::string& scope)
{
auto res = std::make_shared<MQTTRemoteServer>(generate_client_id(), scope, get_global_config());
res->wait_live();
// TODO wait for it to come live?
return res;
}
IPAACA_EXPORT void MQTTBackEnd::teardown()
{
IPAACA_DEBUG("MQTTBackEnd sleeping for 1 sec before teardown, for messages in transit.")
std::this_thread::sleep_for(std::chrono::seconds(1));
}
//}}}
//
// Internal implementation follows
//
// ParticipantCore{{{
IPAACA_EXPORT ParticipantCore::ParticipantCore()
: _running(false), _live(false)
{
}
IPAACA_EXPORT void ParticipantCore::signal_live() {
IPAACA_DEBUG("Notifying to wake up an async MQTT session (now live)")
_live = true;
_condvar.notify_one();
}
IPAACA_EXPORT bool ParticipantCore::wait_live(long timeout_milliseconds) {
IPAACA_DEBUG("Waiting for an MQTT session to come live")
std::unique_lock<std::mutex> lock(_condvar_mutex);
// mqtt handlers will notify this after connect or subscribe (depending on the subclass)
auto success = _condvar.wait_for(lock, std::chrono::milliseconds(timeout_milliseconds), [this]{return this->_live;});
if (!success) {
IPAACA_ERROR("Backend timeout: failed to go live")
return false; // TODO throw here or in construction wrapper (below)
}
return true;
}
//}}}
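// For orientation, the signal/wait pairing in this backend is as follows:
// MQTTInformer calls signal_live() from on_connect(), while MQTTListener,
// MQTTLocalServer and MQTTRemoteServer call it from on_subscribe() once the broker
// has granted the subscription; the factory methods in MQTTBackEnd block in
// wait_live() until that happens or the timeout expires.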
// MQTTParticipant{{{
IPAACA_EXPORT int MQTTParticipant::get_next_mid() {
static int _curmid = 0;
_curmid++;
return _curmid;
}
IPAACA_EXPORT MQTTParticipant::MQTTParticipant(const std::string& client_id, const std::string& scope, Config::ptr config)
: ParticipantCore(), ::mosqpp::mosquittopp(client_id.c_str(), true), _scope(scope)
{
//threaded_set(true);
_client_id = client_id;
// get connection parameters from config
if (config) {
host = config->get_with_default_and_warning<std::string>("transport.mqtt.host", "localhost");
std::cout << "HOST from config: " << host << std::endl;
port = config->get_with_default_and_warning<int>("transport.mqtt.port", 1883);
keepalive = config->get_with_default<int>("transport.mqtt.keepalive", 60);
} else {
host = "localhost";
port = 1883;
keepalive = 60;
IPAACA_ERROR("No Config provided in MQTT backend, using defaults: host=localhost port=1883 keepalive=60")
}
IPAACA_DEBUG("Created MQTTParticipant on " << host << ":" << port << " for scope " << _scope << " with prepared client id " << _client_id)
}
IPAACA_EXPORT void MQTTParticipant::connect_and_background()
{
static const char* mosquitto_reasons[] = {
//"MOSQ_ERR_CONN_PENDING" is -1
"MOSQ_ERR_SUCCESS",
"MOSQ_ERR_NOMEM",
"MOSQ_ERR_PROTOCOL",
"MOSQ_ERR_INVAL",
"MOSQ_ERR_NO_CONN",
"MOSQ_ERR_CONN_REFUSED",
"MOSQ_ERR_NOT_FOUND",
"MOSQ_ERR_CONN_LOST",
"MOSQ_ERR_TLS",
"MOSQ_ERR_PAYLOAD_SIZE",
"MOSQ_ERR_NOT_SUPPORTED",
"MOSQ_ERR_AUTH",
"MOSQ_ERR_ACL_DENIED",
"MOSQ_ERR_UNKNOWN",
"MOSQ_ERR_ERRNO",
"MOSQ_ERR_EAI",
"MOSQ_ERR_PROXY"
};
int res = connect(host.c_str(), port, keepalive);
loop_start();
if (res!=0) {
const char* reason = "unknown";
if (res==-1) {
reason = "MOSQ_ERR_CONN_PENDING";
} else if ((res>0) && (res<17)) {
reason = mosquitto_reasons[res];
}
IPAACA_ERROR("MQTT connect (for scope " << _scope << ") returned an error " << res << " (mosquitto's " << reason << ") - please double-check parameters")
if (res==14) {
IPAACA_ERROR("The underlying system error: errno " << errno << " (" << strerror(errno) << ")")
}
throw BackEndConnectionFailedError();
} else {
IPAACA_DEBUG("connect OK for scope " << _scope)
}
}
IPAACA_EXPORT void MQTTParticipant::on_error()
{
IPAACA_ERROR("MQTT error")
}
IPAACA_EXPORT void MQTTParticipant::on_disconnect(int rc)
{
IPAACA_ERROR("MQTT disconnect of " << _scope << " with rc " << rc)
}
//}}}
// MQTTInformer {{{
IPAACA_EXPORT MQTTInformer::MQTTInformer(const std::string& client_id, const std::string& scope, Config::ptr config)
: MQTTParticipant(client_id, scope, config)
{
IPAACA_DEBUG("Create MQTTInformer for scope " << ((std::string) scope))
connect_and_background();
}
IPAACA_EXPORT void MQTTInformer::on_connect(int rc)
{
signal_live();
}
IPAACA_EXPORT bool MQTTInformer::internal_publish(const std::string& wire)
{
IPAACA_DEBUG("Trying to publish via MQTT, topic " << _scope)
//int mid = MQTTParticipant::get_next_mid();
int result = mosquittopp::publish(NULL, _scope.c_str(), wire.size(), wire.c_str(), 2, false);
return (result==0);
}
//}}}
// MQTTListener {{{
IPAACA_EXPORT MQTTListener::MQTTListener(const std::string& client_id, const std::string& scope, InputBuffer* buffer_ptr, Config::ptr config)
: MQTTParticipant(client_id, scope, config), Listener(buffer_ptr)
{
IPAACA_DEBUG("Create MQTTListener for scope " << ((std::string) scope))
connect_and_background();
}
IPAACA_EXPORT void MQTTListener::on_connect(int rc)
{
int res = subscribe(NULL, _scope.c_str(), 2);
if (res!=0) {
IPAACA_ERROR("subscribe (on topic " << _scope << ") returned an error " << res)
} else {
IPAACA_DEBUG("subscribe returned OK for topic " << _scope)
}
}
IPAACA_EXPORT void MQTTListener::on_subscribe(int mid, int qos_count, const int * granted_qos)
{
//IPAACA_DEBUG("on_subscribe: " << mid << " " << qos_count << " for scope " << _scope)
if (qos_count < 1) {
IPAACA_WARNING("No QoS grants reported")
} else if (qos_count > 1) {
IPAACA_WARNING("More than one QoS grant reported for Listener, should not happen")
} else {
int qos = granted_qos[0];
if (qos!=2) {
IPAACA_WARNING("MQTT QoS level 2 (guaranteed delivery) has NOT been granted on " << _scope << " (we got level " << qos << ")")
}
}
signal_live();
}
IPAACA_EXPORT void MQTTListener::on_message(const struct mosquitto_message * message) {
// internal_deserialize expects a string, which we construct here from the received char* and len
auto event = ipaaca::converters::internal_deserialize(std::string((const char*) message->payload, message->payloadlen));
//std::cout << "GOT AN EVENT of type " << event->getType() << std::endl;
// let the Listener base class handle the propagation into a Buffer:
Listener::relay_received_event_to_buffer_threaded(event);
}
//}}}
// MQTTLocalServer {{{
IPAACA_EXPORT MQTTLocalServer::MQTTLocalServer(const std::string& client_id, const std::string& scope, ipaaca::OutputBuffer* buffer_ptr, Config::ptr config)
: MQTTParticipant(client_id, scope, config), LocalServer(buffer_ptr)
{
IPAACA_DEBUG("Create MQTTLocalServer for scope " << ((std::string) scope));
connect_and_background();
}
IPAACA_EXPORT void MQTTLocalServer::on_connect(int rc)
{
//IPAACA_DEBUG("LocalServer::on_connect: " << rc)
int res = subscribe(NULL, _scope.c_str(), 2);
if (res!=0) {
IPAACA_ERROR("subscribe (on topic " << _scope << ") returned an error " << res)
} else {
IPAACA_DEBUG("subscribe returned OK for topic " << _scope)
}
}
IPAACA_EXPORT void MQTTLocalServer::on_subscribe(int mid, int qos_count, const int * granted_qos)
{
//IPAACA_DEBUG("LocalServer::on_subscribe: " << mid << " " << qos_count << " for scope " << _scope)
if (qos_count < 1) {
IPAACA_WARNING("No QoS grants reported")
} else if (qos_count > 1) {
IPAACA_WARNING("More than one QoS grant reported for Listener, should not happen")
} else {
int qos = granted_qos[0];
if (qos!=2) {
IPAACA_WARNING("MQTT QoS level 2 (guaranteed delivery) has NOT been granted on " << _scope << " (we got level " << qos << ")")
}
}
signal_live();
}
IPAACA_EXPORT void MQTTLocalServer::send_result_for_request(const std::string& request_endpoint, const std::string& request_uid, int64_t result)
{
std::string wire;
std::shared_ptr<protobuf::RemoteRequestResult> pbo(new protobuf::RemoteRequestResult());
pbo->set_request_uid(request_uid);
pbo->set_result(result);
wire = ipaaca::converters::internal_serialize(pbo);
IPAACA_DEBUG("Trying to send result to RemoteServer " << request_endpoint)
int send_res = mosquittopp::publish(NULL, request_endpoint.c_str(), wire.size(), wire.c_str(), 2, false);
}
IPAACA_EXPORT void MQTTLocalServer::on_message(const struct mosquitto_message * message)
{
auto event = ipaaca::converters::internal_deserialize(std::string((const char*) message->payload, message->payloadlen));
auto type = event->getType();
IPAACA_DEBUG("LocalServer " << _scope << " got an object of type " << type)
int64_t result = 0;
std::string request_endpoint("");
std::string request_uid("");
if (type == "ipaaca::IUPayloadUpdate") {
auto obj = std::static_pointer_cast<ipaaca::IUPayloadUpdate>(event->getData());
request_uid = obj->request_uid;
request_endpoint = obj->request_endpoint;
result = LocalServer::attempt_to_apply_remote_payload_update(obj);
} else if (type == "ipaaca::IULinkUpdate") {
auto obj = std::static_pointer_cast<ipaaca::IULinkUpdate>(event->getData());
request_uid = obj->request_uid;
result = LocalServer::attempt_to_apply_remote_link_update(obj);
} else if (type == "ipaaca::protobuf::IUCommission") {
auto obj = std::static_pointer_cast<ipaaca::protobuf::IUCommission>(event->getData());
request_uid = obj->request_uid();
result = LocalServer::attempt_to_apply_remote_commission(obj);
} else if (type == "ipaaca::protobuf::IUResendRequest") {
auto obj = std::static_pointer_cast<ipaaca::protobuf::IUResendRequest>(event->getData());
request_uid = obj->request_uid();
result = LocalServer::attempt_to_apply_remote_resend_request(obj);
} else {
IPAACA_ERROR("MQTTLocalServer: unhandled request wire type " << type)
}
if (request_uid.length()) {
send_result_for_request(request_endpoint, request_uid, result);
} else {
IPAACA_ERROR("MQTTLocalServer: cannot reply since request_uid is unknown")
}
}
//}}}
// MQTTRemoteServer{{{
// RemoteServer (= the side that sends requests to the owner of a remote IU)
IPAACA_EXPORT MQTTRemoteServer::MQTTRemoteServer(const std::string& client_id, const std::string& scope, Config::ptr config)
: MQTTParticipant(client_id, scope, config) {
_remote_end_scope = _scope;
auto uuid = ipaaca::generate_uuid_string().substr(0,8);
_name = "LocalServer_" + uuid; // TODO add e.g. pid as in Python
_scope = "/ipaaca/remotes/" + _name; // overwrites constructed MQTTParticipant::_scope here (!)
IPAACA_DEBUG("Create MQTTRemoteServer for remote scope " << ((std::string) _remote_end_scope) << " and reply listener on " << _scope)
connect_and_background();
}
IPAACA_EXPORT int64_t MQTTRemoteServer::request_remote_payload_update(std::shared_ptr<IUPayloadUpdate> update)
{
return blocking_call(std::make_shared<Event>("ipaaca::IUPayloadUpdate", update));
}
IPAACA_EXPORT int64_t MQTTRemoteServer::request_remote_link_update(std::shared_ptr<IULinkUpdate> update)
{
return blocking_call(std::make_shared<Event>("ipaaca::IULinkUpdate", update));
}
IPAACA_EXPORT int64_t MQTTRemoteServer::request_remote_commission(std::shared_ptr<protobuf::IUCommission> update)
{
return blocking_call(std::make_shared<Event>("ipaaca::protobuf::IUCommission", update));
}
IPAACA_EXPORT int64_t MQTTRemoteServer::request_remote_resend_request(std::shared_ptr<protobuf::IUResendRequest> update)
{
return blocking_call(std::make_shared<Event>("ipaaca::protobuf::IUResendRequest", update));
}
IPAACA_EXPORT void MQTTRemoteServer::on_connect(int rc)
{
int res = subscribe(NULL, _scope.c_str(), 2);
if (res!=0) {
IPAACA_ERROR("subscribe (on topic " << _scope << ") returned an error " << res)
} else {
IPAACA_DEBUG("subscribe returned OK for topic " << _scope)
}
}
IPAACA_EXPORT void MQTTRemoteServer::on_subscribe(int mid, int qos_count, const int * granted_qos)
{
if (qos_count < 1) {
IPAACA_WARNING("No QoS grants reported")
} else if (qos_count > 1) {
IPAACA_WARNING("More than one QoS grant reported for Listener, should not happen")
} else {
int qos = granted_qos[0];
if (qos!=2) {
IPAACA_WARNING("MQTT QoS level 2 (guaranteed delivery) has NOT been granted on " << _scope << " (we got level " << qos << ")")
}
}
signal_live();
}
IPAACA_EXPORT void MQTTRemoteServer::on_message(const struct mosquitto_message * message)
{
auto event = ipaaca::converters::internal_deserialize(std::string((const char*) message->payload, message->payloadlen));
auto type = event->getType();
IPAACA_DEBUG("RemoteServer " << _scope << " for remote " << _remote_end_scope << " got an object of type " << type)
if (type == "ipaaca::protobuf::RemoteRequestResult") {
auto reply = std::static_pointer_cast<ipaaca::protobuf::RemoteRequestResult>(event->getData());
auto uid = reply->request_uid();
auto result = reply->result();
PendingRequest::ptr pending_request;
{
ipaaca::Locker locker(_pending_requests_lock);
auto it = _pending_requests.find(uid);
if (it != _pending_requests.end()) {
pending_request = it->second;
_pending_requests.erase(it);
}
}
if (pending_request) {
pending_request->reply_with_result(result);
} else {
IPAACA_ERROR("MQTTRemoteServer: got a reply for a request that is not pending: " << uid)
}
} else {
IPAACA_ERROR("MQTTRemoteServer: unhandled request wire type " << type)
}
}
IPAACA_EXPORT PendingRequest::ptr MQTTRemoteServer::queue_pending_request(Event::ptr request)
{
PendingRequest::ptr pending_request = std::make_shared<PendingRequest>(request);
{
ipaaca::Locker locker(_pending_requests_lock);
if ((_MQTT_REMOTE_SERVER_MAX_QUEUED_REQUESTS > 0) && (_pending_requests.size() >= _MQTT_REMOTE_SERVER_MAX_QUEUED_REQUESTS)) {
IPAACA_ERROR("MQTTRemoteServer: maximum number of pending requests exceeded")
throw BackEndBadConditionError();
} else {
_pending_requests[pending_request->_request_uid] = pending_request;
return pending_request;
}
}
}
IPAACA_EXPORT int64_t MQTTRemoteServer::blocking_call(Event::ptr request)
{
std::string wire;
auto pending_request = queue_pending_request(request);
if (request->getType() == "ipaaca::IUPayloadUpdate") {
auto obj = std::static_pointer_cast<ipaaca::IUPayloadUpdate>(request->getData());
obj->request_uid = pending_request->_request_uid;
obj->request_endpoint = _scope;
wire = ipaaca::converters::internal_serialize(obj);
} else if (request->getType() == "ipaaca::IULinkUpdate") {
auto obj = std::static_pointer_cast<ipaaca::IULinkUpdate>(request->getData());
obj->request_uid = pending_request->_request_uid;
obj->request_endpoint = _scope;
wire = ipaaca::converters::internal_serialize(obj);
} else if (request->getType() == "ipaaca::protobuf::IUCommission") {
auto obj = std::static_pointer_cast<ipaaca::protobuf::IUCommission>(request->getData());
obj->set_request_uid(pending_request->_request_uid);
obj->set_request_endpoint(_scope);
wire = ipaaca::converters::internal_serialize(obj);
} else if (request->getType() == "ipaaca::protobuf::IUResendRequest") {
auto obj = std::static_pointer_cast<ipaaca::protobuf::IUResendRequest>(request->getData());
obj->set_request_uid(pending_request->_request_uid);
obj->set_request_endpoint(_scope);
wire = ipaaca::converters::internal_serialize(obj);
} else {
IPAACA_ERROR("Unhandled request type " << request->getType())
throw BackEndBadConditionError();
}
IPAACA_DEBUG("Trying to send request to remote LocalServer at " << _remote_end_scope)
int send_res = mosquittopp::publish(NULL, _remote_end_scope.c_str(), wire.size(), wire.c_str(), 2, false);
IPAACA_DEBUG("Waiting for the remote server")
auto result = pending_request->wait_for_reply();
IPAACA_DEBUG("RPC wait ended.")
if (result<0) {
IPAACA_WARNING("A RemoteServer request timed out, remote end was " << _remote_end_scope)
return 0;
} else {
return result;
}
}
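// Request/reply round trip within this file, for orientation:
//  1. request_remote_*() wraps the update object in an Event and calls blocking_call().
//  2. blocking_call() registers a PendingRequest keyed by a fresh request_uid, stamps
//     the request with request_endpoint = _scope (our private reply topic) and
//     publishes it to _remote_end_scope, where the owning MQTTLocalServer is subscribed.
//  3. MQTTLocalServer::on_message() applies the update and publishes a
//     protobuf::RemoteRequestResult back to that request_endpoint.
//  4. Our on_message() resolves the PendingRequest via reply_with_result(), which
//     unblocks wait_for_reply() with the new revision (negative on timeout, mapped
//     to 0 above).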
//}}}
} // of namespace mqtt
} // of namespace backend
} // of namespace ipaaca
/*
* This file is part of IPAACA, the
* "Incremental Processing Architecture
* for Artificial Conversational Agents".
*
* Copyright (c) 2009-2022 Social Cognitive Systems Group
* (formerly the Sociable Agents Group)
* CITEC, Bielefeld University
*
* http://opensource.cit-ec.de/projects/ipaaca/
* http://purl.org/net/ipaaca
*
* This file may be licensed under the terms of of the
* GNU Lesser General Public License Version 3 (the ``LGPL''),
* or (at your option) any later version.
*
* Software distributed under the License is distributed
* on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
* express or implied. See the LGPL for the specific language
* governing rights and limitations.
*
* You should have received a copy of the LGPL along with this
* program. If not, go to http://www.gnu.org/licenses/lgpl.html
* or write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The development of this software was supported by the
* Excellence Cluster EXC 277 Cognitive Interaction Technology.
* The Excellence Cluster EXC 277 is a grant of the Deutsche
* Forschungsgemeinschaft (DFG) in the context of the German
* Excellence Initiative.
*/
/**
* \file ipaaca-backend-ros.cc
*
* \brief Source file for the ROS backend
*
* \author Ramin Yaghoubzadeh Torky (ryaghoubzadeh@uni-bielefeld.de)
* \date January, 2019
*/
#include <ipaaca/ipaaca.h>
#include "b64/b64.h"
namespace ipaaca {
#include <ipaaca/ipaaca-backend-ros.h>
namespace backend {
namespace ros {
// The following is a required static library initialization hook.
// This way, the backend registers itself in the global store simply by being
// selectively compiled in (i.e. without any changes to the core code and without
// a plugin loader).
// The back-end name is taken from the one provided in the BackEnd constructor.
IPAACA_EXPORT static bool __initialize_ros_backend = BackEndLibrary::get()->register_backend(ROSBackEnd::get());
// ROSBackEnd{{{
// The backend class is the interface to the rest of the IPAACA library,
// which does not know any of the implementation details here.
// It is available via its (unique) given name (here "ros", just below)
IPAACA_EXPORT ROSBackEnd::ROSBackEnd()
: BackEnd("ros"), _need_init(true), _node_handle(NULL)
{
}
void sigint_ros_shutdown(int signal)
{
std::cout << "SIGINT" << std::endl;
::ros::shutdown();
exit(0);
}
IPAACA_EXPORT void ROSBackEnd::init_once()
{
if (_need_init) {
IPAACA_INFO("Initializing ROS back-end ...")
std::string client_id = generate_client_id();
const char* fakename = client_id.c_str(); //"ipaaca_cpp_bin";
        _cfakename = (char*) malloc(32);
        strncpy(_cfakename, fakename, 31);
        _cfakename[31] = '\0'; // ensure termination even if the generated client id is long
char* fake_argv[] = { _cfakename, NULL };
int fake_argc = 1;
int num_spinner_threads = 4;
_need_init = false;
IPAACA_INFO("Initializing ROS node ...")
::ros::init(fake_argc, fake_argv, fakename, ::ros::init_options::AnonymousName); // | ::ros::init_options::NoRosout
//
IPAACA_INFO("Starting ROS node ...")
_node_handle = new ::ros::NodeHandle(); // internally calls ::ros::start()
//
signal(SIGINT, sigint_ros_shutdown);
//
IPAACA_INFO("Starting ROS spinner thread ...")
_spinner = new ::ros::AsyncSpinner(num_spinner_threads);
_spinner->start();
}
}
IPAACA_EXPORT void ROSBackEnd::teardown()
{
IPAACA_INFO("Stopping ROS spinner thread ...")
_spinner->stop();
delete _spinner;
_spinner = NULL;
IPAACA_INFO("Shutting down ROS node ...")
::ros::shutdown();
delete _node_handle;
_node_handle = NULL;
free(_cfakename);
}
IPAACA_EXPORT BackEnd::ptr ROSBackEnd::get() {
static ptr backend_singleton;
if (!backend_singleton) {
backend_singleton = std::shared_ptr<ROSBackEnd>(new ROSBackEnd());
}
return backend_singleton;
}
IPAACA_EXPORT Informer::ptr ROSBackEnd::createInformer(const std::string& scope)
{
init_once();
auto res = std::make_shared<ROSInformer>(_node_handle, scope, get_global_config());
return res;
}
IPAACA_EXPORT Listener::ptr ROSBackEnd::createListener(const std::string& scope, InputBuffer* buf)
{
init_once();
auto res = std::make_shared<ROSListener>(_node_handle, scope, buf, get_global_config());
return res;
}
IPAACA_EXPORT LocalServer::ptr ROSBackEnd::createLocalServer(const std::string& scope, OutputBuffer* buf)
{
init_once();
auto res = std::make_shared<ROSLocalServer>(_node_handle, scope, buf, get_global_config());
return res;
}
IPAACA_EXPORT RemoteServer::ptr ROSBackEnd::createRemoteServer(const std::string& scope)
{
init_once();
auto res = std::make_shared<ROSRemoteServer>(_node_handle, scope, get_global_config());
return res;
}
//}}}
//
// Internal implementation follows
//
// ParticipantCore{{{
IPAACA_EXPORT ParticipantCore::ParticipantCore()
: _running(false), _live(false)
{
}
IPAACA_EXPORT void ParticipantCore::signal_live() {
IPAACA_DEBUG("Notifying to wake up an async ROS session (now live)")
_live = true;
_condvar.notify_one();
}
IPAACA_EXPORT bool ParticipantCore::wait_live(long timeout_milliseconds) {
IPAACA_DEBUG("Waiting for an ROS session to come live")
std::unique_lock<std::mutex> lock(_condvar_mutex);
// ros handlers will notify this after connect or subscribe (depending on the subclass)
auto success = _condvar.wait_for(lock, std::chrono::milliseconds(timeout_milliseconds), [this]{return this->_live;});
if (!success) {
IPAACA_ERROR("Backend timeout: failed to go live")
return false; // TODO throw here or in construction wrapper (below)
}
return true;
}
//}}}
// ROSParticipant{{{
IPAACA_EXPORT ROSParticipant::ROSParticipant(::ros::NodeHandle* node, const std::string& scope, Config::ptr config)
: ParticipantCore(), _node_handle(node), _scope(scope)
{
//_client_id = client_id;
// get connection parameters from config
if (config) {
host = config->get_with_default_and_warning<std::string>("transport.ros.host", "localhost");
port = config->get_with_default_and_warning<int>("transport.ros.port", 1883);
keepalive = config->get_with_default<int>("transport.ros.keepalive", 60);
} else {
host = "localhost";
port = 1883;
keepalive = 60;
IPAACA_ERROR("No Config provided in ROS backend, using defaults: host=localhost port=1883 keepalive=60")
}
//IPAACA_DEBUG("Created ROSParticipant on " << host << ":" << port << " for scope " << _scope) // << " with prepared client id " << _client_id)
//IPAACA_DEBUG("Created ROSParticipant for scope " << _scope) // << " with prepared client id " << _client_id)
}
//}}}
// ROSInformer {{{
IPAACA_EXPORT ROSInformer::ROSInformer(::ros::NodeHandle* node, const std::string& scope, Config::ptr config)
: ROSParticipant(node, scope, config)
{
IPAACA_DEBUG("Create ROS Publisher for scope " << ((std::string) scope))
_ros_pub = node->advertise<std_msgs::String>(scope, 100, true); // latch == true
//connect_and_background();
}
IPAACA_EXPORT bool ROSInformer::internal_publish(const std::string& wire)
{
IPAACA_DEBUG("Trying to publish via ROS, topic " << _scope)
//int mid = ROSParticipant::get_next_mid();
std_msgs::String msg;
msg.data = base64_encode(wire);
// // This would be a way to ensure that an event has at least
// // one actual recipient (e.g. if you know there must be a receiver
// // by convention and wait to avoid lost events due to connection delay)
//while (_ros_pub.getNumSubscribers() == 0) {
    //	std::this_thread::sleep_for(std::chrono::milliseconds(10));
//}
_ros_pub.publish(msg);
IPAACA_DEBUG("... returned from publish.")
return true;
}
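// Framing note: the serialized wire string may contain arbitrary bytes, so it is
// base64-encoded into the std_msgs::String payload here and decoded again in the
// on_message() handlers of ROSListener, ROSLocalServer and ROSRemoteServer
// (presumably to keep the payload ASCII-safe for non-C++ subscribers; the encoding
// comes from the bundled b64 helpers).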
//}}}
// ROSListener {{{
IPAACA_EXPORT ROSListener::ROSListener(::ros::NodeHandle* node, const std::string& scope, InputBuffer* buffer_ptr, Config::ptr config)
: ROSParticipant(node, scope, config), Listener(buffer_ptr)
{
IPAACA_DEBUG("Create ROSListener for scope " << ((std::string) scope))
_ros_sub = node->subscribe(scope, 1000, &ROSListener::on_message, this, ::ros::TransportHints().tcpNoDelay().reliable().unreliable());
}
IPAACA_EXPORT void ROSListener::on_message(const std_msgs::String::ConstPtr& msg) {
// internal_deserialize expects a string, which we construct here from the received char* and len
auto message = base64_decode(msg->data);
auto event = ipaaca::converters::internal_deserialize(message);
std::cout << "GOT AN EVENT of type " << event->getType() << std::endl;
// let the Listener base class handle the propagation into a Buffer:
Listener::relay_received_event_to_buffer_threaded(event);
}
//}}}
// ROSLocalServer {{{
IPAACA_EXPORT ROSLocalServer::ROSLocalServer(::ros::NodeHandle* node, const std::string& scope, ipaaca::OutputBuffer* buffer_ptr, Config::ptr config)
: ROSParticipant(node, scope, config), LocalServer(buffer_ptr)
{
IPAACA_DEBUG("Create ROSLocalServer for scope " << ((std::string) scope));
_ros_sub = node->subscribe(scope, 1000, &ROSLocalServer::on_message, this, ::ros::TransportHints().tcpNoDelay().reliable().unreliable());
}
// int res = subscribe(NULL, _scope.c_str(), 2);
IPAACA_EXPORT ::ros::Publisher ROSLocalServer::get_publisher(const std::string& endpoint)
{
if (_ros_pubs.count(endpoint)) return _ros_pubs[endpoint];
_ros_pubs[endpoint] = _node_handle->advertise<std_msgs::String>(endpoint, 100, true); // latch == true
return _ros_pubs[endpoint];
}
IPAACA_EXPORT void ROSLocalServer::send_result_for_request(const std::string& request_endpoint, const std::string& request_uid, int64_t result)
{
std::string wire;
std::shared_ptr<protobuf::RemoteRequestResult> pbo(new protobuf::RemoteRequestResult());
pbo->set_request_uid(request_uid);
pbo->set_result(result);
wire = ipaaca::converters::internal_serialize(pbo);
IPAACA_DEBUG("Trying to send result to RemoteServer " << request_endpoint)
std_msgs::String msg;
msg.data = base64_encode(wire);
auto pub = get_publisher(request_endpoint);
// // if latching should be insufficient for reliable RPC, activate this:
//while (pub.getNumSubscribers() == 0) {
    //	std::this_thread::sleep_for(std::chrono::milliseconds(10));
//}
pub.publish(msg);
}
IPAACA_EXPORT void ROSLocalServer::on_message(const std_msgs::String::ConstPtr& msg)
{
auto message = base64_decode(msg->data);
auto event = ipaaca::converters::internal_deserialize(message);
auto type = event->getType();
IPAACA_DEBUG("LocalServer " << _scope << " got an object of type " << type)
int64_t result = 0;
std::string request_endpoint("");
std::string request_uid("");
if (type == "ipaaca::IUPayloadUpdate") {
auto obj = std::static_pointer_cast<ipaaca::IUPayloadUpdate>(event->getData());
request_uid = obj->request_uid;
request_endpoint = obj->request_endpoint;
result = LocalServer::attempt_to_apply_remote_payload_update(obj);
} else if (type == "ipaaca::IULinkUpdate") {
auto obj = std::static_pointer_cast<ipaaca::IULinkUpdate>(event->getData());
request_uid = obj->request_uid;
result = LocalServer::attempt_to_apply_remote_link_update(obj);
} else if (type == "ipaaca::protobuf::IUCommission") {
auto obj = std::static_pointer_cast<ipaaca::protobuf::IUCommission>(event->getData());
request_uid = obj->request_uid();
result = LocalServer::attempt_to_apply_remote_commission(obj);
} else if (type == "ipaaca::protobuf::IUResendRequest") {
auto obj = std::static_pointer_cast<ipaaca::protobuf::IUResendRequest>(event->getData());
request_uid = obj->request_uid();
result = LocalServer::attempt_to_apply_remote_resend_request(obj);
} else {
IPAACA_ERROR("ROSLocalServer: unhandled request wire type " << type)
}
if (request_uid.length()) {
send_result_for_request(request_endpoint, request_uid, result);
} else {
IPAACA_ERROR("ROSLocalServer: cannot reply since request_uid is unknown")
}
}
//}}}
// ROSRemoteServer{{{
// RemoteServer (= the side that sends requests to the owner of a remote IU)
IPAACA_EXPORT ROSRemoteServer::ROSRemoteServer(::ros::NodeHandle* node, const std::string& scope, Config::ptr config)
: ROSParticipant(node, scope, config) {
_remote_end_scope = _scope;
auto uuid = ipaaca::generate_uuid_string().substr(0,8);
_name = "LocalServer_" + uuid; // TODO add e.g. pid as in Python
_scope = "/ipaaca/remotes/" + _name; // overwrites constructed ROSParticipant::_scope here (!)
IPAACA_DEBUG("Create ROSRemoteServer for remote scope " << ((std::string) _remote_end_scope) << " and reply listener on " << _scope)
_ros_sub = node->subscribe(_scope, 1000, &ROSRemoteServer::on_message, this, ::ros::TransportHints().tcpNoDelay().reliable().unreliable());
_ros_pub = node->advertise<std_msgs::String>(_remote_end_scope, 100, true); // latch == true
}
IPAACA_EXPORT int64_t ROSRemoteServer::request_remote_payload_update(std::shared_ptr<IUPayloadUpdate> update)
{
return blocking_call(std::make_shared<Event>("ipaaca::IUPayloadUpdate", update));
}
IPAACA_EXPORT int64_t ROSRemoteServer::request_remote_link_update(std::shared_ptr<IULinkUpdate> update)
{
return blocking_call(std::make_shared<Event>("ipaaca::IULinkUpdate", update));
}
IPAACA_EXPORT int64_t ROSRemoteServer::request_remote_commission(std::shared_ptr<protobuf::IUCommission> update)
{
return blocking_call(std::make_shared<Event>("ipaaca::protobuf::IUCommission", update));
}
IPAACA_EXPORT int64_t ROSRemoteServer::request_remote_resend_request(std::shared_ptr<protobuf::IUResendRequest> update)
{
return blocking_call(std::make_shared<Event>("ipaaca::protobuf::IUResendRequest", update));
}
IPAACA_EXPORT void ROSRemoteServer::on_message(const std_msgs::String::ConstPtr& msg)
{
auto message = base64_decode(msg->data);
auto event = ipaaca::converters::internal_deserialize(message);
auto type = event->getType();
IPAACA_DEBUG("RemoteServer " << _scope << " for remote " << _remote_end_scope << " got an object of type " << type)
if (type == "ipaaca::protobuf::RemoteRequestResult") {
auto reply = std::static_pointer_cast<ipaaca::protobuf::RemoteRequestResult>(event->getData());
auto uid = reply->request_uid();
auto result = reply->result();
PendingRequest::ptr pending_request;
{
ipaaca::Locker locker(_pending_requests_lock);
auto it = _pending_requests.find(uid);
if (it != _pending_requests.end()) {
pending_request = it->second;
_pending_requests.erase(it);
}
}
if (pending_request) {
pending_request->reply_with_result(result);
} else {
IPAACA_ERROR("ROSRemoteServer: got a reply for a request that is not pending: " << uid)
}
} else {
IPAACA_ERROR("ROSRemoteServer: unhandled request wire type " << type)
}
}
IPAACA_EXPORT PendingRequest::ptr ROSRemoteServer::queue_pending_request(Event::ptr request)
{
PendingRequest::ptr pending_request = std::make_shared<PendingRequest>(request);
{
ipaaca::Locker locker(_pending_requests_lock);
if ((_ROS_REMOTE_SERVER_MAX_QUEUED_REQUESTS > 0) && (_pending_requests.size() >= _ROS_REMOTE_SERVER_MAX_QUEUED_REQUESTS)) {
IPAACA_ERROR("ROSRemoteServer: maximum number of pending requests exceeded")
throw BackEndBadConditionError();
} else {
_pending_requests[pending_request->_request_uid] = pending_request;
return pending_request;
}
}
}
IPAACA_EXPORT int64_t ROSRemoteServer::blocking_call(Event::ptr request)
{
std::string wire;
auto pending_request = queue_pending_request(request);
if (request->getType() == "ipaaca::IUPayloadUpdate") {
auto obj = std::static_pointer_cast<ipaaca::IUPayloadUpdate>(request->getData());
obj->request_uid = pending_request->_request_uid;
obj->request_endpoint = _scope;
wire = ipaaca::converters::internal_serialize(obj);
} else if (request->getType() == "ipaaca::IULinkUpdate") {
auto obj = std::static_pointer_cast<ipaaca::IULinkUpdate>(request->getData());
obj->request_uid = pending_request->_request_uid;
obj->request_endpoint = _scope;
wire = ipaaca::converters::internal_serialize(obj);
} else if (request->getType() == "ipaaca::protobuf::IUCommission") {
auto obj = std::static_pointer_cast<ipaaca::protobuf::IUCommission>(request->getData());
obj->set_request_uid(pending_request->_request_uid);
obj->set_request_endpoint(_scope);
wire = ipaaca::converters::internal_serialize(obj);
} else if (request->getType() == "ipaaca::protobuf::IUResendRequest") {
auto obj = std::static_pointer_cast<ipaaca::protobuf::IUResendRequest>(request->getData());
obj->set_request_uid(pending_request->_request_uid);
obj->set_request_endpoint(_scope);
wire = ipaaca::converters::internal_serialize(obj);
} else {
IPAACA_ERROR("Unhandled request type " << request->getType())
throw BackEndBadConditionError();
}
IPAACA_DEBUG("Trying to send request to remote LocalServer at " << _remote_end_scope)
std_msgs::String msg;
msg.data = base64_encode(wire);
_ros_pub.publish(msg);
IPAACA_DEBUG("Waiting for the remote server")
auto result = pending_request->wait_for_reply();
IPAACA_DEBUG("RPC wait ended.")
if (result<0) {
IPAACA_WARNING("A RemoteServer request timed out, remote end was " << _remote_end_scope)
return 0;
} else {
return result;
}
}
//}}}
} // of namespace ros
} // of namespace backend
} // of namespace ipaaca
/*
* This file is part of IPAACA, the
* "Incremental Processing Architecture
* for Artificial Conversational Agents".
*
* Copyright (c) 2009-2022 Social Cognitive Systems Group
* (formerly the Sociable Agents Group)
* CITEC, Bielefeld University
*
* http://opensource.cit-ec.de/projects/ipaaca/
* http://purl.org/net/ipaaca
*
* This file may be licensed under the terms of of the
* GNU Lesser General Public License Version 3 (the ``LGPL''),
* or (at your option) any later version.
*
* Software distributed under the License is distributed
* on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
* express or implied. See the LGPL for the specific language
* governing rights and limitations.
*
* You should have received a copy of the LGPL along with this
* program. If not, go to http://www.gnu.org/licenses/lgpl.html
* or write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The development of this software was supported by the
* Excellence Cluster EXC 277 Cognitive Interaction Technology.
* The Excellence Cluster EXC 277 is a grant of the Deutsche
* Forschungsgemeinschaft (DFG) in the context of the German
* Excellence Initiative.
*/
/**
* \file ipaaca-backend.cc
*
* \brief Source file for abstract backend participant implementation
* (used in the core library and as a base to derive specific backends).
*
* \author Ramin Yaghoubzadeh Torky (ryaghoubzadeh@uni-bielefeld.de)
* \date December, 2018
*/
#include <ipaaca/ipaaca.h>
namespace ipaaca {
namespace backend {
// LocalServer (= the side that owns the actual IUs and tries to honor remote requests){{{
IPAACA_EXPORT int64_t LocalServer::attempt_to_apply_remote_payload_update(std::shared_ptr<IUPayloadUpdate> update)
{
IUInterface::ptr iui = _buffer->get(update->uid);
if (! iui) {
IPAACA_WARNING("Remote InBuffer tried to spuriously write non-existent IU " << update->uid)
return 0;
}
IU::ptr iu = std::static_pointer_cast<IU>(iui);
iu->_revision_lock.lock();
if ((update->revision != 0) && (update->revision != iu->_revision)) {
IPAACA_WARNING("Remote write operation failed because request was out of date; IU " << update->uid)
IPAACA_WARNING(" Referred-to revision was " << update->revision << " while local one is " << iu->_revision)
iu->_revision_lock.unlock();
return 0;
} else if (iu->committed()) {
iu->_revision_lock.unlock();
return 0;
} else if (iu->retracted()) {
iu->_revision_lock.unlock();
return 0;
}
if (update->is_delta) {
// FIXME TODO this is an unsolved problem atm: deletions in a delta update are
// sent individually. We should have something like _internal_merge_and_remove
for (std::vector<std::string>::const_iterator it=update->keys_to_remove.begin(); it!=update->keys_to_remove.end(); ++it) {
iu->payload()._internal_remove(*it, update->writer_name); //_buffer->unique_name());
}
// but it is solved for pure merges:
iu->payload()._internal_merge(update->new_items, update->writer_name);
} else {
iu->payload()._internal_replace_all(update->new_items, update->writer_name); //_buffer->unique_name());
}
_buffer->call_iu_event_handlers(iu, true, IU_UPDATED, iu->category());
revision_t revision = iu->revision();
iu->_revision_lock.unlock();
return revision;
}
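// Illustrative example (hypothetical values): a delta request with revision=7,
// new_items={"state": "done"} and keys_to_remove=["tmp"] is only applied if the local
// IU is still at revision 7 and neither committed nor retracted; it then removes
// "tmp", merges in "state", fires IU_UPDATED on the owning buffer and returns the
// new local revision. Any rejected request returns 0 instead.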
IPAACA_EXPORT int64_t LocalServer::attempt_to_apply_remote_link_update(std::shared_ptr<IULinkUpdate> update)
{
IUInterface::ptr iui = _buffer->get(update->uid);
if (! iui) {
IPAACA_WARNING("Remote InBuffer tried to spuriously write non-existent IU " << update->uid)
return 0;
}
IU::ptr iu = std::static_pointer_cast<IU>(iui);
iu->_revision_lock.lock();
if ((update->revision != 0) && (update->revision != iu->_revision)) {
IPAACA_WARNING("Remote write operation failed because request was out of date; IU " << update->uid)
iu->_revision_lock.unlock();
return 0;
} else if (iu->committed()) {
iu->_revision_lock.unlock();
return 0;
} else if (iu->retracted()) {
iu->_revision_lock.unlock();
return 0;
}
if (update->is_delta) {
iu->modify_links(update->new_links, update->links_to_remove, update->writer_name);
} else {
iu->set_links(update->new_links, update->writer_name);
}
_buffer->call_iu_event_handlers(iu, true, IU_LINKSUPDATED, iu->category());
revision_t revision = iu->revision();
iu->_revision_lock.unlock();
return revision;
}
IPAACA_EXPORT int64_t LocalServer::attempt_to_apply_remote_commission(std::shared_ptr<protobuf::IUCommission> update)
{
IUInterface::ptr iui = _buffer->get(update->uid());
if (! iui) {
IPAACA_WARNING("Remote InBuffer tried to spuriously write non-existent IU " << update->uid())
return 0;
}
IU::ptr iu = std::static_pointer_cast<IU>(iui);
iu->_revision_lock.lock();
if ((update->revision() != 0) && (update->revision() != iu->_revision)) {
IPAACA_WARNING("Remote write operation failed because request was out of date; IU " << update->uid())
iu->_revision_lock.unlock();
return 0;
} else if (iu->committed()) {
iu->_revision_lock.unlock();
return 0;
} else if (iu->retracted()) {
iu->_revision_lock.unlock();
return 0;
    }
iu->_internal_commit(update->writer_name());
_buffer->call_iu_event_handlers(iu, true, IU_LINKSUPDATED, iu->category());
revision_t revision = iu->revision();
iu->_revision_lock.unlock();
return revision;
}
IPAACA_EXPORT int64_t LocalServer::attempt_to_apply_remote_resend_request(std::shared_ptr<protobuf::IUResendRequest> update)
{
IUInterface::ptr iui = _buffer->get(update->uid());
if (! iui) {
IPAACA_WARNING("Remote InBuffer tried to spuriously write non-existent IU " << update->uid())
return 0;
}
IU::ptr iu = std::static_pointer_cast<IU>(iui);
if ((update->has_hidden_scope_name() == true)&&(update->hidden_scope_name().compare("") != 0)){
revision_t revision = iu->revision();
_buffer->_publish_iu_resend(iu, update->hidden_scope_name());
return revision;
} else {
revision_t revision = 0;
return revision;
}
}
//}}}
IPAACA_EXPORT void Listener::relay_received_event_to_buffer(Event::ptr event)
{
//std::cout << "Will relay it" << std::endl;
_buffer->_handle_iu_events(event);
}
IPAACA_EXPORT void Listener::relay_received_event_to_buffer_threaded(Event::ptr event)
{
auto buffer = _buffer; // avoid a 'this' lambda capture
std::thread dispatcher(
[buffer,event] () {
buffer->_handle_iu_events(event);
});
dispatcher.detach();
}
} // of namespace backend
} // of namespace ipaaca
@@ -3,7 +3,7 @@
  * "Incremental Processing Architecture
  * for Artificial Conversational Agents".
  *
- * Copyright (c) 2009-2015 Social Cognitive Systems Group
+ * Copyright (c) 2009-2022 Social Cognitive Systems Group
  * (formerly the Sociable Agents Group)
  * CITEC, Bielefeld University
  *
@@ -37,11 +37,6 @@
 namespace ipaaca {
-using namespace rsb;
-using namespace rsb::filter;
-using namespace rsb::converter;
-using namespace rsb::patterns;
 IPAACA_EXPORT std::ostream& operator<<(std::ostream& os, const IUPayloadUpdate& obj)//{{{
 {
 os << "PayloadUpdate(uid=" << obj.uid << ", revision=" << obj.revision;
@@ -115,7 +110,7 @@ IPAACA_EXPORT IUEventHandler::IUEventHandler(IUEventHandlerFunction function, IU
 _categories = categories;
 }
 }
-IPAACA_EXPORT void IUEventHandler::call(Buffer* buffer, boost::shared_ptr<IUInterface> iu, bool local, IUEventType event_type, const std::string& category)
+IPAACA_EXPORT void IUEventHandler::call(Buffer* buffer, std::shared_ptr<IUInterface> iu, bool local, IUEventType event_type, const std::string& category)
 {
 if (_condition_met(event_type, category)) {
 #if VERBOSE_HANDLERS == 1
@@ -138,17 +133,17 @@ IPAACA_EXPORT void Buffer::_allocate_unique_name(const std::string& basename, co
 }
 IPAACA_EXPORT void Buffer::register_handler(IUEventHandlerFunction function, IUEventType event_mask, const std::set<std::string>& categories)
 {
-IPAACA_DEBUG("register_handler " << function << " " << event_mask << " " << categories)
+//IPAACA_DEBUG("register_handler " << event_mask << " " << categories)
 IUEventHandler::ptr handler = IUEventHandler::ptr(new IUEventHandler(function, event_mask, categories));
 _event_handlers.push_back(handler);
 }
 IPAACA_EXPORT void Buffer::register_handler(IUEventHandlerFunction function, IUEventType event_mask, const std::string& category)
 {
-IPAACA_DEBUG("register_handler " << function << " " << event_mask << " " << category)
+//IPAACA_DEBUG("register_handler " << event_mask << " " << category)
 IUEventHandler::ptr handler = IUEventHandler::ptr(new IUEventHandler(function, event_mask, category));
 _event_handlers.push_back(handler);
 }
-IPAACA_EXPORT void Buffer::call_iu_event_handlers(boost::shared_ptr<IUInterface> iu, bool local, IUEventType event_type, const std::string& category)
+IPAACA_EXPORT void Buffer::call_iu_event_handlers(std::shared_ptr<IUInterface> iu, bool local, IUEventType event_type, const std::string& category)
 {
 //IPAACA_DEBUG("handling an event " << ipaaca::iu_event_type_to_str(event_type) << " for IU " << iu->uid())
 for (std::vector<IUEventHandler::ptr>::iterator it = _event_handlers.begin(); it != _event_handlers.end(); ++it) {
@@ -157,136 +152,14 @@ IPAACA_EXPORT void Buffer::call_iu_event_handlers(boost::shared_ptr<IUInterface>
 }
 //}}}
+/*
 // Callbacks for OutputBuffer//{{{
 IPAACA_EXPORT CallbackIUPayloadUpdate::CallbackIUPayloadUpdate(Buffer* buffer): _buffer(buffer) { }
 IPAACA_EXPORT CallbackIULinkUpdate::CallbackIULinkUpdate(Buffer* buffer): _buffer(buffer) { }
 IPAACA_EXPORT CallbackIUCommission::CallbackIUCommission(Buffer* buffer): _buffer(buffer) { }
 IPAACA_EXPORT CallbackIUResendRequest::CallbackIUResendRequest(Buffer* buffer): _buffer(buffer) { }
+*/
-IPAACA_EXPORT boost::shared_ptr<int64_t> CallbackIUPayloadUpdate::call(const std::string& methodName, boost::shared_ptr<IUPayloadUpdate> update)
-{
-IUInterface::ptr iui = _buffer->get(update->uid);
-if (! iui) {
-IPAACA_WARNING("Remote InBuffer tried to spuriously write non-existent IU " << update->uid)
-return boost::shared_ptr<int64_t>(new int64_t(0));
-}
-IU::ptr iu = boost::static_pointer_cast<IU>(iui);
-iu->_revision_lock.lock();
-if ((update->revision != 0) && (update->revision != iu->_revision)) {
-IPAACA_WARNING("Remote write operation failed because request was out of date; IU " << update->uid)
-IPAACA_WARNING(" Referred-to revision was " << update->revision << " while local one is " << iu->_revision)
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->committed()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->retracted()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->committed()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->retracted()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-}
-if (update->is_delta) {
-// FIXME TODO this is an unsolved problem atm: deletions in a delta update are
-// sent individually. We should have something like _internal_merge_and_remove
-for (std::vector<std::string>::const_iterator it=update->keys_to_remove.begin(); it!=update->keys_to_remove.end(); ++it) {
-iu->payload()._internal_remove(*it, update->writer_name); //_buffer->unique_name());
-}
-// but it is solved for pure merges:
-iu->payload()._internal_merge(update->new_items, update->writer_name);
-} else {
-iu->payload()._internal_replace_all(update->new_items, update->writer_name); //_buffer->unique_name());
-}
-_buffer->call_iu_event_handlers(iu, true, IU_UPDATED, iu->category());
-revision_t revision = iu->revision();
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(revision));
-}
-IPAACA_EXPORT boost::shared_ptr<int64_t> CallbackIULinkUpdate::call(const std::string& methodName, boost::shared_ptr<IULinkUpdate> update)
-{
-IUInterface::ptr iui = _buffer->get(update->uid);
-if (! iui) {
-IPAACA_WARNING("Remote InBuffer tried to spuriously write non-existent IU " << update->uid)
-return boost::shared_ptr<int64_t>(new int64_t(0));
-}
-IU::ptr iu = boost::static_pointer_cast<IU>(iui);
-iu->_revision_lock.lock();
-if ((update->revision != 0) && (update->revision != iu->_revision)) {
-IPAACA_WARNING("Remote write operation failed because request was out of date; IU " << update->uid)
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->committed()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->retracted()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->committed()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->retracted()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-}
-if (update->is_delta) {
-iu->modify_links(update->new_links, update->links_to_remove, update->writer_name);
-} else {
-iu->set_links(update->new_links, update->writer_name);
-}
-_buffer->call_iu_event_handlers(iu, true, IU_LINKSUPDATED, iu->category());
-revision_t revision = iu->revision();
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(revision));
-}
-IPAACA_EXPORT boost::shared_ptr<int64_t> CallbackIUCommission::call(const std::string& methodName, boost::shared_ptr<protobuf::IUCommission> update)
-{
-IUInterface::ptr iui = _buffer->get(update->uid());
-if (! iui) {
-IPAACA_WARNING("Remote InBuffer tried to spuriously write non-existent IU " << update->uid())
-return boost::shared_ptr<int64_t>(new int64_t(0));
-}
-IU::ptr iu = boost::static_pointer_cast<IU>(iui);
-iu->_revision_lock.lock();
-if ((update->revision() != 0) && (update->revision() != iu->_revision)) {
-IPAACA_WARNING("Remote write operation failed because request was out of date; IU " << update->uid())
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->committed()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else if (iu->retracted()) {
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(0));
-} else {
-}
-iu->_internal_commit(update->writer_name());
-_buffer->call_iu_event_handlers(iu, true, IU_LINKSUPDATED, iu->category());
-revision_t revision = iu->revision();
-iu->_revision_lock.unlock();
-return boost::shared_ptr<int64_t>(new int64_t(revision));
-}
-IPAACA_EXPORT boost::shared_ptr<int64_t> CallbackIUResendRequest::call(const std::string& methodName, boost::shared_ptr<protobuf::IUResendRequest> update)
-{
-IUInterface::ptr iui = _buffer->get(update->uid());
-if (! iui) {
-IPAACA_WARNING("Remote InBuffer tried to spuriously write non-existent IU " << update->uid())
-return boost::shared_ptr<int64_t>(new int64_t(0));
-}
-IU::ptr iu = boost::static_pointer_cast<IU>(iui);
-if ((update->has_hidden_scope_name() == true)&&(update->hidden_scope_name().compare("") != 0)){
-revision_t revision = iu->revision();
-_buffer->_publish_iu_resend(iu, update->hidden_scope_name());
-return boost::shared_ptr<int64_t>(new int64_t(revision));
-} else {
-revision_t revision = 0;
-return boost::shared_ptr<int64_t>(new int64_t(revision));
-}
-}
 //}}}
 // OutputBuffer//{{{
@@ -300,11 +173,13 @@ IPAACA_EXPORT OutputBuffer::OutputBuffer(const std::string& basename, const std:
 }
 IPAACA_EXPORT void OutputBuffer::_initialize_server()
 {
-_server = getFactory().createLocalServer( Scope( _unique_name ) );
-_server->registerMethod("updatePayload", LocalServer::CallbackPtr(new CallbackIUPayloadUpdate(this)));
+_server = ipaaca::backend::get_default_backend()->createLocalServer( ipaaca::backend::get_default_backend()->make_valid_scope( _unique_name + "/Server" ), this );
+//_server.connect_to_buffer(this);
+/*_server->registerMethod("updatePayload", LocalServer::CallbackPtr(new CallbackIUPayloadUpdate(this)));
 _server->registerMethod("updateLinks", LocalServer::CallbackPtr(new CallbackIULinkUpdate(this)));
 _server->registerMethod("commit", LocalServer::CallbackPtr(new CallbackIUCommission(this)));
 _server->registerMethod("resendRequest", LocalServer::CallbackPtr(new CallbackIUResendRequest(this)));
+*/
 }
 IPAACA_EXPORT OutputBuffer::ptr OutputBuffer::create(const std::string& basename)
 {
...@@ -326,8 +201,7 @@ IPAACA_EXPORT std::set<IUInterface::ptr> OutputBuffer::get_ius() ...@@ -326,8 +201,7 @@ IPAACA_EXPORT std::set<IUInterface::ptr> OutputBuffer::get_ius()
IPAACA_EXPORT void OutputBuffer::_send_iu_link_update(IUInterface* iu, bool is_delta, revision_t revision, const LinkMap& new_links, const LinkMap& links_to_remove, const std::string& writer_name) IPAACA_EXPORT void OutputBuffer::_send_iu_link_update(IUInterface* iu, bool is_delta, revision_t revision, const LinkMap& new_links, const LinkMap& links_to_remove, const std::string& writer_name)
{ {
IULinkUpdate* lup = new ipaaca::IULinkUpdate(); auto lup = std::make_shared<IULinkUpdate>();
Informer<ipaaca::IULinkUpdate>::DataPtr ldata(lup);
lup->uid = iu->uid(); lup->uid = iu->uid();
lup->is_delta = is_delta; lup->is_delta = is_delta;
lup->revision = revision; lup->revision = revision;
...@@ -336,14 +210,13 @@ IPAACA_EXPORT void OutputBuffer::_send_iu_link_update(IUInterface* iu, bool is_d ...@@ -336,14 +210,13 @@ IPAACA_EXPORT void OutputBuffer::_send_iu_link_update(IUInterface* iu, bool is_d
if (is_delta) lup->links_to_remove = links_to_remove; if (is_delta) lup->links_to_remove = links_to_remove;
if (writer_name=="") lup->writer_name = _unique_name; if (writer_name=="") lup->writer_name = _unique_name;
else lup->writer_name = writer_name; else lup->writer_name = writer_name;
Informer<AnyType>::Ptr informer = _get_informer(iu->category()); auto informer = _get_informer(iu->category());
informer->publish(ldata); informer->publish(lup);
} }
IPAACA_EXPORT void OutputBuffer::_send_iu_payload_update(IUInterface* iu, bool is_delta, revision_t revision, const std::map<std::string, PayloadDocumentEntry::ptr>& new_items, const std::vector<std::string>& keys_to_remove, const std::string& writer_name) IPAACA_EXPORT void OutputBuffer::_send_iu_payload_update(IUInterface* iu, bool is_delta, revision_t revision, const std::map<std::string, PayloadDocumentEntry::ptr>& new_items, const std::vector<std::string>& keys_to_remove, const std::string& writer_name)
{ {
IUPayloadUpdate* pup = new ipaaca::IUPayloadUpdate(); auto pup = std::make_shared<IUPayloadUpdate>();
Informer<ipaaca::IUPayloadUpdate>::DataPtr pdata(pup);
pup->payload_type = iu->payload_type(); pup->payload_type = iu->payload_type();
pup->uid = iu->uid(); pup->uid = iu->uid();
pup->is_delta = is_delta; pup->is_delta = is_delta;
...@@ -352,19 +225,19 @@ IPAACA_EXPORT void OutputBuffer::_send_iu_payload_update(IUInterface* iu, bool i ...@@ -352,19 +225,19 @@ IPAACA_EXPORT void OutputBuffer::_send_iu_payload_update(IUInterface* iu, bool i
if (is_delta) pup->keys_to_remove = keys_to_remove; if (is_delta) pup->keys_to_remove = keys_to_remove;
if (writer_name=="") pup->writer_name = _unique_name; if (writer_name=="") pup->writer_name = _unique_name;
else pup->writer_name = writer_name; else pup->writer_name = writer_name;
Informer<AnyType>::Ptr informer = _get_informer(iu->category()); auto informer = _get_informer(iu->category());
informer->publish(pdata); informer->publish(pup);
} }
IPAACA_EXPORT void OutputBuffer::_send_iu_commission(IUInterface* iu, revision_t revision, const std::string& writer_name) IPAACA_EXPORT void OutputBuffer::_send_iu_commission(IUInterface* iu, revision_t revision, const std::string& writer_name)
{ {
Informer<protobuf::IUCommission>::DataPtr data(new protobuf::IUCommission()); auto data = std::make_shared<protobuf::IUCommission>();
data->set_uid(iu->uid()); data->set_uid(iu->uid());
data->set_revision(revision); data->set_revision(revision);
if (writer_name=="") data->set_writer_name(_unique_name); if (writer_name=="") data->set_writer_name(_unique_name);
else data->set_writer_name(writer_name); else data->set_writer_name(writer_name);
Informer<AnyType>::Ptr informer = _get_informer(iu->category()); auto informer = _get_informer(iu->category());
informer->publish(data); informer->publish(data);
} }
@@ -388,19 +261,17 @@ IPAACA_EXPORT void OutputBuffer::add(IU::ptr iu)
IPAACA_EXPORT void OutputBuffer::_publish_iu(IU::ptr iu)
{
auto informer = _get_informer(iu->_category);
informer->publish(iu);
}
IPAACA_EXPORT void OutputBuffer::_publish_iu_resend(IU::ptr iu, const std::string& hidden_scope_name)
{
auto informer = _get_informer(hidden_scope_name);
informer->publish(iu);
}
IPAACA_EXPORT ipaaca::backend::Informer::ptr OutputBuffer::_get_informer(const std::string& category)
{
if (_informer_store.count(category) > 0) {
return _informer_store[category];
@@ -409,12 +280,12 @@ IPAACA_EXPORT Informer<AnyType>::Ptr OutputBuffer::_get_informer(const std::stri
std::string scope_string = "/ipaaca/channel/" + _channel + "/category/" + category;
IPAACA_INFO("Creating new informer for " << scope_string)
auto informer = ipaaca::backend::get_default_backend()->createInformer(ipaaca::backend::get_default_backend()->make_valid_scope(scope_string));
_informer_store[category] = informer;
return informer;
}
}
IPAACA_EXPORT std::shared_ptr<IU> OutputBuffer::remove(const std::string& iu_uid)
{
IUStore::iterator it = _iu_store.find(iu_uid);
if (it == _iu_store.end()) {
@@ -426,7 +297,7 @@ IPAACA_EXPORT boost::shared_ptr<IU> OutputBuffer::remove(const std::string& iu_u
_iu_store.erase(iu_uid);
return iu;
}
IPAACA_EXPORT std::shared_ptr<IU> OutputBuffer::remove(IU::ptr iu)
{
return remove(iu->uid()); // to make sure it is in the store
}
@@ -435,10 +306,10 @@ IPAACA_EXPORT void OutputBuffer::_retract_iu(IU::ptr iu)
{
if (iu->_retracted) return; // ignore subsequent retractions
iu->_retracted = true;
auto data = std::make_shared<protobuf::IURetraction>();
data->set_uid(iu->uid());
data->set_revision(iu->revision());
auto informer = _get_informer(iu->category());
informer->publish(data);
}
@@ -587,49 +458,51 @@ IPAACA_EXPORT std::set<IUInterface::ptr> InputBuffer::get_ius()
return set;
}
IPAACA_EXPORT ipaaca::backend::RemoteServer::ptr InputBuffer::_get_remote_server(const std::string& unique_server_name)
{
std::string fullname = unique_server_name + "/Server";
auto it = _remote_server_store.find(fullname);
if (it != _remote_server_store.end()) return it->second;
auto remote_server = ipaaca::backend::get_default_backend()->createRemoteServer(ipaaca::backend::get_default_backend()->make_valid_scope(fullname));
_remote_server_store[fullname] = remote_server;
return remote_server;
}
IPAACA_EXPORT ipaaca::backend::Listener::ptr InputBuffer::_create_category_listener_if_needed(const std::string& category)
{
auto it = _listener_store.find(category);
if (it!=_listener_store.end()) {
return it->second;
}
std::string scope_string = "/ipaaca/channel/" + _channel + "/category/" + category;
IPAACA_INFO("Creating new listener for " << scope_string)
auto listener = ipaaca::backend::get_default_backend()->createListener( ipaaca::backend::get_default_backend()->make_valid_scope(scope_string), this );
/*HandlerPtr event_handler = HandlerPtr(
new EventFunctionHandler(
std::bind(&InputBuffer::_handle_iu_events, this, _1)
)
);
listener->addHandler(event_handler);
*/
_listener_store[category] = listener;
return listener;
}
IPAACA_EXPORT void InputBuffer::_trigger_resend_request(ipaaca::backend::Event::ptr event) {
if (!triggerResend) return;
std::string type = event->getType();
std::string uid = "";
std::string writerName = "";
if (type == "ipaaca::IUPayloadUpdate") {
std::shared_ptr<IUPayloadUpdate> update = std::static_pointer_cast<IUPayloadUpdate>(event->getData());
uid = update->uid;
writerName = update->writer_name;
} else if (type == "ipaaca::IULinkUpdate") {
std::shared_ptr<IULinkUpdate> update = std::static_pointer_cast<IULinkUpdate>(event->getData());
uid = update->uid;
writerName = update->writer_name;
} else if (type == "ipaaca::protobuf::IUCommission") {
std::shared_ptr<protobuf::IUCommission> update = std::static_pointer_cast<protobuf::IUCommission>(event->getData());
uid = update->uid();
writerName = update->writer_name();
} else {
@@ -638,23 +511,23 @@ IPAACA_EXPORT void InputBuffer::_trigger_resend_request(EventPtr event) {
}
if (!writerName.empty()) {
auto server = _get_remote_server(writerName);
if (!uid.empty()) {
std::shared_ptr<protobuf::IUResendRequest> update = std::shared_ptr<protobuf::IUResendRequest>(new protobuf::IUResendRequest());
update->set_uid(uid);
update->set_hidden_scope_name(_uuid);
int result = server->request_remote_resend_request(update);
if (result == 0) {
throw IUResendRequestFailedError();
}
}
}
}
IPAACA_EXPORT void InputBuffer::_handle_iu_events(ipaaca::backend::Event::ptr event)
{
std::string type = event->getType();
if (type == "ipaaca::RemotePushIU") {
std::shared_ptr<RemotePushIU> iu = std::static_pointer_cast<RemotePushIU>(event->getData());
if (_iu_store.count(iu->category()) > 0) {
// already got the IU... ignore
} else {
@@ -663,12 +536,12 @@ IPAACA_EXPORT void InputBuffer::_handle_iu_events(EventPtr event)
call_iu_event_handlers(iu, false, IU_ADDED, iu->category() );
}
} else if (type == "ipaaca::RemoteMessage") {
std::shared_ptr<RemoteMessage> iu = std::static_pointer_cast<RemoteMessage>(event->getData());
call_iu_event_handlers(iu, false, IU_MESSAGE, iu->category() );
} else {
RemotePushIUStore::iterator it;
if (type == "ipaaca::IUPayloadUpdate") {
std::shared_ptr<IUPayloadUpdate> update = std::static_pointer_cast<IUPayloadUpdate>(event->getData());
if (update->writer_name == _unique_name) {
return;
}
@@ -681,7 +554,7 @@ IPAACA_EXPORT void InputBuffer::_handle_iu_events(EventPtr event)
it->second->_apply_update(update);
call_iu_event_handlers(it->second, false, IU_UPDATED, it->second->category() );
} else if (type == "ipaaca::IULinkUpdate") {
std::shared_ptr<IULinkUpdate> update = std::static_pointer_cast<IULinkUpdate>(event->getData());
if (update->writer_name == _unique_name) {
return;
}
@@ -694,7 +567,7 @@ IPAACA_EXPORT void InputBuffer::_handle_iu_events(EventPtr event)
it->second->_apply_link_update(update);
call_iu_event_handlers(it->second, false, IU_LINKSUPDATED, it->second->category() );
} else if (type == "ipaaca::protobuf::IUCommission") {
std::shared_ptr<protobuf::IUCommission> update = std::static_pointer_cast<protobuf::IUCommission>(event->getData());
if (update->writer_name() == _unique_name) {
return;
}
@@ -708,7 +581,7 @@ IPAACA_EXPORT void InputBuffer::_handle_iu_events(EventPtr event)
it->second->_revision = update->revision();
call_iu_event_handlers(it->second, false, IU_COMMITTED, it->second->category() );
} else if (type == "ipaaca::protobuf::IURetraction") {
std::shared_ptr<protobuf::IURetraction> update = std::static_pointer_cast<protobuf::IURetraction>(event->getData());
it = _iu_store.find(update->uid());
if (it == _iu_store.end()) {
IPAACA_INFO("Ignoring RETRACTED message for an IU that we did not fully receive before")
...
@@ -3,7 +3,7 @@
* "Incremental Processing Architecture
* for Artificial Conversational Agents".
*
* Copyright (c) 2009-2022 Social Cognitive Systems Group
* (formerly the Sociable Agents Group)
* CITEC, Bielefeld University
*
...
/*
* This file is part of IPAACA, the
* "Incremental Processing Architecture
* for Artificial Conversational Agents".
*
* Copyright (c) 2009-2022 Social Cognitive Systems Group
* (formerly the Sociable Agents Group)
* CITEC, Bielefeld University
*
* http://opensource.cit-ec.de/projects/ipaaca/
* http://purl.org/net/ipaaca
*
* This file may be licensed under the terms of the
* GNU Lesser General Public License Version 3 (the ``LGPL''),
* or (at your option) any later version.
*
* Software distributed under the License is distributed
* on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
* express or implied. See the LGPL for the specific language
* governing rights and limitations.
*
* You should have received a copy of the LGPL along with this
* program. If not, go to http://www.gnu.org/licenses/lgpl.html
* or write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The development of this software was supported by the
* Excellence Cluster EXC 277 Cognitive Interaction Technology.
* The Excellence Cluster EXC 277 is a grant of the Deutsche
* Forschungsgemeinschaft (DFG) in the context of the German
* Excellence Initiative.
*/
/**
* \file ipaaca-config.cc
*
* \brief Source file for IPAACA configuration handling
*
* \author Ramin Yaghoubzadeh Torky (ryaghoubzadeh@uni-bielefeld.de)
* \date January, 2019
*/
#include <ipaaca/ipaaca.h>
extern char **environ;
namespace ipaaca {
template<>
int Config::get_with_default_internal<int>(const std::string& key, int const& default_value, bool warn)
{
auto it = _data.find(key);
if (it==_data.end()) {
config_key_not_found(key, default_value, warn);
return default_value;
}
std::size_t processed;
int res = std::stoi(it->second, &processed);
if (processed != it->second.size()) {
config_conversion_failed(key, default_value);
return default_value;
}
return res;
}
template<>
std::string Config::get_with_default_internal<std::string>(const std::string& key, std::string const& default_value, bool warn)
{
auto it = _data.find(key);
if (it==_data.end()) {
config_key_not_found(key, default_value, warn);
return default_value;
}
return it->second;
}
Config::ptr get_global_config(bool auto_parse_on_demand){
static bool first = true;
static Config::ptr global_config = std::make_shared<Config>();
if (first) {
first = false;
IPAACA_DEBUG("This is IPAACA-C++ version " << IPAACA_PROTOCOL_VERSION_MAJOR << "." << IPAACA_PROTOCOL_VERSION_MINOR << " (release " << IPAACA_CPP_RELEASE_NUMBER << " with nominal release date " << IPAACA_CPP_RELEASE_DATE << ") - library compiled on " << __DATE__)
if (auto_parse_on_demand) {
IPAACA_DEBUG("Populating global configuration from default sources")
global_config->populate_from_global_sources();
}
}
return global_config;
}
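// Illustrative lookup sketch (the key names and defaults below are placeholders,
// not keys guaranteed to exist in a given setup; whether the *_internal accessors
// are callable from user code depends on the Config declaration in the header):
//   auto cfg = ipaaca::get_global_config(true);
//   int port = cfg->get_with_default_internal<int>("transport.mqtt.port", 1883, true);
//   std::string host = cfg->get_with_default_internal<std::string>("transport.mqtt.host", "localhost", true);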
void Config::populate_from_global_sources()
{
_messages_delivered.clear(); // message variable warnings
populate_from_any_conf_files();
populate_from_environment();
}
bool Config::get_key_and_value(const std::string& s, std::string& key, std::string& value)
{
bool good = true;
size_t pos = 0;
for (pos=0; pos<s.size(); ++pos) {
auto c = s[pos]; //std::tolower(s[pos]);
if (c=='=') {
value = ipaaca::str_trim(s.substr(pos+1));
break;
} else if (c=='_') {
key += '.';
} else if (c=='.') {
key += '.';
} else if ((c>='a')&&(c<='z')) {
key += c;
} else if ((c>='A')&&(c<='Z')) {
key += (c+32);
} else {
good = false;
break;
}
}
if (!good) {
IPAACA_ERROR("Malformed configuration ignored: " << s.substr(0, pos+1))
}
return good;
}
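// Illustrative normalization example for get_key_and_value() (names are
// placeholders): the input "BACKEND_MQTT_HOST=broker.local" yields the key
// "backend.mqtt.host" and the value "broker.local". Underscores and dots in
// the key both map to '.', upper case is folded to lower case, and any other
// character before the '=' marks the entry as malformed, so it is ignored.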
void Config::populate_from_environment()
{
int i = 1;
char* envline = *environ;
while (envline) {
if(strncmp(envline, "IPAACA_", 7) == 0) {
if (strlen(envline) > 1023) {
IPAACA_ERROR("Ignoring overly long environment entry starting with IPAACA_")
} else {
std::string s(envline);
s = s.substr(7);
std::string key("");
std::string value("");
bool good = get_key_and_value(s, key, value);
if (good) {
IPAACA_INFO("Configuration set from environment: " << key << "=\"" << value << "\"");
_data[key] = value;
}
}
}
envline = *(environ+i);
i++;
}
}
void Config::populate_from_any_conf_files()
{
std::fstream f1;
f1.open("ipaaca.conf", std::fstream::in);
bool had_file = false;
if (f1.is_open()) {
IPAACA_DEBUG("Including configuration from ./ipaaca.conf")
populate_from_conf_file(f1);
f1.close();
had_file = true;
} else {
std::fstream f2;
char* homedir = std::getenv("HOME"); // TODO: windows
if (homedir) {
std::string conf_in_home(homedir);
conf_in_home += "/.config/ipaaca.conf";
f2.open(conf_in_home, std::fstream::in);
if (f2.is_open()) {
IPAACA_DEBUG("Including configuration from ~/.config/ipaaca.conf")
populate_from_conf_file(f2);
f2.close();
had_file = true;
}
}
}
if (!had_file) {
IPAACA_INFO("Could not load ipaaca.conf either here or in ~/.config")
}
}
void Config::populate_from_conf_file(std::fstream& fs)
{
std::string line;
while (std::getline(fs, line)) {
//std::cout << "---> " << line << std::endl;
line = ipaaca::str_trim(line);
std::string key("");
std::string value("");
if ((line.length() > 0) && (line[0] != '#') && (line[0] != '[')) {
bool good = get_key_and_value(line, key, value);
if (good) {
IPAACA_INFO("Configuration set from conf file: " << key << "=\"" << value << "\"");
_data[key] = value;
}
}
}
}
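// A minimal ipaaca.conf sketch for populate_from_conf_file() (keys are
// placeholders for illustration only; note that the key part may only contain
// letters, '_' and '.', so there must be no space or digit before the '='):
//   # lines starting with '#' or '[' are skipped
//   backend.mqtt.host=broker.local
//   credentials.user=demo
// Keys are normalized exactly like environment entries, so the conf line
// "BACKEND_MQTT_HOST=broker.local" would yield the same key/value pair.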
} // of namespace ipaaca
/*
* This file is part of IPAACA, the
* "Incremental Processing Architecture
* for Artificial Conversational Agents".
*
* Copyright (c) 2009-2022 Social Cognitive Systems Group
* (formerly the Sociable Agents Group)
* CITEC, Bielefeld University
*
* http://opensource.cit-ec.de/projects/ipaaca/
* http://purl.org/net/ipaaca
*
* This file may be licensed under the terms of the
* GNU Lesser General Public License Version 3 (the ``LGPL''),
* or (at your option) any later version.
*
* Software distributed under the License is distributed
* on an ``AS IS'' basis, WITHOUT WARRANTY OF ANY KIND, either
* express or implied. See the LGPL for the specific language
* governing rights and limitations.
*
* You should have received a copy of the LGPL along with this
* program. If not, go to http://www.gnu.org/licenses/lgpl.html
* or write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The development of this software was supported by the
* Excellence Cluster EXC 277 Cognitive Interaction Technology.
* The Excellence Cluster EXC 277 is a grant of the Deutsche
* Forschungsgemeinschaft (DFG) in the context of the German
* Excellence Initiative.
*/
#include <ipaaca/ipaaca.h>
namespace ipaaca {
namespace converters {
// Wrap a serialized inner object and a wire type in a protobuf::TransportLevelWrapper
std::string cooked_message(const std::string& raw_message, ipaaca::protobuf::TransportMessageType msg_type)
{
std::string cooked_msg;
std::shared_ptr<protobuf::TransportLevelWrapper> pbo(new protobuf::TransportLevelWrapper());
pbo->set_raw_message(raw_message);
pbo->set_transport_message_type(msg_type);
pbo->SerializeToString(&cooked_msg);
return cooked_msg;
};
// protobuf serialization for all supported types (replaces converter repository)
std::string internal_serialize(ipaaca::IU::ptr iu) {
std::string raw_message = IUConverter::serialize(iu);
return cooked_message(raw_message,
(iu->access_mode()==IU_ACCESS_MESSAGE) ? protobuf::TransportMessageType::WireTypeMessageIU
: protobuf::TransportMessageType::WireTypeIU);
};
/*
std::string internal_serialize(ipaaca::Message::ptr msg) {
std::string raw_message = MessageConverter::serialize(msg);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeMessageIU);
};
*/
std::string internal_serialize(ipaaca::IUPayloadUpdate::ptr pup) {
std::string raw_message = IUPayloadUpdateConverter::serialize(pup);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeIUPayloadUpdate);
};
std::string internal_serialize(ipaaca::IULinkUpdate::ptr lup) {
std::string raw_message = IULinkUpdateConverter::serialize(lup);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeIULinkUpdate);
};
std::string internal_serialize(std::shared_ptr<protobuf::RemoteRequestResult> pb) {
std::string raw_message;
pb->SerializeToString(&raw_message);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeRemoteRequestResult);
};
std::string internal_serialize(std::shared_ptr<protobuf::IURetraction> pb) {
std::string raw_message;
pb->SerializeToString(&raw_message);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeIURetraction);
};
std::string internal_serialize(std::shared_ptr<protobuf::IUCommission> pb) {
std::string raw_message;
pb->SerializeToString(&raw_message);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeIUCommission);
};
std::string internal_serialize(std::shared_ptr<protobuf::IUResendRequest> pb) {
std::string raw_message;
pb->SerializeToString(&raw_message);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeIUResendRequest);
};
std::string internal_serialize(std::shared_ptr<protobuf::IUPayloadUpdateRequest> pb) {
std::string raw_message;
pb->SerializeToString(&raw_message);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeIUPayloadUpdateRequest);
};
std::string internal_serialize(std::shared_ptr<protobuf::IULinkUpdateRequest> pb) {
std::string raw_message;
pb->SerializeToString(&raw_message);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeIULinkUpdateRequest);
};
std::string internal_serialize(std::shared_ptr<protobuf::IUCommissionRequest> pb) {
std::string raw_message;
pb->SerializeToString(&raw_message);
return cooked_message(raw_message, protobuf::TransportMessageType::WireTypeIUCommissionRequest);
};
// deserialization (just switching here instead of the converter registry business)
ipaaca::backend::Event::ptr internal_deserialize(const std::string& wire)
{
std::shared_ptr<protobuf::TransportLevelWrapper> pbo(new protobuf::TransportLevelWrapper());
pbo->ParseFromString(wire);
std::shared_ptr<ipaaca::backend::Event> event;
//std::cout << "internal_deserialize of TransportMessageType " << pbo->transport_message_type() << std::endl;
switch (pbo->transport_message_type()) {
case protobuf::TransportMessageType::WireTypeIU:
{ event = std::make_shared<ipaaca::backend::Event>("ipaaca::RemotePushIU", std::static_pointer_cast<RemotePushIU>(IUConverter::deserialize(pbo->raw_message()))); }
break;
case protobuf::TransportMessageType::WireTypeMessageIU:
{ event = std::make_shared<ipaaca::backend::Event>("ipaaca::RemoteMessage", std::static_pointer_cast<RemoteMessage>(IUConverter::deserialize(pbo->raw_message()))); }
break;
case protobuf::TransportMessageType::WireTypeIUPayloadUpdate:
{ event = std::make_shared<ipaaca::backend::Event>("ipaaca::IUPayloadUpdate", IUPayloadUpdateConverter::deserialize(pbo->raw_message())); }
break;
case protobuf::TransportMessageType::WireTypeIULinkUpdate:
{ event = std::make_shared<ipaaca::backend::Event>("ipaaca::IULinkUpdate", IULinkUpdateConverter::deserialize(pbo->raw_message())); }
break;
case protobuf::TransportMessageType::WireTypeIURetraction:
{
std::shared_ptr<protobuf::IURetraction> inner(new protobuf::IURetraction());
inner->ParseFromString(pbo->raw_message());
event = std::make_shared<ipaaca::backend::Event>("ipaaca::protobuf::IURetraction", inner);
}
break;
case protobuf::TransportMessageType::WireTypeIUCommission:
{
std::shared_ptr<protobuf::IUCommission> inner(new protobuf::IUCommission());
inner->ParseFromString(pbo->raw_message());
event = std::make_shared<ipaaca::backend::Event>("ipaaca::protobuf::IUCommission", inner);
}
break;
case protobuf::TransportMessageType::WireTypeRemoteRequestResult:
{
std::shared_ptr<protobuf::RemoteRequestResult> inner(new protobuf::RemoteRequestResult());
inner->ParseFromString(pbo->raw_message());
event = std::make_shared<ipaaca::backend::Event>("ipaaca::protobuf::RemoteRequestResult", inner);
}
break;
case protobuf::TransportMessageType::WireTypeIUResendRequest:
{
std::shared_ptr<protobuf::IUResendRequest> inner(new protobuf::IUResendRequest());
inner->ParseFromString(pbo->raw_message());
event = std::make_shared<ipaaca::backend::Event>("ipaaca::protobuf::IUResendRequest", inner);
}
break;
case protobuf::TransportMessageType::WireTypeIUPayloadUpdateRequest:
{
std::shared_ptr<protobuf::IUPayloadUpdateRequest> inner(new protobuf::IUPayloadUpdateRequest());
inner->ParseFromString(pbo->raw_message());
event = std::make_shared<ipaaca::backend::Event>("ipaaca::protobuf::IUPayloadUpdateRequest", inner);
}
break;
case protobuf::TransportMessageType::WireTypeIULinkUpdateRequest:
{
std::shared_ptr<protobuf::IULinkUpdateRequest> inner(new protobuf::IULinkUpdateRequest());
inner->ParseFromString(pbo->raw_message());
event = std::make_shared<ipaaca::backend::Event>("ipaaca::protobuf::IULinkUpdateRequest", inner);
}
break;
case protobuf::TransportMessageType::WireTypeIUCommissionRequest:
{
std::shared_ptr<protobuf::IUCommissionRequest> inner(new protobuf::IUCommissionRequest());
inner->ParseFromString(pbo->raw_message());
event = std::make_shared<ipaaca::backend::Event>("ipaaca::protobuf::IUCommissionRequest", inner);
}
break;
default:
throw ipaaca::UnhandledWireTypeError(pbo->transport_message_type());
};
return event;
}
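// Round-trip sketch for the helpers above (illustrative only; assumes an
// existing IU shared_ptr "iu" owned by an OutputBuffer):
//   std::string wire = internal_serialize(iu);              // TransportLevelWrapper bytes
//   ipaaca::backend::Event::ptr ev = internal_deserialize(wire);
//   // ev->getType() is "ipaaca::RemotePushIU" or "ipaaca::RemoteMessage",
//   // depending on iu->access_mode().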
// RSB backend Converters
// IUConverter//{{{
IPAACA_EXPORT std::string IUConverter::serialize(ipaaca::IU::ptr obj)
{
std::string wire;
std::shared_ptr<protobuf::IU> pbo(new protobuf::IU());
// transfer obj data to pbo
pbo->set_uid(obj->uid());
pbo->set_revision(obj->revision());
pbo->set_category(obj->category());
pbo->set_payload_type(obj->payload_type());
pbo->set_owner_name(obj->owner_name());
pbo->set_committed(obj->committed());
ipaaca::protobuf::IU_AccessMode a_m;
switch(obj->access_mode()) {
case IU_ACCESS_PUSH:
a_m = ipaaca::protobuf::IU_AccessMode_PUSH;
break;
case IU_ACCESS_REMOTE:
a_m = ipaaca::protobuf::IU_AccessMode_REMOTE;
break;
case IU_ACCESS_MESSAGE:
a_m = ipaaca::protobuf::IU_AccessMode_MESSAGE;
break;
}
pbo->set_access_mode(a_m);
pbo->set_read_only(obj->read_only());
for (auto& kv: obj->_payload._document_store) {
protobuf::PayloadItem* item = pbo->add_payload();
item->set_key(kv.first);
IPAACA_DEBUG("Payload type: " << obj->_payload_type)
if (obj->_payload_type=="JSON") {
item->set_value( kv.second->to_json_string_representation() );
item->set_type("JSON");
} else if ((obj->_payload_type=="MAP") || (obj->_payload_type=="STR")) {
// legacy mode
item->set_value( json_value_cast<std::string>(kv.second->document));
item->set_type("STR");
}
}
for (LinkMap::const_iterator it=obj->_links._links.begin(); it!=obj->_links._links.end(); ++it) {
protobuf::LinkSet* links = pbo->add_links();
links->set_type(it->first);
for (std::set<std::string>::const_iterator it2=it->second.begin(); it2!=it->second.end(); ++it2) {
links->add_targets(*it2);
}
}
pbo->SerializeToString(&wire);
return wire;
}
IPAACA_EXPORT ipaaca::IUInterface::ptr IUConverter::deserialize(const std::string& wire) {
//assert(wireSchema == getWireSchema()); // "ipaaca-iu"
std::shared_ptr<protobuf::IU> pbo(new protobuf::IU());
pbo->ParseFromString(wire);
IUAccessMode mode = static_cast<IUAccessMode>(pbo->access_mode());
ipaaca::IUInterface::ptr obj;
switch(mode) {
case IU_ACCESS_PUSH:
{
// Create a "remote push IU"
auto inst = RemotePushIU::create();
inst->_access_mode = IU_ACCESS_PUSH;
obj = inst;
for (int i=0; i<pbo->payload_size(); i++) {
const protobuf::PayloadItem& it = pbo->payload(i);
PayloadDocumentEntry::ptr entry;
if (it.type() == "JSON") {
// fully parse json text
entry = PayloadDocumentEntry::from_json_string_representation( it.value() );
} else {
// assuming legacy "str" -> just copy value to raw string in document
entry = std::make_shared<PayloadDocumentEntry>();
entry->document.SetString(it.value(), entry->document.GetAllocator());
}
inst->_payload._document_store[it.key()] = entry;
}
}
break;
case IU_ACCESS_MESSAGE:
{
auto inst = RemoteMessage::create();
inst->_access_mode = IU_ACCESS_MESSAGE;
obj = inst;
for (int i=0; i<pbo->payload_size(); i++) {
const protobuf::PayloadItem& it = pbo->payload(i);
PayloadDocumentEntry::ptr entry;
if (it.type() == "JSON") {
// fully parse json text
entry = PayloadDocumentEntry::from_json_string_representation( it.value() );
} else {
// assuming legacy "str" -> just copy value to raw string in document
entry = std::make_shared<PayloadDocumentEntry>();
entry->document.SetString(it.value(), entry->document.GetAllocator());
}
inst->_payload._document_store[it.key()] = entry;
}
}
break;
default:
throw NotImplementedError();
}
// transfer pbo data to obj
obj->_uid = pbo->uid();
obj->_revision = pbo->revision();
obj->_category = pbo->category();
obj->_payload_type = pbo->payload_type();
obj->_owner_name = pbo->owner_name();
obj->_committed = pbo->committed();
obj->_read_only = pbo->read_only();
for (int i=0; i<pbo->links_size(); i++) {
const protobuf::LinkSet& pls = pbo->links(i);
LinkSet& ls = obj->_links._links[pls.type()];
for (int j=0; j<pls.targets_size(); j++) {
ls.insert(pls.targets(j));
}
}
return obj;
}
//}}}
// IUPayloadUpdateConverter//{{{
IPAACA_EXPORT std::string IUPayloadUpdateConverter::serialize(ipaaca::IUPayloadUpdate::ptr obj)
{
std::string wire;
//assert(data.first == getDataType()); // "ipaaca::IUPayloadUpdate"
std::shared_ptr<protobuf::IUPayloadUpdate> pbo(new protobuf::IUPayloadUpdate());
// transfer obj data to pbo
pbo->set_uid(obj->uid);
pbo->set_revision(obj->revision);
pbo->set_writer_name(obj->writer_name);
pbo->set_is_delta(obj->is_delta);
pbo->set_request_uid(obj->request_uid);
pbo->set_request_endpoint(obj->request_endpoint);
for (auto& kv: obj->new_items) {
protobuf::PayloadItem* item = pbo->add_new_items();
item->set_key(kv.first);
if (obj->payload_type=="JSON") {
item->set_value( kv.second->to_json_string_representation() );
item->set_type("JSON");
} else if ((obj->payload_type=="MAP") || (obj->payload_type=="STR")) {
// legacy mode
item->set_value( json_value_cast<std::string>(kv.second->document));
item->set_type("STR");
} else {
IPAACA_ERROR("Uninitialized payload update type!")
throw NotImplementedError();
}
IPAACA_DEBUG("Adding updated item (type " << item->type() << "): " << item->key() << " -> " << item->value() )
}
for (auto& key: obj->keys_to_remove) {
pbo->add_keys_to_remove(key);
IPAACA_DEBUG("Adding removed key: " << key)
}
pbo->SerializeToString(&wire);
return wire;
}
ipaaca::IUPayloadUpdate::ptr IUPayloadUpdateConverter::deserialize(const std::string& wire) {
//assert(wireSchema == getWireSchema()); // "ipaaca-iu-payload-update"
std::shared_ptr<protobuf::IUPayloadUpdate> pbo(new protobuf::IUPayloadUpdate());
pbo->ParseFromString(wire);
std::shared_ptr<IUPayloadUpdate> obj(new IUPayloadUpdate());
// transfer pbo data to obj
obj->uid = pbo->uid();
obj->revision = pbo->revision();
obj->writer_name = pbo->writer_name();
obj->is_delta = pbo->is_delta();
obj->request_uid = pbo->request_uid();
obj->request_endpoint = pbo->request_endpoint();
for (int i=0; i<pbo->new_items_size(); i++) {
const protobuf::PayloadItem& it = pbo->new_items(i);
PayloadDocumentEntry::ptr entry;
if (it.type() == "JSON") {
// fully parse json text
entry = PayloadDocumentEntry::from_json_string_representation( it.value() );
IPAACA_DEBUG("New/updated payload entry: " << it.key() << " -> " << it.value() )
} else {
// assuming legacy "str" -> just copy value to raw string in document
entry = std::make_shared<PayloadDocumentEntry>();
entry->document.SetString(it.value(), entry->document.GetAllocator());
}
obj->new_items[it.key()] = entry;
}
for (int i=0; i<pbo->keys_to_remove_size(); i++) {
obj->keys_to_remove.push_back(pbo->keys_to_remove(i));
}
return obj;
}
//}}}
// IULinkUpdateConverter//{{{
IPAACA_EXPORT std::string IULinkUpdateConverter::serialize(ipaaca::IULinkUpdate::ptr obj)
{
std::string wire;
//assert(data.first == getDataType());
std::shared_ptr<protobuf::IULinkUpdate> pbo(new protobuf::IULinkUpdate());
// transfer obj data to pbo
pbo->set_uid(obj->uid);
pbo->set_revision(obj->revision);
pbo->set_writer_name(obj->writer_name);
pbo->set_is_delta(obj->is_delta);
pbo->set_request_uid(obj->request_uid);
pbo->set_request_endpoint(obj->request_endpoint);
for (std::map<std::string, std::set<std::string> >::const_iterator it=obj->new_links.begin(); it!=obj->new_links.end(); ++it) {
protobuf::LinkSet* links = pbo->add_new_links();
links->set_type(it->first);
for (std::set<std::string>::const_iterator it2=it->second.begin(); it2!=it->second.end(); ++it2) {
links->add_targets(*it2);
}
}
for (std::map<std::string, std::set<std::string> >::const_iterator it=obj->links_to_remove.begin(); it!=obj->links_to_remove.end(); ++it) {
protobuf::LinkSet* links = pbo->add_links_to_remove();
links->set_type(it->first);
for (std::set<std::string>::const_iterator it2=it->second.begin(); it2!=it->second.end(); ++it2) {
links->add_targets(*it2);
}
}
pbo->SerializeToString(&wire);
return wire;
}
ipaaca::IULinkUpdate::ptr IULinkUpdateConverter::deserialize(const std::string& wire) {
//assert(wireSchema == getWireSchema()); // "ipaaca-iu-link-update"
std::shared_ptr<protobuf::IULinkUpdate> pbo(new protobuf::IULinkUpdate());
pbo->ParseFromString(wire);
std::shared_ptr<IULinkUpdate> obj(new IULinkUpdate());
// transfer pbo data to obj
obj->uid = pbo->uid();
obj->revision = pbo->revision();
obj->writer_name = pbo->writer_name();
obj->is_delta = pbo->is_delta();
obj->request_uid = pbo->request_uid();
obj->request_endpoint = pbo->request_endpoint();
for (int i=0; i<pbo->new_links_size(); ++i) {
const protobuf::LinkSet& it = pbo->new_links(i);
for (int j=0; j<it.targets_size(); ++j) {
obj->new_links[it.type()].insert(it.targets(j)); // = vec;
}
}
for (int i=0; i<pbo->links_to_remove_size(); ++i) {
const protobuf::LinkSet& it = pbo->links_to_remove(i);
for (int j=0; j<it.targets_size(); ++j) {
obj->links_to_remove[it.type()].insert(it.targets(j));
}
}
return obj;
}
//}}}
} // namespace converters
} // namespace ipaaca
@@ -3,7 +3,7 @@
* "Incremental Processing Architecture
* for Artificial Conversational Agents".
*
* Copyright (c) 2009-2022 Social Cognitive Systems Group
* (formerly the Sociable Agents Group)
* CITEC, Bielefeld University
*
@@ -39,10 +39,10 @@ namespace ipaaca {
IPAACA_EXPORT inline FakeIU::FakeIU() {
IPAACA_INFO("")
}
IPAACA_EXPORT std::shared_ptr<FakeIU> FakeIU::create()
{
IPAACA_INFO("");
auto iu = std::shared_ptr<FakeIU>(new FakeIU());
iu->_payload.initialize(iu);
return iu;
}
...