Added pure/@safe/nothrow where possible.
parent 37a661b034
commit 4f78702a57
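The hunks below attach D function attributes to existing signatures. As a rough guide (a minimal sketch, not code from the library; the names are illustrative):

--------------------
// Illustrative sketch only: the kind of signature change made throughout this commit.
final class Example
{
    private int value_;

    // pure: no access to mutable global state; @safe: memory safety checked by
    // the compiler; nothrow: never throws an Exception. Elsewhere in the diff,
    // @trusted marks code manually verified as safe and @system opts out of checks.
    int value() const pure @safe nothrow
    {
        return value_;
    }
}
--------------------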
@@ -56,7 +56,7 @@ final class Composer
 * resolver = Resolver to resolve tags (data types).
 * constructor = Constructor to construct nodes.
 */
-this(Parser parser, Resolver resolver, Constructor constructor)
+this(Parser parser, Resolver resolver, Constructor constructor) @safe nothrow pure
 {
 parser_ = parser;
 resolver_ = resolver;
@@ -64,7 +64,7 @@ final class Composer
 }

 ///Destroy the composer.
-~this()
+pure @safe nothrow ~this()
 {
 parser_ = null;
 resolver_ = null;
@@ -78,7 +78,7 @@ final class Composer
 *
 * Must be called before loading as it handles the stream start event.
 */
-bool checkNode()
+bool checkNode() @safe
 {
 //Drop the STREAM-START event.
 if(parser_.checkEvent(EventID.StreamStart))
@@ -91,7 +91,7 @@ final class Composer
 }

 ///Get a YAML document as a node (the root of the document).
-Node getNode()
+Node getNode() @safe
 {
 //Get the root node of the next document.
 assert(!parser_.checkEvent(EventID.StreamEnd),
@@ -102,7 +102,7 @@ final class Composer
 }

 ///Get single YAML document, throwing if there is more than one document.
-Node getSingleNode()
+Node getSingleNode() @trusted
 {
 assert(!parser_.checkEvent(EventID.StreamEnd),
 "Trying to get a node from Composer when there is no node to "
@@ -124,7 +124,7 @@ final class Composer

 private:
 ///Compose a YAML document and return its root node.
-Node composeDocument()
+Node composeDocument() @trusted
 {
 //Drop the DOCUMENT-START event.
 parser_.getEvent();
@@ -140,7 +140,7 @@ final class Composer
 }

 ///Compose a node.
-Node composeNode()
+Node composeNode() @system
 {
 if(parser_.checkEvent(EventID.Alias))
 {
@@ -198,7 +198,7 @@ final class Composer
 }

 ///Compose a scalar node.
-Node composeScalarNode()
+Node composeScalarNode() @system
 {
 immutable event = parser_.getEvent();
 const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value,
@@ -211,7 +211,7 @@ final class Composer
 }

 ///Compose a sequence node.
-Node composeSequenceNode()
+Node composeSequenceNode() @system
 {
 immutable startEvent = parser_.getEvent();
 const tag = resolver_.resolve(NodeID.Sequence, startEvent.tag, null,
@@ -240,7 +240,7 @@ final class Composer
 *
 * Returns: Flattened mapping as pairs.
 */
-Node.Pair[] flatten(ref Node root, in Mark startMark, in Mark endMark)
+Node.Pair[] flatten(ref Node root, in Mark startMark, in Mark endMark) @system
 {
 Node.Pair[] result;

@@ -250,7 +250,7 @@ final class Composer
 throw new ConstructorException("While constructing a mapping, "
 "expected a mapping or a list of "
 "mappings for merging, but found: "
-~ node.type.toString ~
+~ node.type.toString() ~
 " NOTE: line/column shows topmost parent "
 "to which the content is being merged",
 startMark, endMark);
@@ -284,7 +284,7 @@ final class Composer
 }

 ///Compose a mapping node.
-Node composeMappingNode()
+Node composeMappingNode() @system
 {
 immutable startEvent = parser_.getEvent();
 const tag = resolver_.resolve(NodeID.Mapping, startEvent.tag, null,
@@ -45,6 +45,7 @@ package class ConstructorException : YAMLException
 * end = End position of the error context.
 */
 this(string msg, Mark start, Mark end, string file = __FILE__, int line = __LINE__)
+@safe nothrow
 {
 super(msg ~ "\nstart: " ~ start.toString() ~ "\nend: " ~ end.toString(),
 file, line);
@@ -87,7 +88,7 @@ final class Constructor
 *
 * Params: defaultConstructors = Use constructors for default YAML tags?
 */
-this(in bool defaultConstructors = true)
+this(in bool defaultConstructors = true) @safe nothrow
 {
 if(!defaultConstructors){return;}

@@ -111,7 +112,7 @@ final class Constructor
 }

 ///Destroy the constructor.
-~this()
+pure @safe nothrow ~this()
 {
 clear(fromScalar_);
 fromScalar_ = null;
@@ -189,7 +190,8 @@ final class Constructor
 * }
 * --------------------
 */
-void addConstructorScalar(T)(in string tag, T function(ref Node) ctor)
+void addConstructorScalar(T)(const string tag, T function(ref Node) ctor)
+@safe nothrow
 {
 const t = Tag(tag);
 auto deleg = addConstructor!T(t, ctor);
@@ -240,7 +242,8 @@ final class Constructor
 * }
 * --------------------
 */
-void addConstructorSequence(T)(in string tag, T function(ref Node) ctor)
+void addConstructorSequence(T)(const string tag, T function(ref Node) ctor)
+@safe nothrow
 {
 const t = Tag(tag);
 auto deleg = addConstructor!T(t, ctor);
@@ -291,7 +294,8 @@ final class Constructor
 * }
 * --------------------
 */
-void addConstructorMapping(T)(in string tag, T function(ref Node) ctor)
+void addConstructorMapping(T)(const string tag, T function(ref Node) ctor)
+@safe nothrow
 {
 const t = Tag(tag);
 auto deleg = addConstructor!T(t, ctor);
@@ -310,7 +314,8 @@ final class Constructor
 *
 * Returns: Constructed node.
 */
-Node node(T, U)(in Mark start, in Mark end, in Tag tag, T value, U style)
+Node node(T, U)(const Mark start, const Mark end, const Tag tag,
+T value, U style) @trusted
 if((is(T : string) || is(T == Node[]) || is(T == Node.Pair[])) &&
 (is(U : CollectionStyle) || is(U : ScalarStyle)))
 {
@@ -350,7 +355,8 @@ final class Constructor
 * Params: tag = Tag for the function to handle.
 * ctor = Constructor function.
 */
-auto addConstructor(T)(in Tag tag, T function(ref Node) ctor)
+auto addConstructor(T)(const Tag tag, T function(ref Node) ctor)
+@trusted nothrow
 {
 assert((tag in fromScalar_) is null &&
 (tag in fromSequence_) is null &&
@@ -366,7 +372,7 @@ final class Constructor
 }

 //Get the array of constructor functions for scalar, sequence or mapping.
-auto delegates(T)()
+auto delegates(T)() pure @safe nothrow
 {
 static if(is(T : string)) {return &fromScalar_;}
 else static if(is(T : Node[])) {return &fromSequence_;}
@@ -852,7 +858,7 @@ struct MyStruct
 {
 int x, y, z;

-const int opCmp(ref const MyStruct s)
+const int opCmp(ref const MyStruct s) const pure @safe nothrow
 {
 if(x != s.x){return x - s.x;}
 if(y != s.y){return y - s.y;}
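The Constructor hunks above also switch several template-function parameters from `in` to `const`. A minimal sketch of the two forms (ExampleTag is a stand-in type for illustration, not dyaml's Tag); at the time of this commit, `in` was essentially shorthand for `const scope`:

--------------------
// Illustrative only: the two parameter qualifiers this commit swaps between.
struct ExampleTag { string value; }

// `in` parameter: historically const scope.
bool hasValueIn(in ExampleTag tag) { return tag.value.length > 0; }

// Plain `const` parameter: the same read-only guarantee without scope.
bool hasValueConst(const ExampleTag tag) { return tag.value.length > 0; }

void main()
{
    assert(hasValueIn(ExampleTag("x")) && hasValueConst(ExampleTag("x")));
}
--------------------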
@@ -155,7 +155,7 @@ struct Dumper
 *
 * Throws: YAMLException if the file can not be dumped to (e.g. cannot be opened).
 */
-this(string filename)
+this(string filename) @safe
 {
 name_ = filename;
 try{this(new File(filename, FileMode.OutNew));}
@@ -167,7 +167,7 @@ struct Dumper
 }

 ///Construct a Dumper writing to a _stream. This is useful to e.g. write to memory.
-this(Stream stream)
+this(Stream stream) pure @safe nothrow
 {
 resolver_ = new Resolver();
 representer_ = new Representer();
@@ -175,39 +175,39 @@ struct Dumper
 }

 ///Destroy the Dumper.
-~this()
+pure @safe nothrow ~this()
 {
 YAMLVersion_ = null;
 }

 ///Set stream _name. Used in debugging messages.
-@property void name(string name)
+@property void name(string name) pure @safe nothrow
 {
 name_ = name;
 }

 ///Specify custom Resolver to use.
-@property void resolver(Resolver resolver)
+@property void resolver(Resolver resolver) @trusted
 {
 clear(resolver_);
 resolver_ = resolver;
 }

 ///Specify custom Representer to use.
-@property void representer(Representer representer)
+@property void representer(Representer representer) @trusted
 {
 clear(representer_);
 representer_ = representer;
 }

 ///Write scalars in _canonical form?
-@property void canonical(bool canonical)
+@property void canonical(bool canonical) pure @safe nothrow
 {
 canonical_ = canonical;
 }

 ///Set indentation width. 2 by default. Must not be zero.
-@property void indent(uint indent)
+@property void indent(uint indent) pure @safe nothrow
 in
 {
 assert(indent != 0, "Can't use zero YAML indent width");
@@ -218,37 +218,37 @@ struct Dumper
 }

 ///Set preferred text _width.
-@property void textWidth(uint width)
+@property void textWidth(uint width) pure @safe nothrow
 {
 textWidth_ = width;
 }

 ///Set line break to use. Unix by default.
-@property void lineBreak(LineBreak lineBreak)
+@property void lineBreak(LineBreak lineBreak) pure @safe nothrow
 {
 lineBreak_ = lineBreak;
 }

 ///Set character _encoding to use. UTF-8 by default.
-@property void encoding(Encoding encoding)
+@property void encoding(Encoding encoding) pure @safe nothrow
 {
 encoding_ = encoding;
 }

 ///Always explicitly write document start?
-@property void explicitStart(bool explicit)
+@property void explicitStart(bool explicit) pure @safe nothrow
 {
 explicitStart_ = explicit;
 }

 ///Always explicitly write document end?
-@property void explicitEnd(bool explicit)
+@property void explicitEnd(bool explicit) pure @safe nothrow
 {
 explicitEnd_ = explicit;
 }

 ///Specify YAML version string. "1.1" by default.
-@property void YAMLVersion(string YAMLVersion)
+@property void YAMLVersion(string YAMLVersion) pure @safe nothrow
 {
 YAMLVersion_ = YAMLVersion;
 }
@@ -283,7 +283,7 @@ struct Dumper
 * dumper.dump(Node("foo"));
 * --------------------
 */
-@property void tagDirectives(string[string] tags)
+@property void tagDirectives(string[string] tags) pure @trusted
 {
 TagDirective[] t;
 foreach(handle, prefix; tags)
@@ -309,7 +309,7 @@ struct Dumper
 * Throws: YAMLException on error (e.g. invalid nodes,
 * unable to write to file/stream).
 */
-void dump(Node[] documents ...)
+void dump(Node[] documents ...) @trusted
 {
 try
 {
@@ -336,7 +336,7 @@ struct Dumper
 *
 * Throws: YAMLException if unable to emit.
 */
-void emit(Event[] events)
+void emit(Event[] events) @system
 {
 try
 {
dyaml/emitter.d (148 additions, 148 deletions)
@@ -160,7 +160,7 @@ struct Emitter
 * lineBreak = Line break character/s.
 */
 this(Stream stream, in bool canonical, in int indent, in int width,
-in LineBreak lineBreak)
+in LineBreak lineBreak) @trusted nothrow
 in{assert(stream.writeable, "Can't emit YAML to a non-writable stream");}
 body
 {
@@ -178,7 +178,7 @@ struct Emitter
 }

 ///Destroy the emitter.
-~this()
+@trusted ~this()
 {
 stream_ = null;
 clear(states_);
@@ -193,7 +193,7 @@ struct Emitter
 }

 ///Emit an event. Throws EmitterException on error.
-void emit(Event event)
+void emit(Event event) @trusted
 {
 events_.push(event);
 while(!needMoreEvents())
@@ -206,7 +206,7 @@ struct Emitter

 private:
 ///Pop and return the newest state in states_.
-void delegate() popState()
+void delegate() popState() @trusted
 {
 enforce(states_.length > 0,
 new YAMLException("Emitter: Need to pop a state but there are no states left"));
@@ -216,7 +216,7 @@ struct Emitter
 }

 ///Pop and return the newest indent in indents_.
-int popIndent()
+int popIndent() @trusted
 {
 enforce(indents_.length > 0,
 new YAMLException("Emitter: Need to pop an indent level but there"
@@ -227,7 +227,7 @@ struct Emitter
 }

 ///Write a string to the file/stream.
-void writeString(in string str)
+void writeString(in string str) @system
 {
 try final switch(encoding_)
 {
@@ -250,11 +250,11 @@ struct Emitter
 }

 ///In some cases, we wait for a few next events before emitting.
-bool needMoreEvents()
+bool needMoreEvents() @trusted nothrow
 {
 if(events_.length == 0){return true;}

-immutable event = cast(immutable Event)events_.peek();
+const event = events_.peek();
 if(event.id == EventID.DocumentStart){return needEvents(1);}
 if(event.id == EventID.SequenceStart){return needEvents(2);}
 if(event.id == EventID.MappingStart) {return needEvents(3);}
@@ -263,7 +263,7 @@ struct Emitter
 }

 ///Determines if we need specified number of more events.
-bool needEvents(in uint count)
+bool needEvents(in uint count) @system nothrow
 {
 int level = 0;

@@ -274,7 +274,7 @@ struct Emitter
 events_.next();
 while(!events_.iterationOver())
 {
-immutable event = cast(immutable Event)events_.next();
+const event = events_.next();
 static starts = [EventID.DocumentStart, EventID.SequenceStart, EventID.MappingStart];
 static ends = [EventID.DocumentEnd, EventID.SequenceEnd, EventID.MappingEnd];
 if(starts.canFind(event.id)) {++level;}
@@ -291,7 +291,7 @@ struct Emitter
 }

 ///Increase indentation level.
-void increaseIndent(in bool flow = false, in bool indentless = false)
+void increaseIndent(in bool flow = false, in bool indentless = false) @trusted
 {
 indents_ ~= indent_;
 if(indent_ == -1)
@@ -305,7 +305,7 @@ struct Emitter
 }

 ///Determines if the type of current event is as specified. Throws if no event.
-bool eventTypeIs(in EventID id) const
+bool eventTypeIs(in EventID id) const pure @trusted
 {
 enforce(!event_.isNull,
 new Error("Expected an event, but no event is available."));
@@ -319,7 +319,7 @@ struct Emitter
 //Stream handlers.

 ///Handle start of a file/stream.
-void expectStreamStart()
+void expectStreamStart() @trusted
 {
 enforce(eventTypeIs(EventID.StreamStart),
 new Error("Expected StreamStart, but got " ~ event_.idString));
@@ -330,7 +330,7 @@ struct Emitter
 }

 ///Expect nothing, throwing if we still have something.
-void expectNothing() const
+void expectNothing() const @trusted
 {
 throw new Error("Expected nothing, but got " ~ event_.idString);
 }
@@ -338,7 +338,7 @@ struct Emitter
 //Document handlers.

 ///Handle start of a document.
-void expectDocumentStart(bool first)()
+void expectDocumentStart(bool first)() @trusted
 {
 enforce(eventTypeIs(EventID.DocumentStart) || eventTypeIs(EventID.StreamEnd),
 new Error("Expected DocumentStart or StreamEnd, but got "
@@ -405,7 +405,7 @@ struct Emitter
 }

 ///Handle end of a document.
-void expectDocumentEnd()
+void expectDocumentEnd() @trusted
 {
 enforce(eventTypeIs(EventID.DocumentEnd),
 new Error("Expected DocumentEnd, but got " ~ event_.idString));
@@ -421,7 +421,7 @@ struct Emitter
 }

 ///Handle the root node of a document.
-void expectDocumentRoot()
+void expectDocumentRoot() @trusted
 {
 states_ ~= &expectDocumentEnd;
 expectNode(true);
@@ -429,7 +429,7 @@ struct Emitter

 ///Handle a new node. Parameters determine context.
 void expectNode(in bool root = false, in bool sequence = false,
-in bool mapping = false, in bool simpleKey = false)
+in bool mapping = false, in bool simpleKey = false) @trusted
 {
 rootContext_ = root;
 sequenceContext_ = sequence;
@@ -477,7 +477,7 @@ struct Emitter
 }

 ///Handle an alias.
-void expectAlias()
+void expectAlias() @trusted
 {
 enforce(!event_.anchor.isNull(), new Error("Anchor is not specified for alias"));
 processAnchor("*");
@@ -485,7 +485,7 @@ struct Emitter
 }

 ///Handle a scalar.
-void expectScalar()
+void expectScalar() @trusted
 {
 increaseIndent(true);
 processScalar();
@@ -496,7 +496,7 @@ struct Emitter
 //Flow sequence handlers.

 ///Handle a flow sequence.
-void expectFlowSequence()
+void expectFlowSequence() @trusted
 {
 writeIndicator("[", true, true);
 ++flowLevel_;
@@ -505,7 +505,7 @@ struct Emitter
 }

 ///Handle a flow sequence item.
-void expectFlowSequenceItem(bool first)()
+void expectFlowSequenceItem(bool first)() @trusted
 {
 if(event_.id == EventID.SequenceEnd)
 {
@@ -529,7 +529,7 @@ struct Emitter
 //Flow mapping handlers.

 ///Handle a flow mapping.
-void expectFlowMapping()
+void expectFlowMapping() @trusted
 {
 writeIndicator("{", true, true);
 ++flowLevel_;
@@ -538,7 +538,7 @@ struct Emitter
 }

 ///Handle a key in a flow mapping.
-void expectFlowMappingKey(bool first)()
+void expectFlowMappingKey(bool first)() @trusted
 {
 if(event_.id == EventID.MappingEnd)
 {
@@ -569,7 +569,7 @@ struct Emitter
 }

 ///Handle a simple value in a flow mapping.
-void expectFlowMappingSimpleValue()
+void expectFlowMappingSimpleValue() @trusted
 {
 writeIndicator(":", false);
 states_ ~= &expectFlowMappingKey!false;
@@ -577,7 +577,7 @@ struct Emitter
 }

 ///Handle a complex value in a flow mapping.
-void expectFlowMappingValue()
+void expectFlowMappingValue() @trusted
 {
 if(canonical_ || column_ > bestWidth_){writeIndent();}
 writeIndicator(":", true);
@@ -588,7 +588,7 @@ struct Emitter
 //Block sequence handlers.

 ///Handle a block sequence.
-void expectBlockSequence()
+void expectBlockSequence() @safe
 {
 const indentless = mappingContext_ && !indentation_;
 increaseIndent(false, indentless);
@@ -596,7 +596,7 @@ struct Emitter
 }

 ///Handle a block sequence item.
-void expectBlockSequenceItem(bool first)()
+void expectBlockSequenceItem(bool first)() @trusted
 {
 static if(!first) if(event_.id == EventID.SequenceEnd)
 {
@@ -614,14 +614,14 @@ struct Emitter
 //Block mapping handlers.

 ///Handle a block mapping.
-void expectBlockMapping()
+void expectBlockMapping() @safe
 {
 increaseIndent(false);
 state_ = &expectBlockMappingKey!true;
 }

 ///Handle a key in a block mapping.
-void expectBlockMappingKey(bool first)()
+void expectBlockMappingKey(bool first)() @trusted
 {
 static if(!first) if(event_.id == EventID.MappingEnd)
 {
@@ -644,7 +644,7 @@ struct Emitter
 }

 ///Handle a simple value in a block mapping.
-void expectBlockMappingSimpleValue()
+void expectBlockMappingSimpleValue() @trusted
 {
 writeIndicator(":", false);
 states_ ~= &expectBlockMappingKey!false;
@@ -652,7 +652,7 @@ struct Emitter
 }

 ///Handle a complex value in a block mapping.
-void expectBlockMappingValue()
+void expectBlockMappingValue() @trusted
 {
 writeIndent();
 writeIndicator(":", true, false, true);
@@ -663,35 +663,35 @@ struct Emitter
 //Checkers.

 ///Check if an empty sequence is next.
-bool checkEmptySequence() const
+bool checkEmptySequence() const @trusted pure nothrow
 {
 return event_.id == EventID.SequenceStart && events_.length > 0
 && events_.peek().id == EventID.SequenceEnd;
 }

 ///Check if an empty mapping is next.
-bool checkEmptyMapping() const
+bool checkEmptyMapping() const @trusted pure nothrow
 {
 return event_.id == EventID.MappingStart && events_.length > 0
 && events_.peek().id == EventID.MappingEnd;
 }

 ///Check if an empty document is next.
-bool checkEmptyDocument() const
+bool checkEmptyDocument() const @trusted pure nothrow
 {
 if(event_.id != EventID.DocumentStart || events_.length == 0)
 {
 return false;
 }

-immutable event = cast(immutable Event)events_.peek();
+const event = events_.peek();
 const emptyScalar = event.id == EventID.Scalar && event.anchor.isNull() &&
 event.tag.isNull() && event.implicit && event.value == "";
 return emptyScalar;
 }

 ///Check if a simple key is next.
-bool checkSimpleKey()
+bool checkSimpleKey() @trusted
 {
 uint length = 0;
 const id = event_.id;
@@ -730,7 +730,7 @@ struct Emitter
 }

 ///Process and write a scalar.
-void processScalar()
+void processScalar() @trusted
 {
 if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);}
 if(style_ == ScalarStyle.Invalid)
@@ -759,7 +759,7 @@ struct Emitter
 }

 ///Process and write an anchor/alias.
-void processAnchor(in string indicator)
+void processAnchor(in string indicator) @trusted
 {
 if(event_.anchor.isNull())
 {
@@ -779,7 +779,7 @@ struct Emitter
 }

 ///Process and write a tag.
-void processTag()
+void processTag() @trusted
 {
 Tag tag = event_.tag;

@@ -814,7 +814,7 @@ struct Emitter
 }

 ///Determine style to write the current scalar in.
-ScalarStyle chooseScalarStyle()
+ScalarStyle chooseScalarStyle() @trusted
 {
 if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);}

@@ -857,7 +857,7 @@ struct Emitter
 }

 ///Prepare YAML version string for output.
-static string prepareVersion(in string YAMLVersion)
+static string prepareVersion(in string YAMLVersion) @trusted
 {
 enforce(YAMLVersion.split(".")[0] == "1",
 new Error("Unsupported YAML version: " ~ YAMLVersion));
@@ -865,7 +865,7 @@ struct Emitter
 }

 ///Encode an Unicode character for tag directive and write it to writer.
-static void encodeChar(Writer)(ref Writer writer, in dchar c)
+static void encodeChar(Writer)(ref Writer writer, in dchar c) @trusted
 {
 char[4] data;
 const bytes = encode(data, c);
@@ -877,7 +877,7 @@ struct Emitter
 }

 ///Prepare tag directive handle for output.
-static string prepareTagHandle(in string handle)
+static string prepareTagHandle(in string handle) @trusted
 {
 enforce(handle !is null && handle != "",
 new Error("Tag handle must not be empty"));
@@ -892,7 +892,7 @@ struct Emitter
 }

 ///Prepare tag directive prefix for output.
-static string prepareTagPrefix(in string prefix)
+static string prepareTagPrefix(in string prefix) @trusted
 {
 enforce(prefix !is null && prefix != "",
 new Error("Tag prefix must not be empty"));
@@ -923,7 +923,7 @@ struct Emitter
 }

 ///Prepare tag for output.
-string prepareTag(in Tag tag)
+string prepareTag(in Tag tag) @trusted
 {
 enforce(!tag.isNull(), new Error("Tag must not be empty"));

@@ -970,7 +970,7 @@ struct Emitter
 }

 ///Prepare anchor for output.
-static string prepareAnchor(in Anchor anchor)
+static string prepareAnchor(in Anchor anchor) @trusted
 {
 enforce(!anchor.isNull() && anchor.get != "",
 new Error("Anchor must not be empty"));
@@ -984,7 +984,7 @@ struct Emitter
 }

 ///Analyze specifed scalar and return the analysis result.
-static ScalarAnalysis analyzeScalar(string scalar)
+static ScalarAnalysis analyzeScalar(string scalar) @safe
 {
 ScalarAnalysis analysis;
 analysis.flags.isNull = false;
@@ -1151,7 +1151,7 @@ struct Emitter
 //Writers.

 ///Start the YAML stream (write the unicode byte order mark).
-void writeStreamStart()
+void writeStreamStart() @system
 {
 immutable(ubyte)[] bom;
 //Write BOM (always, even for UTF-8)
@@ -1176,11 +1176,11 @@ struct Emitter
 }

 ///End the YAML stream.
-void writeStreamEnd(){stream_.flush();}
+void writeStreamEnd() @system {stream_.flush();}

 ///Write an indicator (e.g. ":", "[", ">", etc.).
 void writeIndicator(in string indicator, in bool needWhitespace,
-in bool whitespace = false, in bool indentation = false)
+in bool whitespace = false, in bool indentation = false) @system
 {
 const bool prefixSpace = !whitespace_ && needWhitespace;
 whitespace_ = whitespace;
@@ -1196,7 +1196,7 @@ struct Emitter
 }

 ///Write indentation.
-void writeIndent()
+void writeIndent() @system
 {
 const indent = indent_ == -1 ? 0 : indent_;

@@ -1222,7 +1222,7 @@ struct Emitter
 }

 ///Start new line.
-void writeLineBreak(in string data = null)
+void writeLineBreak(in string data = null) @system
 {
 whitespace_ = indentation_ = true;
 ++line_;
@@ -1231,7 +1231,7 @@ struct Emitter
 }

 ///Write a YAML version directive.
-void writeVersionDirective(in string versionText)
+void writeVersionDirective(in string versionText) @system
 {
 writeString("%YAML ");
 writeString(versionText);
@@ -1239,7 +1239,7 @@ struct Emitter
 }

 ///Write a tag directive.
-void writeTagDirective(in string handle, in string prefix)
+void writeTagDirective(in string handle, in string prefix) @system
 {
 writeString("%TAG ");
 writeString(handle);
@@ -1290,7 +1290,7 @@ struct ScalarWriter

 public:
 ///Construct a ScalarWriter using emitter to output text.
-this(ref Emitter emitter, string text, in bool split = true)
+this(ref Emitter emitter, string text, in bool split = true) @trusted nothrow
 {
 emitter_ = &emitter;
 text_ = text;
@@ -1298,13 +1298,13 @@ struct ScalarWriter
 }

 ///Destroy the ScalarWriter.
-~this()
+@trusted nothrow ~this()
 {
 text_ = null;
 }

 ///Write text as single quoted scalar.
-void writeSingleQuoted()
+void writeSingleQuoted() @system
 {
 emitter_.writeIndicator("\'", true);
 spaces_ = breaks_ = false;
@@ -1354,7 +1354,7 @@ struct ScalarWriter
 }

 ///Write text as double quoted scalar.
-void writeDoubleQuoted()
+void writeDoubleQuoted() @system
 {
 resetTextPosition();
 emitter_.writeIndicator("\"", true);
@@ -1416,7 +1416,7 @@ struct ScalarWriter
 }

 ///Write text as folded block scalar.
-void writeFolded()
+void writeFolded() @system
 {
 initBlock('>');
 bool leadingSpace = true;
@@ -1462,7 +1462,7 @@ struct ScalarWriter
 }

 ///Write text as literal block scalar.
-void writeLiteral()
+void writeLiteral() @system
 {
 initBlock('|');
 breaks_ = true;
@@ -1489,7 +1489,7 @@ struct ScalarWriter
 }

 ///Write text as plain scalar.
-void writePlain()
+void writePlain() @system
 {
 if(emitter_.rootContext_){emitter_.openEnded_ = true;}
 if(text_ == ""){return;}
@@ -1536,7 +1536,7 @@ struct ScalarWriter

 private:
 ///Get next character and move end of the text range to it.
-dchar nextChar()
+dchar nextChar() pure @safe
 {
 ++endChar_;
 endByte_ = nextEndByte_;
@@ -1552,21 +1552,21 @@ struct ScalarWriter
 }

 ///Get character at start of the text range.
-dchar charAtStart() const
+dchar charAtStart() const pure @safe
 {
 size_t idx = startByte_;
 return decode(text_, idx);
 }

 ///Is the current line too wide?
-bool tooWide() const
+bool tooWide() const pure @safe nothrow
 {
 return startChar_ + 1 == endChar_ &&
 emitter_.column_ > emitter_.bestWidth_;
 }

 ///Determine hints (indicators) for block scalar.
-size_t determineBlockHints(ref char[] hints, uint bestIndent) const
+size_t determineBlockHints(ref char[] hints, uint bestIndent) const pure @trusted
 {
 size_t hintsIdx = 0;
 if(text_.length == 0){return hintsIdx;}
@@ -1597,7 +1597,7 @@ struct ScalarWriter
 }

 ///Initialize for block scalar writing with specified indicator.
-void initBlock(in char indicator)
+void initBlock(in char indicator) @system
 {
 char[4] hints;
 hints[0] = indicator;
@@ -1611,7 +1611,7 @@ struct ScalarWriter
 }

 ///Write out the current text range.
-void writeCurrentRange(in Flag!"UpdateColumn" updateColumn)
+void writeCurrentRange(in Flag!"UpdateColumn" updateColumn) @system
 {
 emitter_.writeString(text_[startByte_ .. endByte_]);
 if(updateColumn){emitter_.column_ += endChar_ - startChar_;}
@@ -1619,7 +1619,7 @@ struct ScalarWriter
 }

 ///Write line breaks in the text range.
-void writeLineBreaks()
+void writeLineBreaks() @system
 {
 foreach(const dchar br; text_[startByte_ .. endByte_])
 {
@@ -1635,13 +1635,13 @@ struct ScalarWriter
 }

 ///Write line break if start of the text range is a newline.
-void writeStartLineBreak()
+void writeStartLineBreak() @system
 {
 if(charAtStart == '\n'){emitter_.writeLineBreak();}
 }

 ///Write indentation, optionally resetting whitespace/indentation flags.
-void writeIndent(in Flag!"ResetSpace" resetSpace)
+void writeIndent(in Flag!"ResetSpace" resetSpace) @system
 {
 emitter_.writeIndent();
 if(resetSpace)
@@ -1651,14 +1651,14 @@ struct ScalarWriter
 }

 ///Move start of text range to its end.
-void updateRangeStart()
+void updateRangeStart() pure @safe nothrow
 {
 startByte_ = endByte_;
 startChar_ = endChar_;
 }

 ///Update the line breaks_ flag, optionally updating the spaces_ flag.
-void updateBreaks(in dchar c, in Flag!"UpdateSpaces" updateSpaces)
+void updateBreaks(in dchar c, in Flag!"UpdateSpaces" updateSpaces) pure @trusted
 {
 if(c == dcharNone){return;}
 breaks_ = newlineSearch_.canFind(c);
@@ -1666,7 +1666,7 @@ struct ScalarWriter
 }

 ///Move to the beginning of text.
-void resetTextPosition()
+void resetTextPosition() pure @safe nothrow
 {
 startByte_ = endByte_ = nextEndByte_ = 0;
 startChar_ = endChar_ = -1;
@@ -91,10 +91,10 @@ struct Event
 CollectionStyle collectionStyle = CollectionStyle.Invalid;

 ///Is this a null (uninitialized) event?
-@property bool isNull() const {return id == EventID.Invalid;}
+@property bool isNull() const pure @system nothrow {return id == EventID.Invalid;}

 ///Get string representation of the token ID.
-@property string idString() const {return to!string(id);}
+@property string idString() const @system {return to!string(id);}
 }

 /**
@@ -104,13 +104,14 @@ struct Event
 * end = End position of the event in the file/stream.
 * anchor = Anchor, if this is an alias event.
 */
-Event event(EventID id)(in Mark start, in Mark end, in Anchor anchor = Anchor()) pure
+Event event(EventID id)(const Mark start, const Mark end, const Anchor anchor = Anchor())
+pure @trusted nothrow
 {
 Event result;
 result.startMark = start;
 result.endMark = end;
 result.anchor = anchor;
 result.id = id;
 return result;
 }

@@ -123,19 +124,19 @@ Event event(EventID id)(in Mark start, in Mark end, in Anchor anchor = Anchor())
 * tag = Tag of the sequence, if specified.
 * implicit = Should the tag be implicitly resolved?
 */
-Event collectionStartEvent(EventID id)(in Mark start, in Mark end, in Anchor anchor,
-in Tag tag, in bool implicit,
-in CollectionStyle style) pure
+Event collectionStartEvent(EventID id)
+(const Mark start, const Mark end, const Anchor anchor, const Tag tag,
+const bool implicit, const CollectionStyle style) pure @trusted nothrow
 {
 static assert(id == EventID.SequenceStart || id == EventID.SequenceEnd ||
 id == EventID.MappingStart || id == EventID.MappingEnd);
 Event result;
 result.startMark = start;
 result.endMark = end;
 result.anchor = anchor;
 result.tag = tag;
 result.id = id;
 result.implicit = implicit;
 result.collectionStyle = style;
 return result;
 }
@@ -147,13 +148,14 @@ Event collectionStartEvent(EventID id)(in Mark start, in Mark end, in Anchor anc
 * end = End position of the event in the file/stream.
 * encoding = Encoding of the stream.
 */
-Event streamStartEvent(in Mark start, in Mark end, in Encoding encoding) pure
+Event streamStartEvent(const Mark start, const Mark end, const Encoding encoding)
+pure @trusted nothrow
 {
 Event result;
 result.startMark = start;
 result.endMark = end;
 result.id = EventID.StreamStart;
 result.encoding = encoding;
 return result;
 }

@@ -165,7 +167,7 @@ alias event!(EventID.MappingEnd) mappingEndEvent;

 ///Aliases for collection start events.
 alias collectionStartEvent!(EventID.SequenceStart) sequenceStartEvent;
 alias collectionStartEvent!(EventID.MappingStart) mappingStartEvent;

 /**
 * Construct a document start event.
@@ -176,16 +178,16 @@ alias collectionStartEvent!(EventID.MappingStart) mappingStartEvent;
 * YAMLVersion = YAML version string of the document.
 * tagDirectives = Tag directives of the document.
 */
-Event documentStartEvent(in Mark start, in Mark end, bool explicit, string YAMLVersion,
-TagDirective[] tagDirectives) pure
+Event documentStartEvent(const Mark start, const Mark end, bool explicit, string YAMLVersion,
+TagDirective[] tagDirectives) pure @trusted nothrow
 {
 Event result;
 result.value = YAMLVersion;
 result.startMark = start;
 result.endMark = end;
 result.id = EventID.DocumentStart;
 result.explicitDocument = explicit;
 result.tagDirectives = tagDirectives;
 return result;
 }

@@ -196,12 +198,12 @@ Event documentStartEvent(in Mark start, in Mark end, bool explicit, string YAMLV
 * end = End position of the event in the file/stream.
 * explicit = Is this an explicit document end?
 */
-Event documentEndEvent(in Mark start, in Mark end, bool explicit) pure
+Event documentEndEvent(const Mark start, const Mark end, bool explicit) pure @trusted nothrow
 {
 Event result;
 result.startMark = start;
 result.endMark = end;
 result.id = EventID.DocumentEnd;
 result.explicitDocument = explicit;
 return result;
 }
@@ -217,19 +219,19 @@ Event documentEndEvent(in Mark start, in Mark end, bool explicit) pure
 * value = String value of the scalar.
 * style = Scalar style.
 */
-Event scalarEvent(in Mark start, in Mark end, in Anchor anchor, in Tag tag,
-in Tuple!(bool, bool) implicit, in string value,
-in ScalarStyle style = ScalarStyle.Invalid) pure
+Event scalarEvent(const Mark start, const Mark end, const Anchor anchor, const Tag tag,
+const Tuple!(bool, bool) implicit, const string value,
+const ScalarStyle style = ScalarStyle.Invalid) pure @trusted nothrow
 {
 Event result;
 result.value = value;
 result.startMark = start;
 result.endMark = end;
 result.anchor = anchor;
 result.tag = tag;
 result.id = EventID.Scalar;
 result.scalarStyle = style;
 result.implicit = implicit[0];
 result.implicit_2 = implicit[1];
 return result;
 }
@@ -18,6 +18,7 @@ class YAMLException : Exception
{
///Construct a YAMLException with specified message and position where it was thrown.
public this(string msg, string file = __FILE__, int line = __LINE__)
+@trusted nothrow
{
super(msg, file, line);
}

@@ -34,14 +35,14 @@ struct Mark

public:
///Construct a Mark with specified line and column in the file.
-this(in uint line, in uint column)
+this(in uint line, in uint column) pure @safe nothrow
{
line_ = cast(ushort)min(ushort.max, line);
column_ = cast(ushort)min(ushort.max, column);
}

///Get a string representation of the mark.
-string toString() const
+string toString() const @trusted
{
//Line/column numbers start at zero internally, make them start at 1.
string clamped(ushort v){return format(v + 1, v == ushort.max ? " or higher" : "");}

@@ -57,7 +58,7 @@ abstract class MarkedYAMLException : YAMLException
{
//Construct a MarkedYAMLException with specified context and problem.
this(string context, Mark contextMark, string problem, Mark problemMark,
-string file = __FILE__, int line = __LINE__)
+string file = __FILE__, int line = __LINE__) @safe nothrow
{
const msg = context ~ '\n' ~
(contextMark != problemMark ? contextMark.toString() ~ '\n' : "") ~

@@ -67,6 +68,7 @@ abstract class MarkedYAMLException : YAMLException

//Construct a MarkedYAMLException with specified problem.
this(string problem, Mark problemMark, string file = __FILE__, int line = __LINE__)
+@safe nothrow
{
super(problem ~ '\n' ~ problemMark.toString(), file, line);
}

@@ -76,6 +78,7 @@ abstract class MarkedYAMLException : YAMLException
template ExceptionCtors()
{
public this(string msg, string file = __FILE__, int line = __LINE__)
+@safe nothrow
{
super(msg, file, line);
}

@@ -86,13 +89,14 @@ template MarkedExceptionCtors()
{
public:
this(string context, Mark contextMark, string problem, Mark problemMark,
-string file = __FILE__, int line = __LINE__)
+string file = __FILE__, int line = __LINE__) @safe nothrow
{
super(context, contextMark, problem, problemMark,
file, line);
}

this(string problem, Mark problemMark, string file = __FILE__, int line = __LINE__)
+@safe nothrow
{
super(problem, problemMark, file, line);
}
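A small usage sketch (not from the diff) of the Mark type annotated above; positions are stored 0-based and clamped to ushort.max, and toString() renders them 1-based for error messages:

//Sketch only: Mark is dyaml's file-position value type.
const mark = Mark(3, 14);          //line 3, column 14 (0-based)
const where = mark.toString();     //1-based, human readable position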
@@ -35,7 +35,7 @@ template FastCharSearch(dstring chars, uint tableSize = 256)
}

///Generate the search table and the canFind method.
-string searchCode(dstring chars, uint tableSize)()
+string searchCode(dstring chars, uint tableSize)() @trusted
{
const tableSizeStr = to!string(tableSize);
ubyte[tableSize] table;

@@ -76,14 +76,14 @@ string searchCode(dstring chars, uint tableSize)()

string code = tableSize ? tableCode() : "";

-code ~= "bool canFind(in dchar c) pure\n"
+code ~= "bool canFind(in dchar c) pure @safe nothrow\n"
"{\n";

if(tableSize)
{
code ~= " if(c < " ~ tableSizeStr ~ ")\n"
" {\n"
-" return cast(bool)table_[c];\n"
+" return cast(immutable(bool))table_[c];\n"
" }\n";
}
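The generated canFind is consumed by mixing the template into a scope. A rough sketch, assuming FastCharSearch is instantiated as a named mixin (the instantiation sites themselves are not part of this diff):

//Sketch only, assuming FastCharSearch is used as a named template mixin.
mixin FastCharSearch!("\n\r"d) lineBreakSearch;

bool isLineBreak(in dchar c)
{
    //Table lookup for characters below tableSize, linear search otherwise.
    return lineBreakSearch.canFind(c);
}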
@@ -41,7 +41,7 @@ struct Flags(names ...) if(names.length <= 8)
ubyte flags_;

///Generate a setter and a getter for each flag.
-static string flags(string[] names ...)
+static string flags(string[] names ...) @trusted
in
{
assert(names.length <= 8, "Flags struct can only hold 8 flags");

@@ -53,14 +53,14 @@ struct Flags(names ...) if(names.length <= 8)
{
string istr = to!string(index);
result ~= "\n"
-"@property bool " ~ name ~ "(bool value)\n"
+"@property bool " ~ name ~ "(bool value) pure @safe nothrow\n"
"{\n"
" flags_ = value ? flags_ | (1 <<" ~ istr ~ ")\n"
" : flags_ & (0xFF ^ (1 << " ~ istr ~"));\n"
" return value;\n"
"}\n"
"\n"
-"@property bool " ~ name ~ "() const pure\n"
+"@property bool " ~ name ~ "() const pure @safe nothrow\n"
"{\n"
" return (flags_ >> " ~ istr ~ ") & 1;\n"
"}\n";
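For context, the Flags template turns each listed name into a one-bit property backed by flags_. A sketch with hypothetical flag names:

//Sketch only; "explicitStart" and "explicitEnd" are illustrative names.
Flags!("explicitStart", "explicitEnd") flags;
flags.explicitStart = true;
assert(flags.explicitStart && !flags.explicitEnd);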
@@ -21,7 +21,7 @@ enum LineBreak
package:

//Get line break string for specified line break.
-string lineBreak(in LineBreak b) pure
+string lineBreak(in LineBreak b) pure @safe nothrow
{
final switch(b)
{
@@ -115,7 +115,7 @@ struct Loader
*
* Throws: YAMLException if the file could not be opened or read.
*/
-this(string filename)
+this(string filename) pure @safe
{
name_ = filename;
try{this(new File(filename));}

@@ -133,7 +133,7 @@ struct Loader
*
* Throws: YAMLException if stream could not be read.
*/
-this(Stream stream)
+this(Stream stream) pure @safe
{
try
{

@@ -151,7 +151,7 @@ struct Loader
}

///Destroy the Loader.
-~this()
+@trusted ~this()
{
clear(reader_);
clear(scanner_);

@@ -159,19 +159,19 @@ struct Loader
}

///Set stream _name. Used in debugging messages.
-@property void name(string name)
+@property void name(string name) pure @safe nothrow
{
name_ = name;
}

///Specify custom Resolver to use.
-@property void resolver(Resolver resolver)
+@property void resolver(Resolver resolver) pure @safe nothrow
{
resolver_ = resolver;
}

///Specify custom Constructor to use.
-@property void constructor(Constructor constructor)
+@property void constructor(Constructor constructor) pure @safe nothrow
{
constructor_ = constructor;
}

@@ -188,7 +188,7 @@ struct Loader
* Throws: YAMLException if there wasn't exactly one document
* or on a YAML parsing error.
*/
-Node load()
+Node load() @safe
in
{
assert(!done_, "Loader: Trying to load YAML twice");

@@ -223,7 +223,7 @@ struct Loader
*
* Throws: YAMLException on a parsing error.
*/
-Node[] loadAll()
+Node[] loadAll() @safe
{
Node[] nodes;
foreach(ref node; this){nodes ~= node;}

@@ -239,7 +239,7 @@ struct Loader
*
* Throws: YAMLException on a parsing error.
*/
-int opApply(int delegate(ref Node) dg)
+int opApply(int delegate(ref Node) dg) @trusted
in
{
assert(!done_, "Loader: Trying to load YAML twice");

@@ -270,7 +270,7 @@ struct Loader

package:
//Scan and return all tokens. Used for debugging.
-Token[] scan()
+Token[] scan() @safe
{
try
{

@@ -286,7 +286,7 @@ struct Loader
}

//Parse and return all events. Used for debugging.
-immutable(Event)[] parse()
+immutable(Event)[] parse() @safe
{
try
{
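Most of the public Loader entry points change in these hunks. A short usage sketch of that API as documented above (the file names are placeholders, and load() may only be called once per Loader instance):

import yaml;   //assuming the library's public import module

void example()
{
    //Single-document stream: load() returns the root node.
    Node root = Loader("config.yaml").load();

    //Multi-document stream: collect with loadAll() or iterate via opApply.
    foreach(ref Node document; Loader("documents.yaml"))
    {
        //process each document's root node
    }
}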
112  dyaml/node.d

@@ -40,6 +40,7 @@ class NodeException : YAMLException
* start = Start position of the node.
*/
this(string msg, Mark start, string file = __FILE__, int line = __LINE__)
+@safe nothrow
{
super(msg ~ "\nNode at: " ~ start.toString(), file, line);
}

@@ -59,7 +60,7 @@ package enum NodeID : ubyte
struct YAMLNull
{
///Used for string conversion.
-string toString() const {return "null";}
+string toString() const pure @safe nothrow {return "null";}
}

//Merge YAML type, used to support "tag:yaml.org,2002:merge".

@@ -70,11 +71,11 @@ package abstract class YAMLObject
{
public:
//Get type of the stored value.
-@property TypeInfo type() const {assert(false);}
+@property TypeInfo type() const pure @safe nothrow {assert(false);}

protected:
//Compare with another YAMLObject.
-int cmp(const YAMLObject rhs) const {assert(false);};
+int cmp(const YAMLObject rhs) const @system {assert(false);};
}

//Stores a user defined YAML data type.

@@ -86,10 +87,10 @@ package class YAMLContainer(T) if (!Node.allowed!T): YAMLObject

public:
//Get type of the stored value.
-@property override TypeInfo type() const {return typeid(T);}
+@property override TypeInfo type() const pure @safe nothrow {return typeid(T);}

//Get string representation of the container.
-override string toString()
+override string toString() @system
{
static if(!hasMember!(T, "toString"))
{

@@ -103,7 +104,7 @@ package class YAMLContainer(T) if (!Node.allowed!T): YAMLObject

protected:
//Compare with another YAMLObject.
-override int cmp(const YAMLObject rhs) const
+override int cmp(const YAMLObject rhs) const @system
{
const typeCmp = type.opCmp(rhs.type);
if(typeCmp != 0){return typeCmp;}

@@ -116,7 +117,7 @@ package class YAMLContainer(T) if (!Node.allowed!T): YAMLObject

private:
//Construct a YAMLContainer holding specified value.
-this(T value){value_ = value;}
+this(T value) @trusted {value_ = value;}
}


@@ -143,7 +144,7 @@ struct Node
@disable int opCmp(ref Pair);

///Construct a Pair from two values. Will be converted to Nodes if needed.
-this(K, V)(K key, V value)
+this(K, V)(K key, V value) @safe
{
static if(is(Unqual!K == Node)){this.key = key;}
else {this.key = Node(key);}

@@ -152,7 +153,7 @@ struct Node
}

///Equality test with another Pair.
-bool opEquals(const ref Pair rhs) const
+bool opEquals(const ref Pair rhs) const @safe
{
return cmp!true(rhs) == 0;
}

@@ -164,7 +165,7 @@ struct Node
* useTag determines whether or not we consider node tags
* in the comparison.
*/
-int cmp(bool useTag)(ref const(Pair) rhs) const
+int cmp(bool useTag)(ref const(Pair) rhs) const @safe
{
const keyCmp = key.cmp!useTag(rhs.key);
return keyCmp != 0 ? keyCmp

@@ -225,8 +226,8 @@ struct Node
* be in full form, e.g. "tag:yaml.org,2002:int", not
* a shortcut, like "!!int".
*/
-this(T)(T value, in string tag = null) if (isSomeString!T ||
-(!isArray!T && !isAssociativeArray!T))
+this(T)(T value, in string tag = null) @trusted
+if (isSomeString!T || (!isArray!T && !isAssociativeArray!T))
{
tag_ = Tag(tag);

@@ -282,7 +283,8 @@ struct Node
* auto set = Node([1, 2, 3, 4, 5], "tag:yaml.org,2002:set");
* --------------------
*/
-this(T)(T[] array, in string tag = null) if (!isSomeString!(T[]))
+this(T)(T[] array, in string tag = null) @safe
+if (!isSomeString!(T[]))
{
tag_ = Tag(tag);

@@ -346,7 +348,7 @@ struct Node
* auto pairs = Node([1 : "a", 2 : "b"], "tag:yaml.org,2002:pairs");
* --------------------
*/
-this(K, V)(V[K] array, in string tag = null)
+this(K, V)(V[K] array, in string tag = null) @safe
{
tag_ = Tag(tag);

@@ -411,7 +413,7 @@ struct Node
* auto pairs = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:pairs");
* --------------------
*/
-this(K, V)(K[] keys, V[] values, in string tag = null)
+this(K, V)(K[] keys, V[] values, in string tag = null) @safe
if(!(isSomeString!(K[]) || isSomeString!(V[])))
in
{
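A condensed sketch of the Node constructor overloads whose signatures change above, based on the examples already present in their doc comments:

//Sketch of the constructors annotated above.
auto scalar   = Node("a scalar value");
auto sequence = Node([1, 2, 3, 4, 5]);
auto set      = Node([1, 2, 3, 4, 5], "tag:yaml.org,2002:set");
auto mapping  = Node([1 : "a", 2 : "b"]);
auto pairs    = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:pairs");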
@@ -445,25 +447,25 @@ struct Node
}

///Is this node valid (initialized)?
-@property bool isValid() const {return value_.hasValue;}
+@property bool isValid() const @safe {return value_.hasValue;}

///Is this node a scalar value?
-@property bool isScalar() const {return !(isMapping || isSequence);}
+@property bool isScalar() const @safe {return !(isMapping || isSequence);}

///Is this node a sequence?
-@property bool isSequence() const {return isType!(Node[]);}
+@property bool isSequence() const @safe {return isType!(Node[]);}

///Is this node a mapping?
-@property bool isMapping() const {return isType!(Pair[]);}
+@property bool isMapping() const @safe {return isType!(Pair[]);}

///Is this node a user defined type?
-@property bool isUserType() const {return isType!YAMLObject;}
+@property bool isUserType() const @safe {return isType!YAMLObject;}

///Is this node null?
-@property bool isNull() const {return isType!YAMLNull;}
+@property bool isNull() const @safe {return isType!YAMLNull;}

///Return tag of the node.
-@property string tag() const {return tag_.get;}
+@property string tag() const @safe nothrow {return tag_.get;}

/**
* Equality test.

@@ -489,7 +491,7 @@ struct Node
*
* Returns: true if equal, false otherwise.
*/
-bool opEquals(T)(const auto ref T rhs) const
+bool opEquals(T)(const auto ref T rhs) const @safe
{
return equals!true(rhs);
}

@@ -553,7 +555,7 @@ struct Node
* the value is out of range of requested type.
*/
@property T get(T, Flag!"stringConversion" stringConversion = Yes.stringConversion)()
-if(!is(T == const))
+@trusted if(!is(T == const))
{
if(isType!T){return value_.get!T;}

@@ -623,7 +625,7 @@ struct Node

///Ditto.
@property T get(T, Flag!"stringConversion" stringConversion = Yes.stringConversion)() const
-if(is(T == const))
+@trusted if(is(T == const))
{
if(isType!(Unqual!T)){return value_.get!T;}

@@ -695,7 +697,7 @@ struct Node
*
* Throws: NodeException if this is not a sequence nor a mapping.
*/
-@property size_t length() const
+@property size_t length() const @safe
{
if(isSequence) {return value_.get!(const Node[]).length;}
else if(isMapping){return value_.get!(const Pair[]).length;}

@@ -722,7 +724,7 @@ struct Node
* non-integral index is used with a sequence or the node is
* not a collection.
*/
-ref Node opIndex(T)(T index)
+ref Node opIndex(T)(T index) @trusted
{
if(isSequence)
{

@@ -795,7 +797,7 @@ struct Node
*
* Throws: NodeException if the node is not a collection.
*/
-bool contains(T)(T rhs) const
+bool contains(T)(T rhs) const @safe
{
return contains_!(T, No.key, "contains")(rhs);
}

@@ -815,7 +817,7 @@ struct Node
*
* Throws: NodeException if the node is not a mapping.
*/
-bool containsKey(T)(T rhs) const
+bool containsKey(T)(T rhs) const @safe
{
return contains_!(T, Yes.key, "containsKey")(rhs);
}

@@ -895,7 +897,7 @@ struct Node
* Throws: NodeException if the node is not a collection, index is out
* of range or if a non-integral index is used on a sequence node.
*/
-void opIndexAssign(K, V)(V value, K index)
+void opIndexAssign(K, V)(V value, K index) @safe
{
if(isSequence())
{

@@ -968,7 +970,7 @@ struct Node
* Throws: NodeException if the node is not a sequence or an
* element could not be converted to specified type.
*/
-int opApply(T)(int delegate(ref T) dg)
+int opApply(T)(int delegate(ref T) dg) @trusted
{
enforce(isSequence,
new Error("Trying to sequence-foreach over a " ~ nodeTypeString ~ "node",

@@ -1025,7 +1027,7 @@ struct Node
* Throws: NodeException if the node is not a mapping or an
* element could not be converted to specified type.
*/
-int opApply(K, V)(int delegate(ref K, ref V) dg)
+int opApply(K, V)(int delegate(ref K, ref V) dg) @trusted
{
enforce(isMapping,
new Error("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node",
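And a sketch of reading data back out with the accessors annotated above (isSequence/isMapping, containsKey, opIndex, get and the foreach support via opApply):

//Sketch only.
void inspect(ref Node node)
{
    if(node.isMapping && node.containsKey("name"))
    {
        //opIndex plus get!T convert the stored value, throwing NodeException on mismatch.
        const name = node["name"].get!string;
    }
    else if(node.isSequence)
    {
        foreach(ref Node element; node)
        {
            //each element is itself a Node
        }
    }
}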
@@ -1125,7 +1127,7 @@ struct Node
*
* Params: value = Value to _add to the sequence.
*/
-void add(T)(T value)
+void add(T)(T value) @safe
{
enforce(isSequence(),
new Error("Trying to add an element to a " ~ nodeTypeString ~ " node", startMark_));

@@ -1162,7 +1164,7 @@ struct Node
* Params: key = Key to _add.
* value = Value to _add.
*/
-void add(K, V)(K key, V value)
+void add(K, V)(K key, V value) @safe
{
enforce(isMapping(),
new Error("Trying to add a key-value pair to a " ~

@@ -1196,7 +1198,7 @@ struct Node
*
* Throws: NodeException if the node is not a collection.
*/
-void remove(T)(T rhs)
+void remove(T)(T rhs) @trusted
{
remove_!(T, No.key, "remove")(rhs);
}

@@ -1245,7 +1247,7 @@ struct Node
* Throws: NodeException if the node is not a collection, index is out
* of range or if a non-integral index is used on a sequence node.
*/
-void removeAt(T)(T index)
+void removeAt(T)(T index) @trusted
{
remove_!(T, Yes.key, "removeAt")(index);
}

@@ -1274,7 +1276,7 @@ struct Node
}

///Compare with another _node.
-int opCmp(ref const Node node) const
+int opCmp(ref const Node node) const @safe
{
return cmp!true(node);
}

@@ -1314,9 +1316,9 @@ struct Node
*
* Returns: Constructed node.
*/
-static Node rawNode(Value value, in Mark startMark, in Tag tag,
+static Node rawNode(Value value, const Mark startMark, const Tag tag,
in ScalarStyle scalarStyle,
-in CollectionStyle collectionStyle)
+in CollectionStyle collectionStyle) @safe
{
Node node;
node.value_ = value;

@@ -1329,7 +1331,7 @@ struct Node
}

//Construct Node.Value from user defined type.
-static Value userValue(T)(T value)
+static Value userValue(T)(T value) @trusted
{
return Value(cast(YAMLObject)new YAMLContainer!T(value));
}

@@ -1339,7 +1341,7 @@ struct Node
*
* useTag determines whether or not to consider tags in node-node comparisons.
*/
-bool equals(bool useTag, T)(ref T rhs) const
+bool equals(bool useTag, T)(ref T rhs) const @safe
{
static if(is(Unqual!T == Node))
{

@@ -1371,7 +1373,7 @@ struct Node
*
* useTag determines whether or not to consider tags in the comparison.
*/
-int cmp(bool useTag)(const ref Node rhs) const
+int cmp(bool useTag)(const ref Node rhs) const @trusted
{
//Compare tags - if equal or both null, we need to compare further.
static if(useTag)

@@ -1484,7 +1486,7 @@ struct Node
*
* Returns: String representing the node tree.
*/
-@property string debugString(uint level = 0)
+@property string debugString(uint level = 0) @trusted
{
string indent;
foreach(i; 0 .. level){indent ~= " ";}

@@ -1520,14 +1522,14 @@ struct Node
}

//Get type of the node value (YAMLObject for user types).
-@property TypeInfo type() const {return value_.type;}
+@property TypeInfo type() const @safe {return value_.type;}

/*
* Determine if the value stored by the node is of specified type.
*
* This only works for default YAML types, not for user defined types.
*/
-@property bool isType(T)() const {return value_.type is typeid(Unqual!T);}
+@property bool isType(T)() const @safe {return value_.type is typeid(Unqual!T);}

private:
//Is the value a bool?

@@ -1549,13 +1551,13 @@ struct Node
alias isType!SysTime isTime;

//Does given node have the same type as this node?
-bool hasEqualType(const ref Node node) const
+bool hasEqualType(const ref Node node) const @safe
{
return value_.type is node.value_.type;
}

//Return a string describing node type (sequence, mapping or scalar)
-@property string nodeTypeString() const
+@property string nodeTypeString() const @safe
{
assert(isScalar || isSequence || isMapping, "Unknown node type");
return isScalar ? "scalar" :

@@ -1564,7 +1566,7 @@ struct Node
}

//Determine if the value can be converted to specified type.
-bool convertsTo(T)() const
+bool convertsTo(T)() const @safe
{
if(isType!T){return true;}

@@ -1576,7 +1578,7 @@ struct Node
}

//Implementation of contains() and containsKey().
-bool contains_(T, Flag!"key" key, string func)(T rhs) const
+bool contains_(T, Flag!"key" key, string func)(T rhs) const @safe
{
static if(!key) if(isSequence)
{

@@ -1597,7 +1599,7 @@ struct Node
}

//Implementation of remove() and removeAt()
-void remove_(T, Flag!"key" key, string func)(T rhs)
+void remove_(T, Flag!"key" key, string func)(T rhs) @system
{
enforce(isSequence || isMapping,
new Error("Trying to " ~ func ~ "() from a " ~ nodeTypeString ~ " node",

@@ -1641,7 +1643,7 @@ struct Node
}

//Get index of pair with key (or value, if value is true) matching index.
-sizediff_t findPair(T, bool value = false)(const ref T index) const
+sizediff_t findPair(T, bool value = false)(const ref T index) const @safe
{
const pairs = value_.get!(const Pair[])();
const(Node)* node;

@@ -1664,7 +1666,7 @@ struct Node
}

//Check if index is integral and in range.
-void checkSequenceIndex(T)(T index) const
+void checkSequenceIndex(T)(T index) const @trusted
{
assert(isSequence,
"checkSequenceIndex() called on a " ~ nodeTypeString ~ " node");

@@ -1682,7 +1684,7 @@ struct Node
}

//Const version of opIndex.
-ref const(Node) indexConst(T)(T index) const
+ref const(Node) indexConst(T)(T index) const @safe
{
if(isSequence)
{

@@ -1718,7 +1720,7 @@ package:
* Params: pairs = Array of pairs to merge into.
* toMerge = Pair to merge.
*/
-void merge(ref Node.Pair[] pairs, ref Node.Pair toMerge)
+void merge(ref Node.Pair[] pairs, ref Node.Pair toMerge) @safe
{
foreach(ref pair; pairs)
{

@@ -1736,7 +1738,7 @@ void merge(ref Node.Pair[] pairs, ref Node.Pair toMerge)
* Params: pairs = Array of pairs to merge into.
* toMerge = Pairs to merge.
*/
-void merge(ref Node.Pair[] pairs, Node.Pair[] toMerge)
+void merge(ref Node.Pair[] pairs, Node.Pair[] toMerge) @safe
{
bool eq(ref Node.Pair a, ref Node.Pair b){return a.key == b.key;}
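Similarly, a sketch of the mutating operations changed in the later Node hunks (add, opIndexAssign, removeAt):

//Sketch only.
void mutate(ref Node node)
{
    if(node.isSequence)
    {
        node.add("appended value");   //append an element
        node.removeAt(0);             //drop the element at index 0
    }
    else if(node.isMapping)
    {
        node.add("key", "value");     //add a key/value pair
        node["key"] = "updated";      //opIndexAssign replaces the value
    }
}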
@@ -135,7 +135,7 @@ final class Parser

public:
///Construct a Parser using specified Scanner.
-this(Scanner scanner)
+this(Scanner scanner) @trusted nothrow
{
state_ = &parseStreamStart;
scanner_ = scanner;

@@ -144,7 +144,7 @@ final class Parser
}

///Destroy the parser.
-~this()
+@trusted ~this()
{
clear(currentEvent_);
clear(tagDirectives_);

@@ -164,7 +164,7 @@ final class Parser
* or if there are any events left if no types specified.
* false otherwise.
*/
-bool checkEvent(EventID[] ids...)
+bool checkEvent(EventID[] ids...) @trusted
{
//Check if the next event is one of specified types.
if(currentEvent_.isNull && state_ !is null)

@@ -193,7 +193,7 @@ final class Parser
*
* Must not be called if there are no events left.
*/
-immutable(Event) peekEvent()
+immutable(Event) peekEvent() @trusted
{
if(currentEvent_.isNull && state_ !is null)
{

@@ -208,7 +208,7 @@ final class Parser
*
* Must not be called if there are no events left.
*/
-immutable(Event) getEvent()
+immutable(Event) getEvent() @trusted
{
//Get the next event and proceed further.
if(currentEvent_.isNull && state_ !is null)

@@ -227,7 +227,7 @@ final class Parser

private:
///Pop and return the newest state in states_.
-Event delegate() popState()
+Event delegate() popState() @trusted
{
enforce(states_.length > 0,
new YAMLException("Parser: Need to pop state but no states left to pop"));

@@ -237,7 +237,7 @@ final class Parser
}

///Pop and return the newest mark in marks_.
-Mark popMark()
+Mark popMark() @trusted
{
enforce(marks_.length > 0,
new YAMLException("Parser: Need to pop mark but no marks left to pop"));

@@ -253,7 +253,7 @@ final class Parser
*/

///Parse stream start.
-Event parseStreamStart()
+Event parseStreamStart() @safe
{
immutable token = scanner_.getToken();
state_ = &parseImplicitDocumentStart;

@@ -261,7 +261,7 @@ final class Parser
}

///Parse implicit document start, unless explicit is detected: if so, parse explicit.
-Event parseImplicitDocumentStart()
+Event parseImplicitDocumentStart() @trusted
{
//Parse an implicit document.
if(!scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,

@@ -279,7 +279,7 @@ final class Parser
}

///Parse explicit document start.
-Event parseDocumentStart()
+Event parseDocumentStart() @trusted
{
//Parse any extra document end indicators.
while(scanner_.checkToken(TokenID.DocumentEnd)){scanner_.getToken();}

@@ -312,7 +312,7 @@ final class Parser
}

///Parse document end (explicit or implicit).
-Event parseDocumentEnd()
+Event parseDocumentEnd() @safe
{
Mark startMark = scanner_.peekToken().startMark;
const bool explicit = scanner_.checkToken(TokenID.DocumentEnd);

@@ -324,7 +324,7 @@ final class Parser
}

///Parse document content.
-Event parseDocumentContent()
+Event parseDocumentContent() @safe
{
if(scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
TokenID.DocumentEnd, TokenID.StreamEnd))

@@ -336,7 +336,7 @@ final class Parser
}

///Process directives at the beginning of a document.
-TagDirective[] processDirectives()
+TagDirective[] processDirectives() @system
{
//Destroy version and tag handles from previous document.
YAMLVersion_ = null;

@@ -411,7 +411,7 @@ final class Parser
*/

///Parse a node.
-Event parseNode(in bool block, in bool indentlessSequence = false)
+Event parseNode(in bool block, in bool indentlessSequence = false) @safe
{
if(scanner_.checkToken(TokenID.Alias))
{

@@ -527,7 +528,8 @@ final class Parser
* startMark = Position of the node the tag belongs to.
* tagMark = Position of the tag.
*/
-string processTag(in string tag, in Mark startMark, in Mark tagMark)
+string processTag(const string tag, const Mark startMark, const Mark tagMark)
+const @trusted
{
//Tag handle and suffix are separated by '\0'.
const parts = tag.split("\0");

@@ -556,14 +557,14 @@ final class Parser
}

///Wrappers to parse nodes.
-Event parseBlockNode(){return parseNode(true);}
-Event parseFlowNode(){return parseNode(false);}
-Event parseBlockNodeOrIndentlessSequence(){return parseNode(true, true);}
+Event parseBlockNode() @safe {return parseNode(true);}
+Event parseFlowNode() @safe {return parseNode(false);}
+Event parseBlockNodeOrIndentlessSequence() @safe {return parseNode(true, true);}

///block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END

///Parse an entry of a block sequence. If first is true, this is the first entry.
-Event parseBlockSequenceEntry(bool first)()
+Event parseBlockSequenceEntry(bool first)() @trusted
{
static if(first){marks_ ~= scanner_.getToken().startMark;}

@@ -597,7 +598,7 @@ final class Parser
///indentless_sequence ::= (BLOCK-ENTRY block_node?)+

///Parse an entry of an indentless sequence.
-Event parseIndentlessSequenceEntry()
+Event parseIndentlessSequenceEntry() @trusted
{
if(scanner_.checkToken(TokenID.BlockEntry))
{

@@ -627,7 +628,7 @@ final class Parser
*/

///Parse a key in a block mapping. If first is true, this is the first key.
-Event parseBlockMappingKey(bool first)()
+Event parseBlockMappingKey(bool first)() @trusted
{
static if(first){marks_ ~= scanner_.getToken().startMark;}

@@ -660,7 +661,7 @@ final class Parser
}

///Parse a value in a block mapping.
-Event parseBlockMappingValue()
+Event parseBlockMappingValue() @trusted
{
if(scanner_.checkToken(TokenID.Value))
{

@@ -694,7 +695,7 @@ final class Parser
*/

///Parse an entry in a flow sequence. If first is true, this is the first entry.
-Event parseFlowSequenceEntry(bool first)()
+Event parseFlowSequenceEntry(bool first)() @trusted
{
static if(first){marks_ ~= scanner_.getToken().startMark;}

@@ -736,7 +737,7 @@ final class Parser
}

///Parse a key in flow context.
-Event parseFlowKey(in Event delegate() nextState)
+Event parseFlowKey(in Event delegate() nextState) @trusted
{
immutable token = scanner_.getToken();

@@ -752,13 +753,14 @@ final class Parser
}

///Parse a mapping key in an entry in a flow sequence.
-Event parseFlowSequenceEntryMappingKey()
+Event parseFlowSequenceEntryMappingKey() @safe
{
return parseFlowKey(&parseFlowSequenceEntryMappingValue);
}

///Parse a mapping value in a flow context.
Event parseFlowValue(TokenID checkId, in Event delegate() nextState)
+@trusted
{
if(scanner_.checkToken(TokenID.Value))
{

@@ -778,14 +780,14 @@ final class Parser
}

///Parse a mapping value in an entry in a flow sequence.
-Event parseFlowSequenceEntryMappingValue()
+Event parseFlowSequenceEntryMappingValue() @safe
{
return parseFlowValue(TokenID.FlowSequenceEnd,
&parseFlowSequenceEntryMappingEnd);
}

///Parse end of a mapping in a flow sequence entry.
-Event parseFlowSequenceEntryMappingEnd()
+Event parseFlowSequenceEntryMappingEnd() @safe
{
state_ = &parseFlowSequenceEntry!false;
immutable token = scanner_.peekToken();

@@ -801,7 +803,7 @@ final class Parser
*/

///Parse a key in a flow mapping.
-Event parseFlowMappingKey(bool first)()
+Event parseFlowMappingKey(bool first)() @trusted
{
static if(first){marks_ ~= scanner_.getToken().startMark;}

@@ -841,20 +843,20 @@ final class Parser
}

///Parse a value in a flow mapping.
-Event parseFlowMappingValue()
+Event parseFlowMappingValue() @safe
{
return parseFlowValue(TokenID.FlowMappingEnd, &parseFlowMappingKey!false);
}

///Parse an empty value in a flow mapping.
-Event parseFlowMappingEmptyValue()
+Event parseFlowMappingEmptyValue() @safe
{
state_ = &parseFlowMappingKey!false;
return processEmptyScalar(scanner_.peekToken().startMark);
}

///Return an empty scalar.
-Event processEmptyScalar(in Mark mark)
+Event processEmptyScalar(const Mark mark) const pure @safe nothrow
{
return scalarEvent(mark, mark, Anchor(), Tag(), tuple(true, false), "");
}
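Parser is package-internal. For context only, a rough sketch of the pull loop the rest of the library drives it with (per the doc comments above, checkEvent() with no arguments reports whether any events remain):

//Sketch only: Parser, Event and EventID are dyaml-internal.
void drainEvents(Parser parser)
{
    while(parser.checkEvent())
    {
        immutable event = parser.getEvent();
        //dispatch on event.id (EventID.DocumentStart, EventID.Scalar, ...)
    }
}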
@@ -56,7 +56,7 @@ struct Queue(T)
@disable int opCmp(ref Queue);

///Destroy the queue, deallocating all its elements.
-~this()
+@safe nothrow ~this()
{
while(!empty){pop();}
cursor_ = last_ = first_ = null;

@@ -64,13 +64,13 @@ struct Queue(T)
}

///Start iterating over the queue.
-void startIteration()
+void startIteration() pure @safe nothrow
{
cursor_ = first_;
}

///Get next element in the queue.
-ref const(T) next()
+ref const(T) next() pure @safe nothrow
in
{
assert(!empty);

@@ -84,13 +84,13 @@ struct Queue(T)
}

///Are we done iterating?
-bool iterationOver() const
+bool iterationOver() const pure @safe nothrow
{
return cursor_ is null;
}

///Push new item to the queue.
-void push(T item)
+void push(T item) @trusted nothrow
{
Node* newLast = allocate!Node(item, cast(Node*)null);
if(last_ !is null){last_.next_ = newLast;}

@@ -100,7 +100,7 @@ struct Queue(T)
}

///Insert a new item putting it to specified index in the linked list.
-void insert(T item, in size_t idx)
+void insert(T item, in size_t idx) @trusted nothrow
in
{
assert(idx <= length_);

@@ -134,7 +134,7 @@ struct Queue(T)
}

///Return the next element in the queue and remove it.
-T pop()
+T pop() @trusted nothrow
in
{
assert(!empty, "Trying to pop an element from an empty queue");

@@ -155,7 +155,7 @@ struct Queue(T)
}

///Return the next element in the queue.
-ref inout(T) peek() inout
+ref inout(T) peek() inout pure @safe nothrow
in
{
assert(!empty, "Trying to peek at an element in an empty queue");

@@ -166,13 +166,13 @@ struct Queue(T)
}

///Is the queue empty?
-@property bool empty() const
+@property bool empty() const pure @safe nothrow
{
return first_ is null;
}

///Return number of elements in the queue.
-@property size_t length() const
+@property size_t length() const pure @safe nothrow
{
return length_;
}

@@ -182,7 +182,7 @@ struct Queue(T)
private:

///Allocate a struct, passing arguments to its constructor or default initializer.
-T* allocate(T, Args...)(Args args)
+T* allocate(T, Args...)(Args args) @system nothrow
{
T* ptr = cast(T*)malloc(T.sizeof);
*ptr = T(args);

@@ -192,7 +192,7 @@ T* allocate(T, Args...)(Args args)
}

///Deallocate struct pointed at by specified pointer.
-void free(T)(T* ptr)
+void free(T)(T* ptr) @system nothrow
{
//GC doesn't need to care about any references in this struct anymore.
static if(hasIndirections!T){GC.removeRange(cast(void*)ptr);}
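A brief usage sketch (not from the diff) of the malloc-backed Queue annotated above:

//Sketch only: Queue is dyaml's internal FIFO with manual allocation.
Queue!int queue;
queue.push(1);
queue.push(2);
assert(queue.length == 2 && !queue.empty);
assert(queue.peek() == 1);   //front element, not removed
const front = queue.pop();   //returns 1 and frees its node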
@ -31,6 +31,7 @@ package:
|
||||||
class ReaderException : YAMLException
|
class ReaderException : YAMLException
|
||||||
{
|
{
|
||||||
this(string msg, string file = __FILE__, int line = __LINE__)
|
this(string msg, string file = __FILE__, int line = __LINE__)
|
||||||
|
@safe nothrow
|
||||||
{
|
{
|
||||||
super("Error reading stream: " ~ msg, file, line);
|
super("Error reading stream: " ~ msg, file, line);
|
||||||
}
|
}
|
||||||
|
@ -65,7 +66,7 @@ final class Reader
|
||||||
*
|
*
|
||||||
* Throws: ReaderException if the stream is invalid.
|
* Throws: ReaderException if the stream is invalid.
|
||||||
*/
|
*/
|
||||||
this(Stream stream)
|
this(Stream stream) @trusted
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
assert(stream.readable && stream.seekable,
|
assert(stream.readable && stream.seekable,
|
||||||
|
@ -77,7 +78,7 @@ final class Reader
|
||||||
decoder_ = UTFFastDecoder(stream_);
|
decoder_ = UTFFastDecoder(stream_);
|
||||||
}
|
}
|
||||||
|
|
||||||
~this()
|
@trusted nothrow ~this()
|
||||||
{
|
{
|
||||||
//Delete the buffer, if allocated.
|
//Delete the buffer, if allocated.
|
||||||
if(bufferAllocated_ is null){return;}
|
if(bufferAllocated_ is null){return;}
|
||||||
|
@@ -96,7 +97,7 @@ final class Reader
* Throws: ReaderException if trying to read past the end of the stream
* or if invalid data is read.
*/
-dchar peek(size_t index = 0)
+dchar peek(size_t index = 0) @trusted
{
if(buffer_.length < bufferOffset_ + index + 1)
{
@@ -121,7 +122,7 @@ final class Reader
*
* Returns: Characters starting at current position or an empty slice if out of bounds.
*/
-const(dstring) prefix(size_t length)
+const(dstring) prefix(size_t length) @safe
{
return slice(0, length);
}
@@ -137,7 +138,7 @@ final class Reader
*
* Returns: Slice into the internal buffer or an empty slice if out of bounds.
*/
-const(dstring) slice(size_t start, size_t end)
+const(dstring) slice(size_t start, size_t end) @trusted
{
if(buffer_.length <= bufferOffset_ + end)
{
@@ -159,7 +160,7 @@ final class Reader
* Throws: ReaderException if trying to read past the end of the stream
* or if invalid data is read.
*/
-dchar get()
+dchar get() @safe
{
const result = peek();
forward();
@@ -176,11 +177,11 @@ final class Reader
* Throws: ReaderException if trying to read past the end of the stream
* or if invalid data is read.
*/
-dstring get(size_t length)
+dstring get(size_t length) @safe
{
-auto result = prefix(length).dup;
+auto result = prefix(length).idup;
forward(length);
-return cast(dstring)result;
+return result;
}

/**
@@ -191,7 +192,7 @@ final class Reader
* Throws: ReaderException if trying to read past the end of the stream
* or if invalid data is read.
*/
-void forward(size_t length = 1)
+void forward(size_t length = 1) @trusted
{
if(buffer_.length <= bufferOffset_ + length + 1)
{
@@ -217,19 +218,19 @@ final class Reader
}

///Get a string describing current stream position, used for error messages.
-@property final Mark mark() const {return Mark(line_, column_);}
+@property final Mark mark() const pure @safe nothrow {return Mark(line_, column_);}

///Get current line number.
-@property final uint line() const {return line_;}
+@property final uint line() const pure @safe nothrow {return line_;}

///Get current column number.
-@property final uint column() const {return column_;}
+@property final uint column() const pure @safe nothrow {return column_;}

///Get index of the current character in the stream.
-@property final size_t charIndex() const {return charIndex_;}
+@property final size_t charIndex() const pure @safe nothrow {return charIndex_;}

///Get encoding of the input stream.
-@property final Encoding encoding() const {return decoder_.encoding;}
+@property final Encoding encoding() const pure @safe nothrow {return decoder_.encoding;}

private:
/**
@@ -243,7 +244,7 @@ final class Reader
* Throws: ReaderException if trying to read past the end of the stream
* or if invalid data is read.
*/
-void updateBuffer(in size_t length)
+void updateBuffer(in size_t length) @system
{
//Get rid of unneeded data in the buffer.
if(bufferOffset_ > 0)
@@ -284,7 +285,7 @@ final class Reader
* if nonprintable characters are detected, or
* if there is an error reading from the stream.
*/
-void loadChars(size_t chars)
+void loadChars(size_t chars) @system
{
const oldLength = buffer_.length;
const oldPosition = stream_.position;
@@ -312,7 +313,7 @@ final class Reader
}

//Handle an exception thrown in loadChars method of any Reader.
-void handleLoadCharsException(Exception e, ulong oldPosition)
+void handleLoadCharsException(Exception e, ulong oldPosition) @system
{
try{throw e;}
catch(UTFException e)
@@ -328,7 +329,7 @@ final class Reader
}

//Code shared by loadEntireFile methods.
-void loadEntireFile_()
+void loadEntireFile_() @system
{
const maxChars = decoder_.maxChars;
bufferReserve(maxChars + 1);
@@ -342,7 +343,7 @@ final class Reader
}

//Ensure there is space for at least capacity characters in bufferAllocated_.
-void bufferReserve(in size_t capacity)
+void bufferReserve(in size_t capacity) @system nothrow
{
if(bufferAllocated_ !is null && bufferAllocated_.length >= capacity){return;}

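The Reader changes above follow one pattern: functions that index into the manually managed character buffer (peek, slice, forward, updateBuffer, loadChars) are only marked @trusted or @system, while wrappers that merely compose them (get, prefix) become @safe, and the one-line getters gain pure @safe nothrow. A minimal sketch of how D's safety attributes compose, illustrative only and not taken from the commit:

// Illustrative sketch only: how @system, @trusted and @safe interact in D.
@system void pokeRaw(int* p) { *p += 1; }       // does unchecked pointer work
@trusted void poke(ref int x) { pokeRaw(&x); }  // manually verified wrapper
@safe void usePoke()
{
    int x;
    poke(x);        // fine: @trusted callees are allowed in @safe code
    // pokeRaw(&x); // would not compile: @system is not callable from @safe
}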
@@ -408,8 +409,8 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)
dchar[] buffer_;

public:
-///Construct a UTFFastDecoder decoding a stream.
+///Construct a UTFBlockDecoder decoding a stream.
-this(EndianStream stream)
+this(EndianStream stream) @system
{
stream_ = stream;
available_ = stream_.available;
@@ -465,19 +466,19 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)
}

///Get maximum number of characters that might be in the stream.
-@property size_t maxChars() const {return maxChars_;}
+@property size_t maxChars() const pure @safe nothrow {return maxChars_;}

///Get encoding we're decoding from.
-@property Encoding encoding() const {return encoding_;}
+@property Encoding encoding() const pure @safe nothrow {return encoding_;}

///Are we done decoding?
-@property bool done() const
+@property bool done() const pure @safe nothrow
{
return rawUsed_ == 0 && buffer_.length == 0 && available_ == 0;
}

///Get next character.
-dchar getDChar()
+dchar getDChar() @system
{
if(buffer_.length)
{
@@ -492,7 +493,7 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)
}

///Get as many characters as possible, but at most maxChars. Slice returned will be invalidated in further calls.
-const(dchar[]) getDChars(size_t maxChars = size_t.max)
+const(dchar[]) getDChars(size_t maxChars = size_t.max) @system
{
if(buffer_.length)
{
@@ -509,7 +510,7 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)

private:
//Read and decode characters from file and store them in the buffer.
-void updateBuffer()
+void updateBuffer() @system
{
assert(buffer_.length == 0);
final switch(encoding_)
@@ -522,7 +523,6 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)
available_ -= bytes;
decodeRawBuffer(rawBuffer8_, rawLength);
break;

case Encoding.UTF_16:
const words = min((bufferSize_ / 2) - rawUsed_, available_ / 2);
//Current length of valid data in rawBuffer16_.
@@ -534,7 +534,6 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)
}
decodeRawBuffer(rawBuffer16_, rawLength);
break;

case Encoding.UTF_32:
const chars = min(bufferSize_ / 4, available_ / 4);
foreach(c; 0 .. chars)
@@ -548,7 +547,7 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)
}

//Decode contents of a UTF-8 or UTF-16 raw buffer.
-void decodeRawBuffer(C)(C[] buffer, const size_t length)
+void decodeRawBuffer(C)(C[] buffer, const size_t length) pure @system
{
//End of part of rawBuffer8_ that contains
//complete characters and can be decoded.
@@ -567,6 +566,7 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)

//Determine the end of last UTF-8 or UTF-16 sequence in a raw buffer.
size_t endOfLastUTFSequence(C)(const C[] buffer, const size_t max)
+pure @system nothrow
{
static if(is(C == char))
{
@@ -598,7 +598,7 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)
}

//Decode a UTF-8 or UTF-16 buffer (with no incomplete sequences at the end).
-void decodeUTF(C)(const C[] source)
+void decodeUTF(C)(const C[] source) pure @system
{
size_t bufpos = 0;
const srclength = source.length;
@@ -626,7 +626,7 @@ struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)
*
* Returns: True if all the characters are printable, false otherwise.
*/
-bool printable(const ref dchar[] chars) pure
+bool printable(const ref dchar[] chars) pure @safe nothrow
{
foreach(c; chars)
{
@@ -66,7 +66,7 @@ final class Representer
* disabled to use custom representer
* functions for default types.
*/
-this(bool useDefaultRepresenters = true)
+this(bool useDefaultRepresenters = true) @safe
{
if(!useDefaultRepresenters){return;}
addRepresenter!YAMLNull(&representNull);
@@ -81,20 +81,20 @@ final class Representer
}

///Destroy the Representer.
-~this()
+pure @safe nothrow ~this()
{
clear(representers_);
representers_ = null;
}

///Set default _style for scalars. If style is $(D ScalarStyle.Invalid), the _style is chosen automatically.
-@property void defaultScalarStyle(ScalarStyle style)
+@property void defaultScalarStyle(ScalarStyle style) pure @safe nothrow
{
defaultScalarStyle_ = style;
}

///Set default _style for collections. If style is $(D CollectionStyle.Invalid), the _style is chosen automatically.
-@property void defaultCollectionStyle(CollectionStyle style)
+@property void defaultCollectionStyle(CollectionStyle style) pure @safe nothrow
{
defaultCollectionStyle_ = style;
}
@@ -221,7 +221,7 @@ final class Representer
* }
* --------------------
*/
-void addRepresenter(T)(Node function(ref Node, Representer) representer)
+void addRepresenter(T)(Node function(ref Node, Representer) representer) @trusted
{
assert((typeid(T) in representers_) is null,
"Representer function for data type " ~ typeid(T).toString() ~
@@ -271,7 +271,7 @@ final class Representer
* --------------------
*/
Node representScalar(string tag, string scalar,
-ScalarStyle style = ScalarStyle.Invalid)
+ScalarStyle style = ScalarStyle.Invalid) @safe
{
if(style == ScalarStyle.Invalid){style = defaultScalarStyle_;}
return Node.rawNode(Node.Value(scalar), Mark(), Tag(tag), style,
@@ -320,7 +320,7 @@ final class Representer
* --------------------
*/
Node representSequence(string tag, Node[] sequence,
-CollectionStyle style = CollectionStyle.Invalid)
+CollectionStyle style = CollectionStyle.Invalid) @trusted
{
Node[] value;
value.length = sequence.length;
@@ -389,7 +389,7 @@ final class Representer
* --------------------
*/
Node representMapping(string tag, Node.Pair[] pairs,
-CollectionStyle style = CollectionStyle.Invalid)
+CollectionStyle style = CollectionStyle.Invalid) @trusted
{
Node.Pair[] value;
value.length = pairs.length;
@@ -426,7 +426,7 @@ final class Representer

package:
//Represent a node based on its type, and return the represented result.
-Node representData(ref Node data)
+Node representData(ref Node data) @system
{
//User types are wrapped in YAMLObject.
auto type = data.isUserType ? data.as!YAMLObject.type : data.type;
@@ -452,7 +452,7 @@ final class Representer
}

//Represent a node, serializing with specified Serializer.
-void represent(ref Serializer serializer, ref Node node)
+void represent(ref Serializer serializer, ref Node node) @trusted
{
auto data = representData(node);
serializer.serialize(data);
@@ -461,13 +461,13 @@ final class Representer


///Represent a _null _node as a _null YAML value.
-Node representNull(ref Node node, Representer representer)
+Node representNull(ref Node node, Representer representer) @safe
{
return representer.representScalar("tag:yaml.org,2002:null", "null");
}

///Represent a string _node as a string scalar.
-Node representString(ref Node node, Representer representer)
+Node representString(ref Node node, Representer representer) @safe
{
string value = node.as!string;
return value is null
@@ -476,7 +476,7 @@ Node representString(ref Node node, Representer representer)
}

///Represent a bytes _node as a binary scalar.
-Node representBytes(ref Node node, Representer representer)
+Node representBytes(ref Node node, Representer representer) @system
{
const ubyte[] value = node.as!(ubyte[]);
if(value is null){return representNull(node, representer);}
@@ -486,21 +486,21 @@ Node representBytes(ref Node node, Representer representer)
}

///Represent a bool _node as a bool scalar.
-Node representBool(ref Node node, Representer representer)
+Node representBool(ref Node node, Representer representer) @safe
{
return representer.representScalar("tag:yaml.org,2002:bool",
node.as!bool ? "true" : "false");
}

///Represent a long _node as an integer scalar.
-Node representLong(ref Node node, Representer representer)
+Node representLong(ref Node node, Representer representer) @system
{
return representer.representScalar("tag:yaml.org,2002:int",
to!string(node.as!long));
}

///Represent a real _node as a floating point scalar.
-Node representReal(ref Node node, Representer representer)
+Node representReal(ref Node node, Representer representer) @system
{
real f = node.as!real;
string value = isNaN(f) ? ".nan":
@@ -514,14 +514,14 @@ Node representReal(ref Node node, Representer representer)
}

///Represent a SysTime _node as a timestamp.
-Node representSysTime(ref Node node, Representer representer)
+Node representSysTime(ref Node node, Representer representer) @system
{
return representer.representScalar("tag:yaml.org,2002:timestamp",
node.as!SysTime.toISOExtString());
}

///Represent a sequence _node as sequence/set.
-Node representNodes(ref Node node, Representer representer)
+Node representNodes(ref Node node, Representer representer) @safe
{
auto nodes = node.as!(Node[]);
if(node.tag_ == Tag("tag:yaml.org,2002:set"))
@@ -543,7 +543,7 @@ Node representNodes(ref Node node, Representer representer)
}

///Represent a mapping _node as map/ordered map/pairs.
-Node representPairs(ref Node node, Representer representer)
+Node representPairs(ref Node node, Representer representer) @system
{
auto pairs = node.as!(Node.Pair[]);

@@ -600,7 +600,7 @@ struct MyStruct
{
int x, y, z;

-const int opCmp(ref const MyStruct s)
+const int opCmp(ref const MyStruct s) const pure @safe nothrow
{
if(x != s.x){return x - s.x;}
if(y != s.y){return y - s.y;}
@@ -609,7 +609,7 @@ struct MyStruct
}
}

-Node representMyStruct(ref Node node, Representer representer)
+Node representMyStruct(ref Node node, Representer representer) @system
{
//The node is guaranteed to be MyStruct as we add representer for MyStruct.
auto value = node.as!MyStruct;
@@ -619,14 +619,14 @@ Node representMyStruct(ref Node node, Representer representer)
return representer.representScalar("!mystruct.tag", scalar);
}

-Node representMyStructSeq(ref Node node, Representer representer)
+Node representMyStructSeq(ref Node node, Representer representer) @safe
{
auto value = node.as!MyStruct;
auto nodes = [Node(value.x), Node(value.y), Node(value.z)];
return representer.representSequence("!mystruct.tag", nodes);
}

-Node representMyStructMap(ref Node node, Representer representer)
+Node representMyStructMap(ref Node node, Representer representer) @safe
{
auto value = node.as!MyStruct;
auto pairs = [Node.Pair("x", value.x),
@@ -639,14 +639,14 @@ class MyClass
{
int x, y, z;

-this(int x, int y, int z)
+this(int x, int y, int z) pure @safe nothrow
{
this.x = x;
this.y = y;
this.z = z;
}

-override int opCmp(Object o)
+override int opCmp(Object o) pure @safe nothrow
{
MyClass s = cast(MyClass)o;
if(s is null){return -1;}
@@ -657,14 +657,14 @@ class MyClass
}

///Useful for Node.as!string .
-override string toString()
+override string toString() @trusted
{
return format("MyClass(", x, ", ", y, ", ", z, ")");
}
}

//Same as representMyStruct.
-Node representMyClass(ref Node node, Representer representer)
+Node representMyClass(ref Node node, Representer representer) @system
{
//The node is guaranteed to be MyClass as we add representer for MyClass.
auto value = node.as!MyClass;
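The MyStruct and MyClass snippets above come from the Representer documentation examples touched by this diff. For context, a custom representer written in that style is registered and used roughly as follows; this is a hedged sketch assuming the D:YAML API of this period (Dumper, Representer.addRepresenter, Node construction from a user type), not code taken from the commit itself:

import std.conv;
import yaml;

struct MyStruct { int x, y, z; }

// Same shape as representMyStruct above: turn the wrapped value into a scalar.
Node representMyStruct(ref Node node, Representer representer) @system
{
    auto value = node.as!MyStruct;
    auto scalar = text(value.x, ":", value.y, ":", value.z);
    return representer.representScalar("!mystruct.tag", scalar);
}

void main()
{
    auto representer = new Representer;
    representer.addRepresenter!MyStruct(&representMyStruct);

    auto dumper = Dumper("output.yaml");
    dumper.representer = representer;
    dumper.dump(Node(MyStruct(1, 2, 3)));
}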
@@ -61,7 +61,7 @@ final class Resolver
*
* Params: defaultImplicitResolvers = Use default YAML implicit resolvers?
*/
-this(bool defaultImplicitResolvers = true)
+this(bool defaultImplicitResolvers = true) @safe
{
defaultScalarTag_ = Tag("tag:yaml.org,2002:str");
defaultSequenceTag_ = Tag("tag:yaml.org,2002:seq");
@@ -70,7 +70,7 @@ final class Resolver
}

///Destroy the Resolver.
-~this()
+pure @safe nothrow ~this()
{
clear(yamlImplicitResolvers_);
yamlImplicitResolvers_ = null;
@@ -116,6 +116,7 @@ final class Resolver
* --------------------
*/
void addImplicitResolver(string tag, Regex!char regexp, string first)
+pure @safe
{
foreach(const dchar c; first)
{
@@ -141,7 +142,7 @@ final class Resolver
*
* Returns: Resolved tag.
*/
-Tag resolve(NodeID kind, Tag tag, string value, bool implicit)
+Tag resolve(NodeID kind, Tag tag, string value, bool implicit) @safe
{
if(!tag.isNull() && tag.get() != "!"){return tag;}

@@ -210,17 +211,17 @@ final class Resolver
}

///Return default scalar tag.
-@property Tag defaultScalarTag() const {return defaultScalarTag_;}
+@property Tag defaultScalarTag() const pure @safe nothrow {return defaultScalarTag_;}

///Return default sequence tag.
-@property Tag defaultSequenceTag() const {return defaultSequenceTag_;}
+@property Tag defaultSequenceTag() const pure @safe nothrow {return defaultSequenceTag_;}

///Return default mapping tag.
-@property Tag defaultMappingTag() const {return defaultMappingTag_;}
+@property Tag defaultMappingTag() const pure @safe nothrow {return defaultMappingTag_;}

private:
///Add default implicit resolvers.
-void addImplicitResolvers()
+void addImplicitResolvers() @safe
{
addImplicitResolver("tag:yaml.org,2002:bool",
regex(r"^(?:yes|Yes|YES|no|No|NO|true|True|TRUE"

133  dyaml/scanner.d
@@ -152,7 +152,7 @@ final class Scanner

public:
///Construct a Scanner using specified Reader.
-this(Reader reader)
+this(Reader reader) @trusted nothrow
{
//Return the next token, but do not delete it from the queue
reader_ = reader;
@@ -161,7 +161,7 @@ final class Scanner
}

///Destroy the scanner.
-~this()
+@trusted ~this()
{
clear(tokens_);
clear(indents_);
@@ -182,7 +182,7 @@ final class Scanner
* or if there are any tokens left if no types specified.
* false otherwise.
*/
-bool checkToken(in TokenID[] ids ...)
+bool checkToken(in TokenID[] ids ...) @safe
{
//Check if the next token is one of specified types.
while(needMoreTokens()){fetchToken();}
@@ -206,7 +206,7 @@ final class Scanner
*
* Must not be called if there are no tokens left.
*/
-ref const(Token) peekToken()
+ref const(Token) peekToken() @safe
{
while(needMoreTokens){fetchToken();}
if(!tokens_.empty){return tokens_.peek();}
@@ -218,7 +218,7 @@ final class Scanner
*
* Must not be called if there are no tokens left.
*/
-Token getToken()
+Token getToken() @safe
{
while(needMoreTokens){fetchToken();}
if(!tokens_.empty)
@@ -231,7 +231,7 @@ final class Scanner

private:
///Determine whether or not we need to fetch more tokens before peeking/getting a token.
-bool needMoreTokens()
+bool needMoreTokens() pure @safe
{
if(done_) {return false;}
if(tokens_.empty){return true;}
@@ -242,7 +242,7 @@ final class Scanner
}

///Fetch at token, adding it to tokens_.
-void fetchToken()
+void fetchToken() @trusted
{
///Eat whitespaces and comments until we reach the next token.
scanToNextToken();
@@ -287,7 +287,7 @@ final class Scanner


///Return the token number of the nearest possible simple key.
-uint nextPossibleSimpleKey()
+uint nextPossibleSimpleKey() pure @safe nothrow
{
uint minTokenNumber = uint.max;
foreach(k, ref simpleKey; possibleSimpleKeys_)
@@ -307,7 +307,7 @@ final class Scanner
* Disabling this will allow simple keys of any length and
* height (may cause problems if indentation is broken though).
*/
-void stalePossibleSimpleKeys()
+void stalePossibleSimpleKeys() pure @safe
{
foreach(level, ref key; possibleSimpleKeys_)
{
@@ -328,7 +328,7 @@ final class Scanner
*
* This function is called for ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
*/
-void savePossibleSimpleKey()
+void savePossibleSimpleKey() pure @system
{
//Check if a simple key is required at the current position.
const required = (flowLevel_ == 0 && indent_ == reader_.column);
@@ -360,7 +360,7 @@ final class Scanner
}

///Remove the saved possible key position at the current flow level.
-void removePossibleSimpleKey()
+void removePossibleSimpleKey() pure @safe
{
if(possibleSimpleKeys_.length <= flowLevel_){return;}

@@ -379,7 +379,7 @@ final class Scanner
*
* Params: column = Current column in the file/stream.
*/
-void unwindIndent(in int column)
+void unwindIndent(in int column) @trusted
{
if(flowLevel_ > 0)
{
@@ -416,7 +416,7 @@ final class Scanner
*
* Returns: true if the indentation was increased, false otherwise.
*/
-bool addIndent(int column)
+bool addIndent(int column) @trusted
{
if(indent_ >= column){return false;}
indents_ ~= indent_;
@@ -426,13 +426,13 @@ final class Scanner


///Add STREAM-START token.
-void fetchStreamStart()
+void fetchStreamStart() @safe nothrow
{
tokens_.push(streamStartToken(reader_.mark, reader_.mark, reader_.encoding));
}

///Add STREAM-END token.
-void fetchStreamEnd()
+void fetchStreamEnd() @safe
{
//Set intendation to -1 .
unwindIndent(-1);
@@ -445,7 +445,7 @@ final class Scanner
}

///Add DIRECTIVE token.
-void fetchDirective()
+void fetchDirective() @safe
{
//Set intendation to -1 .
unwindIndent(-1);
@@ -457,7 +457,7 @@ final class Scanner
}

///Add DOCUMENT-START or DOCUMENT-END token.
-void fetchDocumentIndicator(TokenID id)()
+void fetchDocumentIndicator(TokenID id)() @safe
if(id == TokenID.DocumentStart || id == TokenID.DocumentEnd)
{
//Set indentation to -1 .
@@ -476,7 +476,7 @@ final class Scanner
alias fetchDocumentIndicator!(TokenID.DocumentEnd) fetchDocumentEnd;

///Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
-void fetchFlowCollectionStart(TokenID id)()
+void fetchFlowCollectionStart(TokenID id)() @trusted
{
//'[' and '{' may start a simple key.
savePossibleSimpleKey();
@@ -494,7 +494,7 @@ final class Scanner
alias fetchFlowCollectionStart!(TokenID.FlowMappingStart) fetchFlowMappingStart;

///Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
-void fetchFlowCollectionEnd(TokenID id)()
+void fetchFlowCollectionEnd(TokenID id)() @safe
{
//Reset possible simple key on the current level.
removePossibleSimpleKey();
@@ -512,7 +512,7 @@ final class Scanner
alias fetchFlowCollectionEnd!(TokenID.FlowMappingEnd) fetchFlowMappingEnd;

///Add FLOW-ENTRY token;
-void fetchFlowEntry()
+void fetchFlowEntry() @safe
{
//Reset possible simple key on the current level.
removePossibleSimpleKey();
@@ -530,7 +530,7 @@ final class Scanner
* Params: type = String representing the token type we might need to add.
* id = Token type we might need to add.
*/
-void blockChecks(string type, TokenID id)()
+void blockChecks(string type, TokenID id)() @safe
{
//Are we allowed to start a key (not neccesarily a simple one)?
enforce(allowSimpleKey_, new Error(type ~ " keys are not allowed here",
@@ -543,7 +543,7 @@ final class Scanner
}

///Add BLOCK-ENTRY token. Might add BLOCK-SEQUENCE-START in the process.
-void fetchBlockEntry()
+void fetchBlockEntry() @safe
{
if(flowLevel_ == 0){blockChecks!("Sequence", TokenID.BlockSequenceStart)();}

@@ -561,7 +561,7 @@ final class Scanner
}

///Add KEY token. Might add BLOCK-MAPPING-START in the process.
-void fetchKey()
+void fetchKey() @safe
{
if(flowLevel_ == 0){blockChecks!("Mapping", TokenID.BlockMappingStart)();}

@@ -576,7 +576,7 @@ final class Scanner
}

///Add VALUE token. Might add KEY and/or BLOCK-MAPPING-START in the process.
-void fetchValue()
+void fetchValue() @safe
{
//Do we determine a simple key?
if(possibleSimpleKeys_.length > flowLevel_ &&
@@ -629,7 +629,7 @@ final class Scanner
}

///Add ALIAS or ANCHOR token.
-void fetchAnchor_(TokenID id)()
+void fetchAnchor_(TokenID id)() @trusted
if(id == TokenID.Alias || id == TokenID.Anchor)
{
//ALIAS/ANCHOR could be a simple key.
@@ -645,7 +645,7 @@ final class Scanner
alias fetchAnchor_!(TokenID.Anchor) fetchAnchor;

///Add TAG token.
-void fetchTag()
+void fetchTag() @trusted
{
//TAG could start a simple key.
savePossibleSimpleKey();
@@ -656,7 +656,7 @@ final class Scanner
}

///Add block SCALAR token.
-void fetchBlockScalar(ScalarStyle style)()
+void fetchBlockScalar(ScalarStyle style)() @trusted
if(style == ScalarStyle.Literal || style == ScalarStyle.Folded)
{
//Reset possible simple key on the current level.
@@ -672,7 +672,7 @@ final class Scanner
alias fetchBlockScalar!(ScalarStyle.Folded) fetchFolded;

///Add quoted flow SCALAR token.
-void fetchFlowScalar(ScalarStyle quotes)()
+void fetchFlowScalar(ScalarStyle quotes)() @trusted
{
//A flow scalar could be a simple key.
savePossibleSimpleKey();
@@ -688,7 +688,7 @@ final class Scanner
alias fetchFlowScalar!(ScalarStyle.DoubleQuoted) fetchDouble;

///Add plain SCALAR token.
-void fetchPlain()
+void fetchPlain() @trusted
{
//A plain scalar could be a simple key
savePossibleSimpleKey();
@@ -702,10 +702,13 @@ final class Scanner


///Check if the next token is DIRECTIVE: ^ '%' ...
-bool checkDirective(){return reader_.peek() == '%' && reader_.column == 0;}
+bool checkDirective() @safe
+{
+return reader_.peek() == '%' && reader_.column == 0;
+}

///Check if the next token is DOCUMENT-START: ^ '---' (' '|'\n')
-bool checkDocumentStart()
+bool checkDocumentStart() @safe
{
//Check one char first, then all 3, to prevent reading outside stream.
return reader_.column == 0 &&
@@ -715,7 +718,7 @@ final class Scanner
}

///Check if the next token is DOCUMENT-END: ^ '...' (' '|'\n')
-bool checkDocumentEnd()
+bool checkDocumentEnd() @safe
{
//Check one char first, then all 3, to prevent reading outside stream.
return reader_.column == 0 &&
@@ -725,7 +728,7 @@ final class Scanner
}

///Check if the next token is BLOCK-ENTRY: '-' (' '|'\n')
-bool checkBlockEntry()
+bool checkBlockEntry() @safe
{
return reader_.peek() == '-' &&
" \t\0\n\r\u0085\u2028\u2029"d.canFind(reader_.peek(1));
@@ -736,7 +739,7 @@ final class Scanner
*
* or KEY(block context): '?' (' '|'\n')
*/
-bool checkKey()
+bool checkKey() @safe
{
return reader_.peek() == '?' &&
(flowLevel_ > 0 ||
@@ -748,7 +751,7 @@ final class Scanner
*
* or VALUE(block context): ':' (' '|'\n')
*/
-bool checkValue()
+bool checkValue() @safe
{
return reader_.peek() == ':' &&
(flowLevel_ > 0 ||
@@ -771,7 +774,7 @@ final class Scanner
* '-' character) because we want the flow context to be space
* independent.
*/
-bool checkPlain()
+bool checkPlain() @safe
{
const c = reader_.peek();
return !("-?:,[]{}#&*!|>\'\"%@` \t\0\n\r\u0085\u2028\u2029"d.canFind(c)) ||
@@ -780,7 +783,7 @@ final class Scanner
}

///Move to the next non-space character.
-void findNextNonSpace()
+void findNextNonSpace() @safe
{
while(reader_.peek() == ' '){reader_.forward();}
}

///Scan a string of alphanumeric or "-_" characters.
-dstring scanAlphaNumeric(string name)(in Mark startMark)
+dstring scanAlphaNumeric(string name)(in Mark startMark) @trusted
{
uint length = 0;
dchar c = reader_.peek();
@@ -805,7 +808,7 @@ final class Scanner
}

///Scan all characters until nex line break.
-dstring scanToNextBreak()
+dstring scanToNextBreak() @safe
{
uint length = 0;
while(!"\0\n\r\u0085\u2028\u2029"d.canFind(reader_.peek(length))){++length;}
@@ -823,7 +826,7 @@ final class Scanner
* specification requires. Any such mark will be considered as a part
* of the document.
*/
-void scanToNextToken()
+void scanToNextToken() @safe
{
//TODO(PyYAML): We need to make tab handling rules more sane. A good rule is:
// Tabs cannot precede tokens
@@ -850,7 +853,7 @@ final class Scanner
}

///Scan directive token.
-Token scanDirective()
+Token scanDirective() @trusted
{
Mark startMark = reader_.mark;
//Skip the '%'.
@@ -870,7 +873,7 @@ final class Scanner
}

///Scan name of a directive token.
-dstring scanDirectiveName(in Mark startMark)
+dstring scanDirectiveName(in Mark startMark) @trusted
{
//Scan directive name.
const name = scanAlphaNumeric!"a directive"(startMark);
@@ -883,7 +886,7 @@ final class Scanner
}

///Scan value of a YAML directive token. Returns major, minor version separated by '.'.
-dstring scanYAMLDirectiveValue(in Mark startMark)
+dstring scanYAMLDirectiveValue(in Mark startMark) @trusted
{
findNextNonSpace();

@@ -904,7 +907,7 @@ final class Scanner
}

///Scan a number from a YAML directive.
-dstring scanYAMLDirectiveNumber(in Mark startMark)
+dstring scanYAMLDirectiveNumber(in Mark startMark) @trusted
{
enforce(isDigit(reader_.peek()),
new Error("While scanning a directive", startMark,
@@ -919,7 +922,7 @@ final class Scanner
}

///Scan value of a tag directive.
-dstring scanTagDirectiveValue(in Mark startMark)
+dstring scanTagDirectiveValue(in Mark startMark) @safe
{
findNextNonSpace();
const handle = scanTagDirectiveHandle(startMark);
@@ -928,7 +931,7 @@ final class Scanner
}

///Scan handle of a tag directive.
-dstring scanTagDirectiveHandle(in Mark startMark)
+dstring scanTagDirectiveHandle(in Mark startMark) @trusted
{
const value = scanTagHandle("directive", startMark);
enforce(reader_.peek() == ' ',
@@ -939,7 +942,7 @@ final class Scanner
}

///Scan prefix of a tag directive.
-dstring scanTagDirectivePrefix(in Mark startMark)
+dstring scanTagDirectivePrefix(in Mark startMark) @trusted
{
const value = scanTagURI("directive", startMark);
enforce(" \0\n\r\u0085\u2028\u2029"d.canFind(reader_.peek()),
@@ -951,7 +954,7 @@ final class Scanner
}

///Scan (and ignore) ignored line after a directive.
-void scanDirectiveIgnoredLine(in Mark startMark)
+void scanDirectiveIgnoredLine(in Mark startMark) @trusted
{
findNextNonSpace();
if(reader_.peek() == '#'){scanToNextBreak();}
@@ -975,7 +978,7 @@ final class Scanner
* [ *alias , "value" ]
* Therefore we restrict aliases to ASCII alphanumeric characters.
*/
-Token scanAnchor(TokenID id)
+Token scanAnchor(TokenID id) @trusted
{
const startMark = reader_.mark;

@@ -1002,7 +1005,7 @@ final class Scanner
}

///Scan a tag token.
-Token scanTag()
+Token scanTag() @trusted
{
const startMark = reader_.mark;
dchar c = reader_.peek(1);
@@ -1058,7 +1061,7 @@ final class Scanner
}

///Scan a block scalar token with specified style.
-Token scanBlockScalar(in ScalarStyle style)
+Token scanBlockScalar(in ScalarStyle style) @system
{
const startMark = reader_.mark;

@@ -1139,7 +1142,7 @@ final class Scanner
}

///Scan chomping and indentation indicators of a scalar token.
-Tuple!(Chomping, int) scanBlockScalarIndicators(in Mark startMark)
+Tuple!(Chomping, int) scanBlockScalarIndicators(in Mark startMark) @trusted
{
auto chomping = Chomping.Clip;
int increment = int.min;
@@ -1182,7 +1185,7 @@ final class Scanner
}

///Scan (and ignore) ignored line in a block scalar.
-void scanBlockScalarIgnoredLine(in Mark startMark)
+void scanBlockScalarIgnoredLine(in Mark startMark) @trusted
{
findNextNonSpace();
if(reader_.peek == '#'){scanToNextBreak();}
@@ -1195,7 +1198,7 @@ final class Scanner
}

///Scan indentation in a block scalar, returning line breaks, max indent and end mark.
-Tuple!(dchar[], uint, Mark) scanBlockScalarIndentation()
+Tuple!(dchar[], uint, Mark) scanBlockScalarIndentation() @safe
{
dchar[] chunks;
uint maxIndent;
@@ -1217,7 +1220,7 @@ final class Scanner
}

///Scan line breaks at lower or specified indentation in a block scalar.
-Tuple!(dchar[], Mark) scanBlockScalarBreaks(in uint indent)
+Tuple!(dchar[], Mark) scanBlockScalarBreaks(in uint indent) @safe
{
dchar[] chunks;
Mark endMark = reader_.mark;
@@ -1234,7 +1237,7 @@ final class Scanner
}

///Scan a qouted flow scalar token with specified quotes.
-Token scanFlowScalar(in ScalarStyle quotes)
+Token scanFlowScalar(in ScalarStyle quotes) @system
{
const startMark = reader_.mark;
const quote = reader_.get();
@@ -1256,7 +1259,7 @@ final class Scanner
}

///Scan nonspace characters in a flow scalar.
-void scanFlowScalarNonSpaces(in ScalarStyle quotes, in Mark startMark)
+void scanFlowScalarNonSpaces(in ScalarStyle quotes, in Mark startMark) @system
{
for(;;)
{
@@ -1343,7 +1346,7 @@ final class Scanner
}

///Scan space characters in a flow scalar.
-void scanFlowScalarSpaces(in Mark startMark)
+void scanFlowScalarSpaces(in Mark startMark) @system
{
uint length = 0;
while(" \t"d.canFind(reader_.peek(length))){++length;}
@@ -1371,7 +1374,7 @@ final class Scanner
}

///Scan line breaks in a flow scalar.
-dstring scanFlowScalarBreaks(in Mark startMark)
+dstring scanFlowScalarBreaks(in Mark startMark) @system
{
auto appender = appender!dstring();
for(;;)
@@ -1396,7 +1399,7 @@ final class Scanner
}

///Scan plain scalar token (no block, no quotes).
-Token scanPlain()
+Token scanPlain() @system
{
//We keep track of the allowSimpleKey_ flag here.
//Indentation rules are loosed for the flow context
@@ -1461,7 +1464,7 @@ final class Scanner
}

///Scan spaces in a plain scalar.
-dstring scanPlainSpaces(in Mark startMark)
+dstring scanPlainSpaces(in Mark startMark) @system
{
///The specification is really confusing about tabs in plain scalars.
///We just forbid them completely. Do not use tabs in YAML!
@@ -1509,7 +1512,7 @@ final class Scanner
}

///Scan handle of a tag token.
-dstring scanTagHandle(in string name, in Mark startMark)
+dstring scanTagHandle(const string name, in Mark startMark) @system
{
dchar c = reader_.peek();
enforce(c == '!',
@@ -1538,7 +1541,7 @@ final class Scanner
}

///Scan URI in a tag token.
-dstring scanTagURI(in string name, in Mark startMark)
+dstring scanTagURI(const string name, in Mark startMark) @system
{
//Note: we do not check if URI is well-formed.
//Using appender_, so clear it when we're done.
@@ -1570,7 +1573,7 @@ final class Scanner
}

///Scan URI escape sequences.
-dstring scanURIEscapes(in string name, in Mark startMark)
+dstring scanURIEscapes(const string name, in Mark startMark) @system
{
ubyte[] bytes;
Mark mark = reader_.mark;
@@ -1628,7 +1631,7 @@ final class Scanner
* '\u2029 : '\u2029'
* no break : '\0'
*/
-dchar scanLineBreak()
+dchar scanLineBreak() @safe
{
const c = reader_.peek();

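Throughout these files the one-line accessors (Reader.mark, line, column and charIndex, the Resolver default tag getters, and similar) pick up pure @safe nothrow. For a getter that only reads members and builds a value, all three attributes can normally be stated together; a small illustrative sketch with a hypothetical type, not part of the diff:

// Hypothetical example: attributes on trivial const getters.
struct Position
{
    private uint line_, column_;

    // Reads members only: no globals, no exceptions, no unsafe operations,
    // so pure @safe nothrow can all be declared explicitly.
    @property uint line()   const pure @safe nothrow { return line_; }
    @property uint column() const pure @safe nothrow { return column_; }
}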
@@ -70,7 +70,7 @@ struct Serializer
     */
    this(ref Emitter emitter, Resolver resolver, Encoding encoding,
         in bool explicitStart, in bool explicitEnd, string YAMLVersion,
-        TagDirective[] tagDirectives)
+        TagDirective[] tagDirectives) @trusted
    {
        emitter_ = &emitter;
        resolver_ = resolver;
@@ -83,7 +83,7 @@ struct Serializer
    }

    ///Destroy the Serializer.
-   ~this()
+   @safe ~this()
    {
        emitter_.emit(streamEndEvent(Mark(), Mark()));
        clear(YAMLVersion_);
@@ -95,7 +95,7 @@ struct Serializer
    }

    ///Serialize a node, emitting it in the process.
-   void serialize(ref Node node)
+   void serialize(ref Node node) @safe
    {
        emitter_.emit(documentStartEvent(Mark(), Mark(), explicitStart_,
                                         YAMLVersion_, tagDirectives_));
@@ -120,7 +120,7 @@ struct Serializer
     *
     * Returns: True if the node is anchorable, false otherwise.
     */
-   static bool anchorable(ref Node node)
+   static bool anchorable(ref Node node) @safe
    {
        if(node.isScalar)
        {
@@ -132,7 +132,7 @@ struct Serializer
    }

    ///Add an anchor to the node if it's anchorable and not anchored yet.
-   void anchorNode(ref Node node)
+   void anchorNode(ref Node node) @safe
    {
        if(!anchorable(node)){return;}

@@ -158,7 +158,7 @@ struct Serializer
    }

    ///Generate and return a new anchor.
-   Anchor generateAnchor()
+   Anchor generateAnchor() @trusted
    {
        ++lastAnchorID_;
        auto appender = appender!string;
@@ -167,7 +167,7 @@ struct Serializer
    }

    ///Serialize a node and all its subnodes.
-   void serializeNode(ref Node node)
+   void serializeNode(ref Node node) @trusted
    {
        //If the node has an anchor, emit an anchor (as aliasEvent) on the
        //first occurrence, save it in serializedNodes_, and emit an alias
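Aside (not part of the commit): the Serializer constructor stores &emitter, and taking the address of a ref parameter is exactly the kind of operation @safe rejects, which is presumably why the constructor is @trusted while serialize() and anchorNode() can be plain @safe. A compilable sketch of that pattern, with simplified made-up types rather than D:YAML's real Emitter and Serializer:

    // Illustrative only: mirrors the `emitter_ = &emitter;` pattern above.
    struct Emitter
    {
        void emit(string event) @safe
        {
            // A real emitter would write the event to its output stream here.
        }
    }

    struct Serializer
    {
        private Emitter* emitter_;

        // Taking the address of a ref parameter is rejected in @safe code, so
        // the constructor is @trusted: the caller promises the Emitter
        // outlives this Serializer.
        this(ref Emitter emitter) @trusted
        {
            emitter_ = &emitter;
        }

        // The body only calls @safe code, so the compiler can verify it.
        void serialize(string event) @safe
        {
            emitter_.emit(event);
        }
    }

    void main() @safe
    {
        Emitter emitter;
        auto serializer = Serializer(emitter);
        serializer.serialize("document-start");
    }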
@@ -69,7 +69,7 @@ struct Token
    Encoding encoding;

    ///Get string representation of the token ID.
-   @property string idString() const {return to!string(id);}
+   @property string idString() const @trusted {return to!string(id);}
}

/**
@@ -79,7 +79,7 @@ struct Token
 * end = End position of the token.
 * value = Value of the token.
 */
-Token directiveToken(in Mark start, in Mark end, in string value) pure
+Token directiveToken(in Mark start, in Mark end, in string value) pure @safe nothrow
{
    return Token(value, start, end, TokenID.Directive);
}

@@ -91,7 +91,7 @@ Token directiveToken(in Mark start, in Mark end, in string value) pure
 * start = Start position of the token.
 * end = End position of the token.
 */
-Token simpleToken(TokenID id)(in Mark start, in Mark end) pure
+Token simpleToken(TokenID id)(in Mark start, in Mark end) pure @safe nothrow
{
    return Token(null, start, end, id);
}

@@ -103,7 +103,7 @@ Token simpleToken(TokenID id)(in Mark start, in Mark end) pure
 * end = End position of the token.
 * encoding = Encoding of the stream.
 */
-Token streamStartToken(in Mark start, in Mark end, in Encoding encoding) pure
+Token streamStartToken(in Mark start, in Mark end, in Encoding encoding) pure @safe nothrow
{
    return Token(null, start, end, TokenID.StreamStart, ScalarStyle.Invalid, encoding);
}

@@ -126,7 +126,7 @@ alias simpleToken!(TokenID.FlowEntry) flowEntryToken;
 * end = End position of the token.
 * value = Value of the token.
 */
-Token simpleValueToken(TokenID id)(in Mark start, in Mark end, string value) pure
+Token simpleValueToken(TokenID id)(in Mark start, in Mark end, string value) pure @safe nothrow
{
    return Token(value, start, end, id);
}

@@ -144,7 +144,7 @@ alias simpleValueToken!(TokenID.Anchor) anchorToken;
 * value = Value of the token.
 * style = Style of the token.
 */
-Token scalarToken(in Mark start, in Mark end, in string value, in ScalarStyle style) pure
+Token scalarToken(in Mark start, in Mark end, in string value, in ScalarStyle style) pure @safe nothrow
{
    return Token(value, start, end, TokenID.Scalar, style);
}
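Aside (not part of the commit): the token factories above are pure value constructors, which is why all three attributes can be stacked on them without touching their bodies. A self-contained sketch of the same shape, with simplified stand-ins for Token, TokenID and Mark (not the library's real definitions):

    // Simplified stand-ins; not the library's Token/TokenID/Mark.
    enum TokenID { Directive, StreamStart, FlowEntry, Scalar }

    struct Mark { uint line, column; }

    struct Token
    {
        string value;
        Mark start, end;
        TokenID id;
    }

    // pure: the result depends only on the arguments; @safe: no unchecked
    // memory operations; nothrow: cannot throw an Exception.
    Token directiveToken(in Mark start, in Mark end, in string value) pure @safe nothrow
    {
        return Token(value, start, end, TokenID.Directive);
    }

    // Template version: every instantiation carries the same attributes.
    Token simpleToken(TokenID id)(in Mark start, in Mark end) pure @safe nothrow
    {
        return Token(null, start, end, id);
    }

    // Same idea as `alias simpleToken!(TokenID.FlowEntry) flowEntryToken;` above.
    alias flowEntryToken = simpleToken!(TokenID.FlowEntry);

    void main() @safe
    {
        auto a = directiveToken(Mark(1, 0), Mark(1, 5), "%YAML 1.1");
        auto b = flowEntryToken(Mark(2, 0), Mark(2, 1));
        assert(a.id == TokenID.Directive && b.id == TokenID.FlowEntry);
    }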
@@ -24,7 +24,7 @@ struct ZeroString(string TypeName)
    @disable int opCmp(ref ZeroString);

    ///Construct a string.
-   this(in string str)
+   this(in string str) pure nothrow @safe
    {
        if(str is null || str == "")
        {
@@ -44,7 +44,7 @@ struct ZeroString(string TypeName)
    }

    ///Test for equality with another string.
-   bool opEquals(const ref ZeroString str) const
+   bool opEquals(const ref ZeroString str) const nothrow @trusted
    {
        return isNull ? str.isNull :
               str.isNull ? false : (0 == strcmp(str_, str.str_));
@@ -60,7 +60,7 @@ struct ZeroString(string TypeName)
    }

    ///Compare with another string.
-   int opCmp(const ref ZeroString str) const
+   int opCmp(const ref ZeroString str) const nothrow @trusted
    in{assert(!isNull && !str.isNull);}
    body
    {
@@ -68,7 +68,7 @@ struct ZeroString(string TypeName)
    }

    ///Is this string null (invalid)?
-   @property bool isNull() const nothrow @safe {return str_ is null;}
+   @property bool isNull() pure const nothrow @safe {return str_ is null;}

private:
    ///Hack to allow toHash to be @safe.
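Aside (not part of the commit): opEquals and opCmp above compare through strcmp, a C function the compiler cannot check, so nothrow @trusted is as strong as their annotations can get; only isNull, which touches nothing unsafe, is @safe (and now pure). A small illustrative sketch of that trade-off, using a simplified CString type rather than ZeroString itself:

    // Illustrative sketch only: why a strcmp-based comparison ends up
    // @trusted rather than @safe.
    import core.stdc.string : strcmp;
    import std.string : toStringz;

    struct CString
    {
        private const(char)* str_;  // invariant: null, or zero-terminated

        // toStringz copies the string into GC memory and appends '\0'; it is
        // itself pure, nothrow and callable from @safe code.
        this(in string s) pure nothrow @safe
        {
            str_ = (s is null || s == "") ? null : toStringz(s);
        }

        // strcmp is an unchecked C function (@system), so the strongest claim
        // here is @trusted: "the zero-termination invariant holds, trust it".
        bool opEquals(const ref CString rhs) const nothrow @trusted
        {
            return isNull ? rhs.isNull
                          : rhs.isNull ? false : (strcmp(str_, rhs.str_) == 0);
        }

        ///Is this string null (invalid)?
        @property bool isNull() pure const nothrow @safe { return str_ is null; }
    }

    void main() @safe
    {
        auto a = CString("foo");
        auto b = CString("foo");
        auto c = CString("bar");
        assert(a == b);
        assert(!(a == c));
    }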