Another attempt at making d-yaml work with dub.
Creating a symlink in source/ to the dyaml directory does not actually result in a symlink when another package grabs d-yaml as a dependency via dub, and even if it did, it wouldn't work on Windows. So, this commit moves the dyaml source directory into source/ so that dub builds actually work, and cdc.d has been adjusted accordingly so that building with it still works.
This commit is contained in:
parent
9f9c96e317
commit
7a1e1ecce3
30 changed files with 1 additions and 2 deletions
|
@ -1 +0,0 @@
|
|||
../dyaml/
|
18
source/dyaml/all.d
Normal file
18
source/dyaml/all.d
Normal file
|
@ -0,0 +1,18 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Convenience module: publicly imports the entire D:YAML API so users can
///just `import dyaml.all;` (or the `yaml` wrapper) instead of each module.
module dyaml.all;

public import dyaml.constructor;
public import dyaml.dumper;
public import dyaml.encoding;
public import dyaml.exception;
public import dyaml.linebreak;
public import dyaml.loader;
public import dyaml.representer;
public import dyaml.resolver;
public import dyaml.style;
public import dyaml.node;
|
13
source/dyaml/anchor.d
Normal file
13
source/dyaml/anchor.d
Normal file
|
@ -0,0 +1,13 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///YAML anchor.
module dyaml.anchor;

import dyaml.zerostring;

///YAML anchor (reference) struct. Encapsulates an anchor to save memory.
///(NOTE(review): ZeroString appears to be a compact zero-terminated string
///wrapper - see dyaml.zerostring for the actual representation.)
alias ZeroString!"Anchor" Anchor;
|
389
source/dyaml/composer.d
Normal file
389
source/dyaml/composer.d
Normal file
|
@ -0,0 +1,389 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* Composes nodes from YAML events provided by parser.
|
||||
* Code based on PyYAML: http://www.pyyaml.org
|
||||
*/
|
||||
module dyaml.composer;
|
||||
|
||||
import core.memory;
|
||||
|
||||
import std.array;
|
||||
import std.conv;
|
||||
import std.exception;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.constructor;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
import dyaml.node;
|
||||
import dyaml.parser;
|
||||
import dyaml.resolver;
|
||||
|
||||
|
||||
package:
|
||||
/**
 * Exception thrown at composer errors.
 *
 * See_Also: MarkedYAMLException
 */
class ComposerException : MarkedYAMLException
{
    //Constructors are provided by the shared mixin; this exception is raised
    //as ComposerException(message, mark) at the offending stream position.
    mixin MarkedExceptionCtors;
}
|
||||
|
||||
///Composes YAML documents from events provided by a Parser.
final class Composer
{
    private:
        ///Parser providing YAML events.
        Parser parser_;
        ///Resolver resolving tags (data types).
        Resolver resolver_;
        ///Constructor constructing YAML values.
        Constructor constructor_;
        ///Nodes associated with anchors. Used by YAML aliases.
        Node[Anchor] anchors_;

        ///Used to reduce allocations when creating pair arrays.
        ///
        ///We need one appender for each nesting level that involves
        ///a pair array, as the inner levels are processed as a
        ///part of the outer levels. Used as a stack.
        Appender!(Node.Pair[], Node.Pair)[] pairAppenders_;
        ///Used to reduce allocations when creating node arrays.
        ///
        ///We need one appender for each nesting level that involves
        ///a node array, as the inner levels are processed as a
        ///part of the outer levels. Used as a stack.
        Appender!(Node[], Node)[] nodeAppenders_;

    public:
        /**
         * Construct a composer.
         *
         * Params:  parser      = Parser to provide YAML events.
         *          resolver    = Resolver to resolve tags (data types).
         *          constructor = Constructor to construct nodes.
         */
        this(Parser parser, Resolver resolver, Constructor constructor) @safe
        {
            parser_ = parser;
            resolver_ = resolver;
            constructor_ = constructor;
        }

        ///Destroy the composer.
        pure @safe nothrow ~this()
        {
            parser_ = null;
            resolver_ = null;
            constructor_ = null;
            //Wipe the anchor table; the referenced collaborators are owned elsewhere.
            clear(anchors_);
            anchors_ = null;
        }

        /**
         * Determine if there are any nodes left.
         *
         * Must be called before loading as it handles the stream start event.
         */
        bool checkNode() @safe
        {
            //Drop the STREAM-START event.
            if(parser_.checkEvent(EventID.StreamStart))
            {
                parser_.getEvent();
            }

            //True if there are more documents available.
            return !parser_.checkEvent(EventID.StreamEnd);
        }

        ///Get a YAML document as a node (the root of the document).
        Node getNode() @safe
        {
            //Get the root node of the next document.
            assert(!parser_.checkEvent(EventID.StreamEnd),
                   "Trying to get a node from Composer when there is no node to "
                   "get. use checkNode() to determine if there is a node.");

            return composeDocument();
        }

        ///Get single YAML document, throwing if there is more than one document.
        Node getSingleNode() @trusted
        {
            assert(!parser_.checkEvent(EventID.StreamEnd),
                   "Trying to get a node from Composer when there is no node to "
                   "get. use checkNode() to determine if there is a node.");

            Node document = composeDocument();

            //Ensure that the stream contains no more documents.
            enforce(parser_.checkEvent(EventID.StreamEnd),
                    new ComposerException("Expected single document in the stream, "
                                          "but found another document.",
                                          parser_.getEvent().startMark));

            //Drop the STREAM-END event.
            parser_.getEvent();

            return document;
        }

    private:
        ///Ensure that appenders for specified nesting levels exist.
        ///
        ///Params:  pairAppenderLevel = Current level in the pair appender stack.
        ///         nodeAppenderLevel = Current level the node appender stack.
        void ensureAppendersExist(const uint pairAppenderLevel, const uint nodeAppenderLevel)
            @trusted
        {
            //Grow the stacks lazily, one appender per nesting level.
            while(pairAppenders_.length <= pairAppenderLevel)
            {
                pairAppenders_ ~= appender!(Node.Pair[])();
            }
            while(nodeAppenders_.length <= nodeAppenderLevel)
            {
                nodeAppenders_ ~= appender!(Node[])();
            }
        }

        ///Compose a YAML document and return its root node.
        Node composeDocument() @trusted
        {
            //Drop the DOCUMENT-START event.
            parser_.getEvent();

            //Compose the root node.
            Node node = composeNode(0, 0);

            //Drop the DOCUMENT-END event.
            parser_.getEvent();

            //Anchors are document-scoped; forget them before the next document.
            clear(anchors_);
            return node;
        }

        /// Compose a node.
        ///
        /// Params: pairAppenderLevel = Current level of the pair appender stack.
        ///         nodeAppenderLevel = Current level of the node appender stack.
        Node composeNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) @system
        {
            if(parser_.checkEvent(EventID.Alias))
            {
                immutable event = parser_.getEvent();
                const anchor = event.anchor;
                enforce((anchor in anchors_) !is null,
                        new ComposerException("Found undefined alias: " ~ anchor.get,
                                              event.startMark));

                //If the node referenced by the anchor is uninitialized,
                //it's not finished, i.e. we're currently composing it
                //and trying to use it recursively here.
                enforce(anchors_[anchor] != Node(),
                        new ComposerException("Found recursive alias: " ~ anchor.get,
                                              event.startMark));

                return anchors_[anchor];
            }

            immutable event = parser_.peekEvent();
            const anchor = event.anchor;
            if(!anchor.isNull() && (anchor in anchors_) !is null)
            {
                throw new ComposerException("Found duplicate anchor: " ~ anchor.get,
                                            event.startMark);
            }

            Node result;
            //Associate the anchor, if any, with an uninitialized node.
            //used to detect duplicate and recursive anchors.
            if(!anchor.isNull())
            {
                anchors_[anchor] = Node();
            }

            //Dispatch on the event kind; the parser guarantees one of these.
            if(parser_.checkEvent(EventID.Scalar))
            {
                result = composeScalarNode();
            }
            else if(parser_.checkEvent(EventID.SequenceStart))
            {
                result = composeSequenceNode(pairAppenderLevel, nodeAppenderLevel);
            }
            else if(parser_.checkEvent(EventID.MappingStart))
            {
                result = composeMappingNode(pairAppenderLevel, nodeAppenderLevel);
            }
            else{assert(false, "This code should never be reached");}

            //Replace the placeholder with the finished node so aliases resolve.
            if(!anchor.isNull())
            {
                anchors_[anchor] = result;
            }
            return result;
        }

        ///Compose a scalar node.
        Node composeScalarNode() @system
        {
            immutable event = parser_.getEvent();
            const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value,
                                          event.implicit);

            Node node = constructor_.node(event.startMark, event.endMark, tag,
                                          event.value, event.scalarStyle);

            return node;
        }

        /// Compose a sequence node.
        ///
        /// Params: pairAppenderLevel = Current level of the pair appender stack.
        ///         nodeAppenderLevel = Current level of the node appender stack.
        Node composeSequenceNode(const uint pairAppenderLevel, const uint nodeAppenderLevel)
            @system
        {
            ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
            auto nodeAppender = &(nodeAppenders_[nodeAppenderLevel]);

            immutable startEvent = parser_.getEvent();
            const tag = resolver_.resolve(NodeID.Sequence, startEvent.tag, null,
                                          startEvent.implicit);

            //Children at a deeper node-appender level so they don't clobber ours.
            while(!parser_.checkEvent(EventID.SequenceEnd))
            {
                nodeAppender.put(composeNode(pairAppenderLevel, nodeAppenderLevel + 1));
            }

            //NOTE(review): GC is disabled around the .dup/node construction;
            //the reason is not documented here - confirm before changing.
            core.memory.GC.disable();
            scope(exit){core.memory.GC.enable();}
            Node node = constructor_.node(startEvent.startMark, parser_.getEvent().endMark,
                                          tag, nodeAppender.data.dup, startEvent.collectionStyle);
            nodeAppender.clear();

            return node;
        }

        /**
         * Flatten a node, merging it with nodes referenced through YAMLMerge data type.
         *
         * Node must be a mapping or a sequence of mappings.
         *
         * Params:  root              = Node to flatten.
         *          startMark         = Start position of the node.
         *          endMark           = End position of the node.
         *          pairAppenderLevel = Current level of the pair appender stack.
         *          nodeAppenderLevel = Current level of the node appender stack.
         *
         * Returns: Flattened mapping as pairs.
         */
        Node.Pair[] flatten(ref Node root, const Mark startMark, const Mark endMark,
                            const uint pairAppenderLevel, const uint nodeAppenderLevel) @system
        {
            void error(Node node)
            {
                //this is Composer, but the code is related to Constructor.
                throw new ConstructorException("While constructing a mapping, "
                                               "expected a mapping or a list of "
                                               "mappings for merging, but found: "
                                               ~ node.type.toString() ~
                                               " NOTE: line/column shows topmost parent "
                                               "to which the content is being merged",
                                               startMark, endMark);
            }

            ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
            auto pairAppender = &(pairAppenders_[pairAppenderLevel]);

            if(root.isMapping)
            {
                //Collect merge targets first; explicit keys win over merged ones.
                Node[] toMerge;
                foreach(ref Node key, ref Node value; root)
                {
                    if(key.isType!YAMLMerge){toMerge ~= value;}
                    else
                    {
                        auto temp = Node.Pair(key, value);
                        merge(*pairAppender, temp);
                    }
                }
                foreach(node; toMerge)
                {
                    merge(*pairAppender, flatten(node, startMark, endMark,
                                                 pairAppenderLevel + 1, nodeAppenderLevel));
                }
            }
            //Must be a sequence of mappings.
            else if(root.isSequence) foreach(ref Node node; root)
            {
                if(!node.isType!(Node.Pair[])){error(node);}
                merge(*pairAppender, flatten(node, startMark, endMark,
                                             pairAppenderLevel + 1, nodeAppenderLevel));
            }
            else
            {
                error(root);
            }

            core.memory.GC.disable();
            scope(exit){core.memory.GC.enable();}
            auto flattened = pairAppender.data.dup;
            pairAppender.clear();

            return flattened;
        }

        /// Compose a mapping node.
        ///
        /// Params: pairAppenderLevel = Current level of the pair appender stack.
        ///         nodeAppenderLevel = Current level of the node appender stack.
        Node composeMappingNode(const uint pairAppenderLevel, const uint nodeAppenderLevel)
            @system
        {
            ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
            immutable startEvent = parser_.getEvent();
            const tag = resolver_.resolve(NodeID.Mapping, startEvent.tag, null,
                                          startEvent.implicit);
            auto pairAppender = &(pairAppenders_[pairAppenderLevel]);

            Tuple!(Node, Mark)[] toMerge;
            while(!parser_.checkEvent(EventID.MappingEnd))
            {
                //Key then value; both one pair-appender level deeper than us.
                auto pair = Node.Pair(composeNode(pairAppenderLevel + 1, nodeAppenderLevel),
                                      composeNode(pairAppenderLevel + 1, nodeAppenderLevel));

                //Need to flatten and merge the node referred by YAMLMerge.
                if(pair.key.isType!YAMLMerge)
                {
                    toMerge ~= tuple(pair.value, cast(Mark)parser_.peekEvent().endMark);
                }
                //Not YAMLMerge, just add the pair.
                else
                {
                    merge(*pairAppender, pair);
                }
            }
            foreach(node; toMerge)
            {
                merge(*pairAppender, flatten(node[0], startEvent.startMark, node[1],
                                             pairAppenderLevel + 1, nodeAppenderLevel));
            }

            core.memory.GC.disable();
            scope(exit){core.memory.GC.enable();}
            Node node = constructor_.node(startEvent.startMark, parser_.getEvent().endMark,
                                          tag, pairAppender.data.dup, startEvent.collectionStyle);

            pairAppender.clear();
            return node;
        }
}
|
936
source/dyaml/constructor.d
Normal file
936
source/dyaml/constructor.d
Normal file
|
@ -0,0 +1,936 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* Implements a class that processes YAML mappings, sequences and scalars into
|
||||
* nodes. This can be used to implement custom data types. A tutorial can be
|
||||
* found $(LINK2 ../tutorials/custom_types.html, here).
|
||||
*/
|
||||
module dyaml.constructor;
|
||||
|
||||
|
||||
import std.array;
|
||||
import std.algorithm;
|
||||
import std.base64;
|
||||
import std.container;
|
||||
import std.conv;
|
||||
import std.datetime;
|
||||
import std.exception;
|
||||
import std.stdio;
|
||||
import std.regex;
|
||||
import std.string;
|
||||
import std.typecons;
|
||||
import std.utf;
|
||||
|
||||
import dyaml.node;
|
||||
import dyaml.exception;
|
||||
import dyaml.tag;
|
||||
import dyaml.style;
|
||||
|
||||
|
||||
/**
 * Exception thrown at constructor errors.
 *
 * Can be thrown by custom constructor functions.
 */
package class ConstructorException : YAMLException
{
    /**
     * Construct a ConstructorException.
     *
     * Params:  msg   = Error message.
     *          start = Start position of the error context.
     *          end   = End position of the error context.
     */
    this(string msg, Mark start, Mark end, string file = __FILE__, int line = __LINE__)
        @safe
    {
        //Both marks are folded into the message so callers see the context.
        super(msg ~ "\nstart: " ~ start.toString() ~ "\nend: " ~ end.toString(),
              file, line);
    }
}

//Module-local shorthand for ConstructorException.
private alias ConstructorException Error;
|
||||
|
||||
/**
 * Constructs YAML values.
 *
 * Each YAML scalar, sequence or mapping has a tag specifying its data type.
 * Constructor uses user-specifyable functions to create a node of desired
 * data type from a scalar, sequence or mapping.
 *
 *
 * Each of these functions is associated with a tag, and can process either
 * a scalar, a sequence, or a mapping. The constructor passes each value to
 * the function with corresponding tag, which then returns the resulting value
 * that can be stored in a node.
 *
 * If a tag is detected with no known constructor function, it is considered an error.
 */
final class Constructor
{
    private:
        ///Constructor functions from scalars.
        Node.Value delegate(ref Node)[Tag] fromScalar_;
        ///Constructor functions from sequences.
        Node.Value delegate(ref Node)[Tag] fromSequence_;
        ///Constructor functions from mappings.
        Node.Value delegate(ref Node)[Tag] fromMapping_;

    public:
        /**
         * Construct a Constructor.
         *
         * If you don't want to support default YAML tags/data types, you can use
         * defaultConstructors to disable constructor functions for these.
         *
         * Params:  defaultConstructors = Use constructors for default YAML tags?
         */
        this(const Flag!"useDefaultConstructors" defaultConstructors = Yes.useDefaultConstructors)
            @safe nothrow
        {
            if(!defaultConstructors){return;}

            //Register the standard YAML 1.1 tag set.
            addConstructorScalar("tag:yaml.org,2002:null", &constructNull);
            addConstructorScalar("tag:yaml.org,2002:bool", &constructBool);
            addConstructorScalar("tag:yaml.org,2002:int", &constructLong);
            addConstructorScalar("tag:yaml.org,2002:float", &constructReal);
            addConstructorScalar("tag:yaml.org,2002:binary", &constructBinary);
            addConstructorScalar("tag:yaml.org,2002:timestamp", &constructTimestamp);
            addConstructorScalar("tag:yaml.org,2002:str", &constructString);

            ///In a mapping, the default value is kept as an entry with the '=' key.
            addConstructorScalar("tag:yaml.org,2002:value", &constructString);

            addConstructorSequence("tag:yaml.org,2002:omap", &constructOrderedMap);
            addConstructorSequence("tag:yaml.org,2002:pairs", &constructPairs);
            addConstructorMapping("tag:yaml.org,2002:set", &constructSet);
            addConstructorSequence("tag:yaml.org,2002:seq", &constructSequence);
            addConstructorMapping("tag:yaml.org,2002:map", &constructMap);
            addConstructorScalar("tag:yaml.org,2002:merge", &constructMerge);
        }

        ///Destroy the constructor.
        pure @safe nothrow ~this()
        {
            clear(fromScalar_);
            fromScalar_ = null;
            clear(fromSequence_);
            fromSequence_ = null;
            clear(fromMapping_);
            fromMapping_ = null;
        }

        /**
         * Add a constructor function from scalar.
         *
         * The function must take a reference to $(D Node) to construct from.
         * The node contains a string for scalars, $(Node[]) for sequences and
         * $(Node.Pair[]) for mappings.
         *
         * Any exception thrown by this function will be caught by D:YAML and
         * its message will be added to a $(YAMLException) that will also tell
         * the user which type failed to construct, and position in the file.
         *
         *
         * The value returned by this function will be stored in the resulting node.
         *
         * Only one constructor function can be set for one tag.
         *
         *
         * Structs and classes must implement the $(D opCmp()) operator for D:YAML
         * support. The signature of the operator that must be implemented
         * is $(D const int opCmp(ref const MyStruct s)) for structs where
         * $(I MyStruct) is the struct type, and $(D int opCmp(Object o)) for
         * classes. Note that the class $(D opCmp()) should not alter the compared
         * values - it is not const for compatibility reasons.
         *
         * Params:  tag  = Tag for the function to handle.
         *          ctor = Constructor function.
         *
         * Example:
         *
         * --------------------
         * import std.string;
         *
         * import yaml;
         *
         * struct MyStruct
         * {
         *     int x, y, z;
         *
         *     //Any D:YAML type must have a custom opCmp operator.
         *     //This is used for ordering in mappings.
         *     const int opCmp(ref const MyStruct s)
         *     {
         *         if(x != s.x){return x - s.x;}
         *         if(y != s.y){return y - s.y;}
         *         if(z != s.z){return z - s.z;}
         *         return 0;
         *     }
         * }
         *
         * MyStruct constructMyStructScalar(ref Node node)
         * {
         *     //Guaranteed to be string as we construct from scalar.
         *     //!mystruct x:y:z
         *     auto parts = node.as!string().split(":");
         *     //If this throws, the D:YAML will handle it and throw a YAMLException.
         *     return MyStruct(to!int(parts[0]), to!int(parts[1]), to!int(parts[2]));
         * }
         *
         * void main()
         * {
         *     auto loader = Loader("file.yaml");
         *     auto constructor = new Constructor;
         *     constructor.addConstructorScalar("!mystruct", &constructMyStructScalar);
         *     loader.constructor = constructor;
         *     Node node = loader.load();
         * }
         * --------------------
         */
        void addConstructorScalar(T)(const string tag, T function(ref Node) ctor)
            @safe nothrow
        {
            const t = Tag(tag);
            auto deleg = addConstructor!T(t, ctor);
            (*delegates!string)[t] = deleg;
        }

        /**
         * Add a constructor function from sequence.
         *
         * See_Also: addConstructorScalar
         *
         * Example:
         *
         * --------------------
         * import std.string;
         *
         * import yaml;
         *
         * struct MyStruct
         * {
         *     int x, y, z;
         *
         *     //Any D:YAML type must have a custom opCmp operator.
         *     //This is used for ordering in mappings.
         *     const int opCmp(ref const MyStruct s)
         *     {
         *         if(x != s.x){return x - s.x;}
         *         if(y != s.y){return y - s.y;}
         *         if(z != s.z){return z - s.z;}
         *         return 0;
         *     }
         * }
         *
         * MyStruct constructMyStructSequence(ref Node node)
         * {
         *     //node is guaranteed to be sequence.
         *     //!mystruct [x, y, z]
         *     return MyStruct(node[0].as!int, node[1].as!int, node[2].as!int);
         * }
         *
         * void main()
         * {
         *     auto loader = Loader("file.yaml");
         *     auto constructor = new Constructor;
         *     constructor.addConstructorSequence("!mystruct", &constructMyStructSequence);
         *     loader.constructor = constructor;
         *     Node node = loader.load();
         * }
         * --------------------
         */
        void addConstructorSequence(T)(const string tag, T function(ref Node) ctor)
            @safe nothrow
        {
            const t = Tag(tag);
            auto deleg = addConstructor!T(t, ctor);
            (*delegates!(Node[]))[t] = deleg;
        }

        /**
         * Add a constructor function from a mapping.
         *
         * See_Also: addConstructorScalar
         *
         * Example:
         *
         * --------------------
         * import std.string;
         *
         * import yaml;
         *
         * struct MyStruct
         * {
         *     int x, y, z;
         *
         *     //Any D:YAML type must have a custom opCmp operator.
         *     //This is used for ordering in mappings.
         *     const int opCmp(ref const MyStruct s)
         *     {
         *         if(x != s.x){return x - s.x;}
         *         if(y != s.y){return y - s.y;}
         *         if(z != s.z){return z - s.z;}
         *         return 0;
         *     }
         * }
         *
         * MyStruct constructMyStructMapping(ref Node node)
         * {
         *     //node is guaranteed to be mapping.
         *     //!mystruct {"x": x, "y": y, "z": z}
         *     return MyStruct(node["x"].as!int, node["y"].as!int, node["z"].as!int);
         * }
         *
         * void main()
         * {
         *     auto loader = Loader("file.yaml");
         *     auto constructor = new Constructor;
         *     constructor.addConstructorMapping("!mystruct", &constructMyStructMapping);
         *     loader.constructor = constructor;
         *     Node node = loader.load();
         * }
         * --------------------
         */
        void addConstructorMapping(T)(const string tag, T function(ref Node) ctor)
            @safe nothrow
        {
            const t = Tag(tag);
            auto deleg = addConstructor!T(t, ctor);
            (*delegates!(Node.Pair[]))[t] = deleg;
        }

    package:
        /*
         * Construct a node.
         *
         * Params:  start = Start position of the node.
         *          end   = End position of the node.
         *          tag   = Tag (data type) of the node.
         *          value = Value to construct node from (string, nodes or pairs).
         *          style = Style of the node (scalar or collection style).
         *
         * Returns: Constructed node.
         */
        Node node(T, U)(const Mark start, const Mark end, const Tag tag,
                        T value, U style) @trusted
            if((is(T : string) || is(T == Node[]) || is(T == Node.Pair[])) &&
               (is(U : CollectionStyle) || is(U : ScalarStyle)))
        {
            //Human-readable kind name for error messages, picked at compile time.
            static type = is(T : string) ? "scalar" :
                          is(T == Node[]) ? "sequence" :
                          is(T == Node.Pair[]) ? "mapping" :
                          "ERROR";
            enforce((tag in *delegates!T) !is null,
                    new Error("No constructor function from " ~ type ~
                              " for tag " ~ tag.get(), start, end));

            Node node = Node(value);
            try
            {
                static if(is(U : ScalarStyle))
                {
                    return Node.rawNode((*delegates!T)[tag](node), start, tag,
                                        style, CollectionStyle.Invalid);
                }
                else static if(is(U : CollectionStyle))
                {
                    return Node.rawNode((*delegates!T)[tag](node), start, tag,
                                        ScalarStyle.Invalid, style);
                }
                else static assert(false);
            }
            catch(Exception e)
            {
                //Wrap any constructor-function failure with type and position info.
                throw new Error("Error constructing " ~ typeid(T).toString()
                                ~ ":\n" ~ e.msg, start, end);
            }
        }

    private:
        /*
         * Add a constructor function.
         *
         * Params:  tag  = Tag for the function to handle.
         *          ctor = Constructor function.
         */
        auto addConstructor(T)(const Tag tag, T function(ref Node) ctor)
            @trusted nothrow
        {
            assert((tag in fromScalar_) is null &&
                   (tag in fromSequence_) is null &&
                   (tag in fromMapping_) is null,
                   "Constructor function for tag " ~ tag.get ~ " is already "
                   "specified. Can't specify another one.");


            //Wrap the typed function into a delegate producing a Node.Value.
            return (ref Node n)
            {
                static if(Node.allowed!T){return Node.value(ctor(n));}
                else {return Node.userValue(ctor(n));}
            };
        }

        //Get the array of constructor functions for scalar, sequence or mapping.
        auto delegates(T)() pure @safe nothrow
        {
            static if(is(T : string)) {return &fromScalar_;}
            else static if(is(T : Node[])) {return &fromSequence_;}
            else static if(is(T : Node.Pair[])){return &fromMapping_;}
            else static assert(false);
        }
}
|
||||
|
||||
|
||||
///Construct a _null _node.
YAMLNull constructNull(ref Node node)
{
    //The scalar's text is irrelevant: any null-tagged value maps to YAMLNull.
    return YAMLNull();
}
|
||||
|
||||
///Construct a merge _node - a _node that merges another _node into a mapping.
YAMLMerge constructMerge(ref Node node)
{
    //The scalar's text is ignored; YAMLMerge is a marker value the composer
    //detects to trigger mapping merging.
    return YAMLMerge();
}
|
||||
|
||||
///Construct a boolean _node.
///
///Accepts (case-insensitively) yes/true/on and no/false/off;
///anything else is an error.
bool constructBool(ref Node node)
{
    const lowercased = node.as!string().toLower();
    switch(lowercased)
    {
        case "yes", "true", "on":
            return true;
        case "no", "false", "off":
            return false;
        default:
            throw new Exception("Unable to parse boolean value: " ~ lowercased);
    }
}
|
||||
|
||||
///Construct an integer (long) _node.
///
///Supports the YAML 1.1 int forms: decimal (with optional sign and '_'
///separators), binary (0b), hexadecimal (0x), octal (leading 0) and
///sexagesimal (colon-separated base-60 digits).
///
///Throws: Exception if the scalar cannot be parsed as an integer.
long constructLong(ref Node node)
{
    string value = node.as!string().replace("_", "");
    const char c = value[0];
    const long sign = c != '-' ? 1 : -1;
    if(c == '-' || c == '+')
    {
        value = value[1 .. $];
    }

    //Fixed: the message previously said "float value" (copy-paste from
    //constructReal); this is an integer parse error.
    enforce(value != "", new Exception("Unable to parse integer value: " ~ value));

    long result;
    try
    {
        //Zero.
        if(value == "0") {result = cast(long)0;}
        //Binary.
        //Fixed: parse with to!long, not to!int - values wider than 32 bits
        //were silently overflowing before being widened to long.
        else if(value.startsWith("0b")){result = sign * to!long(value[2 .. $], 2);}
        //Hexadecimal.
        else if(value.startsWith("0x")){result = sign * to!long(value[2 .. $], 16);}
        //Octal.
        else if(value[0] == '0') {result = sign * to!long(value, 8);}
        //Sexagesimal (base 60, e.g. "190:20:30").
        else if(value.canFind(":"))
        {
            long val = 0;
            long base = 1;
            foreach_reverse(digit; value.split(":"))
            {
                val += to!long(digit) * base;
                base *= 60;
            }
            result = sign * val;
        }
        //Decimal.
        else{result = sign * to!long(value);}
    }
    catch(ConvException e)
    {
        throw new Exception("Unable to parse integer value: " ~ value);
    }

    return result;
}
unittest
{
    long getLong(string str)
    {
        auto node = Node(str);
        return constructLong(node);
    }

    string canonical = "685230";
    string decimal = "+685_230";
    string octal = "02472256";
    string hexadecimal = "0x_0A_74_AE";
    string binary = "0b1010_0111_0100_1010_1110";
    string sexagesimal = "190:20:30";

    assert(685230 == getLong(canonical));
    assert(685230 == getLong(decimal));
    assert(685230 == getLong(octal));
    assert(685230 == getLong(hexadecimal));
    assert(685230 == getLong(binary));
    assert(685230 == getLong(sexagesimal));

    //Regression: values wider than 32 bits must not overflow (was to!int).
    assert(4294967296L == getLong("0x1_0000_0000"));
}
|
||||
|
||||
///Construct a floating point (real) _node.
real constructReal(ref Node node)
{
    string value = node.as!string().replace("_", "").toLower();
    const char c = value[0];
    const real sign = c != '-' ? 1.0 : -1.0;
    if(c == '-' || c == '+')
    {
        value = value[1 .. $];
    }

    //Bare "nan"/"inf" are rejected here; YAML spells these ".nan"/".inf".
    enforce(value != "" && value != "nan" && value != "inf" && value != "-inf",
            new Exception("Unable to parse float value: " ~ value));

    real result;
    try
    {
        //Infinity.
        if (value == ".inf"){result = sign * real.infinity;}
        //Not a Number.
        else if(value == ".nan"){result = real.nan;}
        //Sexagesimal (base 60, e.g. "190:20:30.15").
        else if(value.canFind(":"))
        {
            real val = 0.0;
            real base = 1.0;
            foreach_reverse(digit; value.split(":"))
            {
                val += to!real(digit) * base;
                base *= 60.0;
            }
            result = sign * val;
        }
        //Plain floating point.
        else{result = sign * to!real(value);}
    }
    catch(ConvException e)
    {
        throw new Exception("Unable to parse float value: \"" ~ value ~ "\"");
    }

    return result;
}
unittest
{
    //Approximate comparison; parsed reals need not be bit-exact.
    bool eq(real a, real b, real epsilon = 0.2)
    {
        return a >= (b - epsilon) && a <= (b + epsilon);
    }

    real getReal(string str)
    {
        auto node = Node(str);
        return constructReal(node);
    }

    string canonical = "6.8523015e+5";
    string exponential = "685.230_15e+03";
    string fixed = "685_230.15";
    string sexagesimal = "190:20:30.15";
    string negativeInf = "-.inf";
    string NaN = ".NaN";

    assert(eq(685230.15, getReal(canonical)));
    assert(eq(685230.15, getReal(exponential)));
    assert(eq(685230.15, getReal(fixed)));
    assert(eq(685230.15, getReal(sexagesimal)));
    assert(eq(-real.infinity, getReal(negativeInf)));
    assert(to!string(getReal(NaN)) == "nan");
}
|
||||
|
||||
///Construct a binary (base64) _node.
///
///Params:  node = Scalar node holding base64-encoded data (may contain newlines).
///
///Returns: Decoded bytes.
///
///Throws:  Exception if the value is not valid base64 or not valid UTF.
ubyte[] constructBinary(ref Node node)
{
    string value = node.as!string;
    //For an unknown reason, this must be nested to work (compiler bug?).
    try
    {
        //Newlines are allowed inside YAML binary scalars; strip them first.
        try{return Base64.decode(value.removechars("\n"));}
        catch(Exception e)
        {
            throw new Exception("Unable to decode base64 value: " ~ e.msg);
        }
    }
    catch(UTFException e)
    {
        throw new Exception("Unable to decode base64 value: " ~ e.msg);
    }
}
|
||||
unittest
{
    //Round-trip: encode known bytes with Base64, decode via constructBinary,
    //and check we get the original bytes back.
    ubyte[] test = cast(ubyte[])"The Answer: 42";
    char[] buffer;
    buffer.length = 256;
    string input = cast(string)Base64.encode(test, buffer);
    auto node = Node(input);
    auto value = constructBinary(node);
    assert(value == test);
}
|
||||
|
||||
///Construct a timestamp (SysTime) _node.
///
///Parses the YAML timestamp format: a mandatory YYYY-MM-DD date, an optional
///time-of-day with optional fraction, and an optional timezone ("Z" or an
///hour[:minute] offset). Missing time means midnight; missing timezone means UTC.
///
///Params:  node = Scalar node holding the timestamp string.
///
///Returns: Parsed SysTime.
///
///Throws:  Exception on malformed or out-of-range timestamp values.
SysTime constructTimestamp(ref Node node)
{
    string value = node.as!string;

    //Date, then (consumed piecewise below) time-of-day, then timezone.
    auto YMDRegexp = regex("^([0-9][0-9][0-9][0-9])-([0-9][0-9]?)-([0-9][0-9]?)");
    auto HMSRegexp = regex("^[Tt \t]+([0-9][0-9]?):([0-9][0-9]):([0-9][0-9])(\\.[0-9]*)?");
    //NOTE(review): regex alternation binds loosely, so this is
    //(^[ \t]*Z) | (offset anywhere) - confirm the unanchored offset branch
    //is intentional.
    auto TZRegexp = regex("^[ \t]*Z|([-+][0-9][0-9]?)(:[0-9][0-9])?");

    try
    {
        //First, get year, month and day.
        auto matches = match(value, YMDRegexp);

        enforce(!matches.empty,
                new Exception("Unable to parse timestamp value: " ~ value));

        auto captures = matches.front.captures;
        const year  = to!int(captures[1]);
        const month = to!int(captures[2]);
        const day   = to!int(captures[3]);

        //If available, get hour, minute, second and fraction, if present.
        value = matches.front.post;
        matches = match(value, HMSRegexp);
        if(matches.empty)
        {
            //Date only: midnight UTC.
            return SysTime(DateTime(year, month, day), UTC());
        }

        captures = matches.front.captures;
        const hour   = to!int(captures[1]);
        const minute = to!int(captures[2]);
        const second = to!int(captures[3]);
        //captures[4] includes the leading dot (e.g. ".25"), so "0" ~ ".25"
        //parses as 0.25; 10^7 hectonanoseconds per second.
        const hectonanosecond = cast(int)(to!real("0" ~ captures[4]) * 10000000);

        //If available, get timezone.
        value = matches.front.post;
        matches = match(value, TZRegexp);
        if(matches.empty || matches.front.captures[0] == "Z")
        {
            //No timezone or explicit "Z" both mean UTC.
            return SysTime(DateTime(year, month, day, hour, minute, second),
                           FracSec.from!"hnsecs"(hectonanosecond), UTC());
        }

        captures = matches.front.captures;
        int sign    = 1;
        int tzHours = 0;
        if(!captures[1].empty)
        {
            if(captures[1][0] == '-'){sign = -1;}
            tzHours = to!int(captures[1][1 .. $]);
        }
        //captures[2], if present, is ":MM"; skip the colon.
        const tzMinutes = (!captures[2].empty) ? to!int(captures[2][1 .. $]) : 0;
        const tzOffset  = sign * (60 * tzHours + tzMinutes);

        return SysTime(DateTime(year, month, day, hour, minute, second),
                       FracSec.from!"hnsecs"(hectonanosecond),
                       new SimpleTimeZone(tzOffset));
    }
    catch(ConvException e)
    {
        throw new Exception("Unable to parse timestamp value " ~ value ~ " : " ~ e.msg);
    }
    catch(DateTimeException e)
    {
        throw new Exception("Invalid timestamp value " ~ value ~ " : " ~ e.msg);
    }

    assert(false, "This code should never be reached");
}
|
||||
unittest
{
    //Exercise constructTimestamp against the YAML timestamp examples:
    //canonical, ISO8601 with offset, space-separated, no TZ, no fraction,
    //and date-only forms.
    writeln("D:YAML construction timestamp unittest");

    //Parse a timestamp string and render it back in ISO form for comparison.
    string timestamp(string value)
    {
        auto node = Node(value);
        return constructTimestamp(node).toISOString();
    }

    string canonical      = "2001-12-15T02:59:43.1Z";
    string iso8601        = "2001-12-14t21:59:43.10-05:00";
    string spaceSeparated = "2001-12-14 21:59:43.10 -5";
    string noTZ           = "2001-12-15 2:59:43.10";
    string noFraction     = "2001-12-15 2:59:43";
    string ymd            = "2002-12-14";

    assert(timestamp(canonical)      == "20011215T025943.1Z");
    //avoiding float conversion errors
    assert(timestamp(iso8601)        == "20011214T215943.0999999-05:00" ||
           timestamp(iso8601)        == "20011214T215943.1-05:00");
    assert(timestamp(spaceSeparated) == "20011214T215943.0999999-05:00" ||
           timestamp(spaceSeparated) == "20011214T215943.1-05:00");
    assert(timestamp(noTZ)           == "20011215T025943.0999999Z" ||
           timestamp(noTZ)           == "20011215T025943.1Z");
    assert(timestamp(noFraction)     == "20011215T025943Z");
    assert(timestamp(ymd)            == "20021214T000000Z");
}
|
||||
|
||||
///Construct a string _node.
///
///Params:  node = Scalar node to read.
///
///Returns: The scalar's string value, unchanged.
string constructString(ref Node node)
{
    auto text = node.as!string;
    return text;
}
|
||||
|
||||
///Convert a sequence of single-element mappings into a sequence of pairs.
///
///Params:  type  = Name of the constructed type, used in error messages.
///         nodes = Sequence items; each must be a mapping with exactly one entry.
///
///Returns: The key:value pairs extracted from the mappings, in order.
///
///Throws:  Exception if any item is not a single-element mapping.
Node.Pair[] getPairs(string type, Node[] nodes)
{
    Node.Pair[] result;

    foreach(ref element; nodes)
    {
        //Each sequence item must carry exactly one key:value entry.
        enforce(element.isMapping && element.length == 1,
                new Exception("While constructing " ~ type ~
                              ", expected a mapping with single element"));

        result ~= element.as!(Node.Pair[]);
    }

    return result;
}
|
||||
|
||||
///Construct an ordered map (ordered sequence of key:value pairs without duplicates) _node.
///
///Params:  node = Sequence node of single-element mappings.
///
///Returns: Pairs in document order.
///
///Throws:  Exception on a malformed entry or a duplicate key.
Node.Pair[] constructOrderedMap(ref Node node)
{
    auto pairs = getPairs("ordered map", node.as!(Node[]));

    //Detect duplicates.
    //TODO this should be replaced by something with deterministic memory allocation.
    auto keys = redBlackTree!Node();
    //Free the tree's nodes as soon as duplicate checking is done.
    scope(exit){clear(keys);}
    foreach(ref pair; pairs)
    {
        enforce(!(pair.key in keys),
                new Exception("Duplicate entry in an ordered map: "
                              ~ pair.key.debugString()));
        keys.insert(pair.key);
    }
    return pairs;
}
|
||||
unittest
{
    //Verify that constructOrderedMap accepts duplicate-free input and
    //rejects duplicate keys, for both mixed-type and same-type keys.
    writeln("D:YAML construction ordered map unittest");

    alias Node.Pair Pair;

    //Build single-pair mappings whose key/value types alternate per index.
    Node[] alternateTypes(uint length)
    {
        Node[] pairs;
        foreach(long i; 0 .. length)
        {
            auto pair = (i % 2) ? Pair(to!string(i), i)
                                : Pair(i, to!string(i));
            pairs ~= Node([pair]);
        }
        return pairs;
    }

    //Build single-pair mappings with string keys only.
    Node[] sameType(uint length)
    {
        Node[] pairs;
        foreach(long i; 0 .. length)
        {
            auto pair = Pair(to!string(i), i);
            pairs ~= Node([pair]);
        }
        return pairs;
    }

    //True if constructOrderedMap throws on the given input (duplicate found).
    bool hasDuplicates(Node[] nodes)
    {
        auto node = Node(nodes);
        return null !is collectException(constructOrderedMap(node));
    }

    assert(hasDuplicates(alternateTypes(8) ~ alternateTypes(2)));
    assert(!hasDuplicates(alternateTypes(8)));
    assert(hasDuplicates(sameType(64) ~ sameType(16)));
    assert(hasDuplicates(alternateTypes(64) ~ alternateTypes(16)));
    assert(!hasDuplicates(sameType(64)));
    assert(!hasDuplicates(alternateTypes(64)));
}
|
||||
|
||||
///Construct a pairs (ordered sequence of key: value pairs allowing duplicates) _node.
///
///Params:  node = Sequence node of single-element mappings.
///
///Returns: Pairs in document order; duplicate keys are allowed.
Node.Pair[] constructPairs(ref Node node)
{
    auto items = node.as!(Node[]);
    return getPairs("pairs", items);
}
|
||||
|
||||
///Construct a set _node.
///
///A YAML set is written as a mapping whose values are ignored; only the
///keys matter and must be unique.
///
///Params:  node = Mapping node whose keys form the set.
///
///Returns: The set members (the mapping keys) in document order.
///
///Throws:  Exception on a duplicate member.
Node[] constructSet(ref Node node)
{
    auto pairs = node.as!(Node.Pair[]);

    //In future, the map here should be replaced with something with deterministic
    //memory allocation if possible.
    //Detect duplicates.
    //The AA is used purely as a membership set; the ubyte values are dummies.
    ubyte[Node] map;
    scope(exit){clear(map);}
    Node[] nodes;
    foreach(ref pair; pairs)
    {
        enforce((pair.key in map) is null,
                new Exception("Duplicate entry in a set"));
        map[pair.key] = 0;
        nodes ~= pair.key;
    }

    return nodes;
}
|
||||
unittest
{
    //Verify that constructSet rejects duplicate members and accepts
    //duplicate-free input, for short and long inputs.
    writeln("D:YAML construction set unittest");

    //Build a set-shaped pair array with 'length' distinct string keys.
    Node.Pair[] set(uint length)
    {
        Node.Pair[] pairs;
        foreach(long i; 0 .. length)
        {
            pairs ~= Node.Pair(to!string(i), YAMLNull());
        }

        return pairs;
    }

    auto DuplicatesShort   = set(8) ~ set(2);
    auto noDuplicatesShort = set(8);
    auto DuplicatesLong    = set(64) ~ set(4);
    auto noDuplicatesLong  = set(64);

    //Helper comparing pair keys to a node array element-wise.
    bool eq(Node.Pair[] a, Node[] b)
    {
        if(a.length != b.length){return false;}
        foreach(i; 0 .. a.length)
        {
            if(a[i].key != b[i])
            {
                return false;
            }
        }
        return true;
    }

    auto nodeDuplicatesShort   = Node(DuplicatesShort.dup);
    auto nodeNoDuplicatesShort = Node(noDuplicatesShort.dup);
    auto nodeDuplicatesLong    = Node(DuplicatesLong.dup);
    auto nodeNoDuplicatesLong  = Node(noDuplicatesLong.dup);

    assert(null !is collectException(constructSet(nodeDuplicatesShort)));
    assert(null is  collectException(constructSet(nodeNoDuplicatesShort)));
    assert(null !is collectException(constructSet(nodeDuplicatesLong)));
    assert(null is  collectException(constructSet(nodeNoDuplicatesLong)));
}
|
||||
|
||||
///Construct a sequence (array) _node.
///
///Params:  node = Sequence node to read.
///
///Returns: The sequence items, unchanged.
Node[] constructSequence(ref Node node)
{
    auto items = node.as!(Node[]);
    return items;
}
|
||||
|
||||
///Construct an unordered map (unordered set of key:value _pairs without duplicates) _node.
///
///Params:  node = Mapping node to read.
///
///Returns: The mapping's pairs.
///
///Throws:  Exception on a duplicate key.
Node.Pair[] constructMap(ref Node node)
{
    auto pairs = node.as!(Node.Pair[]);
    //Detect duplicates.
    //TODO this should be replaced by something with deterministic memory allocation.
    auto keys = redBlackTree!Node();
    //Free the tree's nodes as soon as duplicate checking is done.
    scope(exit){clear(keys);}
    foreach(ref pair; pairs)
    {
        enforce(!(pair.key in keys),
                new Exception("Duplicate entry in a map: "
                              ~ pair.key.debugString()));
        keys.insert(pair.key);
    }
    return pairs;
}
|
||||
|
||||
|
||||
//Unittests
|
||||
private:
|
||||
|
||||
import std.stream;
|
||||
import dyaml.loader;
|
||||
|
||||
///Test fixture: a simple 3-field struct used by the custom-constructor unittests.
struct MyStruct
{
    int x, y, z;

    //Lexicographic comparison by x, then y, then z.
    //NOTE(review): subtraction-based compare can overflow for extreme int
    //values; acceptable here since the tests use small numbers.
    const int opCmp(ref const MyStruct s) const pure @safe nothrow
    {
        if(x != s.x){return x - s.x;}
        if(y != s.y){return y - s.y;}
        if(z != s.z){return z - s.z;}
        return 0;
    }
}
|
||||
|
||||
///Construct a MyStruct from a scalar of the form "x:y:z".
MyStruct constructMyStructScalar(ref Node node)
{
    //Guaranteed to be string as we construct from scalar.
    auto fields = node.as!string().split(":");
    return MyStruct(to!int(fields[0]), to!int(fields[1]), to!int(fields[2]));
}
|
||||
|
||||
///Construct a MyStruct from a three-element sequence [x, y, z].
MyStruct constructMyStructSequence(ref Node node)
{
    //node is guaranteed to be sequence.
    return MyStruct(node[0].as!int, node[1].as!int, node[2].as!int);
}
|
||||
|
||||
///Construct a MyStruct from a mapping {x: ..., y: ..., z: ...}.
MyStruct constructMyStructMapping(ref Node node)
{
    //node is guaranteed to be mapping.
    return MyStruct(node["x"].as!int, node["y"].as!int, node["z"].as!int);
}
|
||||
|
||||
unittest
{
    //Load "!mystruct 1:2:3" through a Loader with a custom scalar constructor.
    char[] data = cast(char[])"!mystruct 1:2:3";
    auto loadStream  = new MemoryStream(data);
    auto loader      = Loader(loadStream);
    auto constructor = new Constructor;
    constructor.addConstructorScalar("!mystruct", &constructMyStructScalar);
    loader.constructor = constructor;
    Node node = loader.load();

    assert(node.as!MyStruct == MyStruct(1, 2, 3));
}

unittest
{
    //Load "!mystruct [1, 2, 3]" through a custom sequence constructor.
    char[] data = cast(char[])"!mystruct [1, 2, 3]";
    auto loadStream  = new MemoryStream(data);
    auto loader      = Loader(loadStream);
    auto constructor = new Constructor;
    constructor.addConstructorSequence("!mystruct", &constructMyStructSequence);
    loader.constructor = constructor;
    Node node = loader.load();

    assert(node.as!MyStruct == MyStruct(1, 2, 3));
}

unittest
{
    //Load "!mystruct {x: 1, y: 2, z: 3}" through a custom mapping constructor.
    char[] data = cast(char[])"!mystruct {x: 1, y: 2, z: 3}";
    auto loadStream  = new MemoryStream(data);
    auto loader      = Loader(loadStream);
    auto constructor = new Constructor;
    constructor.addConstructorMapping("!mystruct", &constructMyStructMapping);
    loader.constructor = constructor;
    Node node = loader.load();

    assert(node.as!MyStruct == MyStruct(1, 2, 3));
}
|
355
source/dyaml/dumper.d
Normal file
355
source/dyaml/dumper.d
Normal file
|
@ -0,0 +1,355 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* YAML _dumper.
|
||||
*
|
||||
* Code based on $(LINK2 http://www.pyyaml.org, PyYAML).
|
||||
*/
|
||||
module dyaml.dumper;
|
||||
|
||||
|
||||
import std.stream;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.emitter;
|
||||
import dyaml.encoding;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
import dyaml.linebreak;
|
||||
import dyaml.node;
|
||||
import dyaml.representer;
|
||||
import dyaml.resolver;
|
||||
import dyaml.serializer;
|
||||
import dyaml.tagdirective;
|
||||
|
||||
|
||||
/**
|
||||
* Dumps YAML documents to files or streams.
|
||||
*
|
||||
* User specified Representer and/or Resolver can be used to support new
|
||||
* tags / data types.
|
||||
*
|
||||
* Setters are provided to affect output details (style, encoding, etc.).
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* Write to a file:
|
||||
* --------------------
|
||||
* auto node = Node([1, 2, 3, 4, 5]);
|
||||
* Dumper("file.yaml").dump(node);
|
||||
* --------------------
|
||||
*
|
||||
* Write multiple YAML documents to a file:
|
||||
* --------------------
|
||||
* auto node1 = Node([1, 2, 3, 4, 5]);
|
||||
* auto node2 = Node("This document contains only one string");
|
||||
* Dumper("file.yaml").dump(node1, node2);
|
||||
*
|
||||
* //Or with an array:
|
||||
* //Dumper("file.yaml").dump([node1, node2]);
|
||||
*
|
||||
*
|
||||
* --------------------
|
||||
*
|
||||
* Write to memory:
|
||||
* --------------------
|
||||
* import std.stream;
|
||||
* auto stream = new MemoryStream();
|
||||
* auto node = Node([1, 2, 3, 4, 5]);
|
||||
* Dumper(stream).dump(node);
|
||||
* --------------------
|
||||
*
|
||||
* Use a custom representer/resolver to support custom data types and/or implicit tags:
|
||||
* --------------------
|
||||
* auto node = Node([1, 2, 3, 4, 5]);
|
||||
* auto representer = new Representer();
|
||||
* auto resolver = new Resolver();
|
||||
*
|
||||
* //Add representer functions / resolver expressions here...
|
||||
*
|
||||
* auto dumper = Dumper("file.yaml");
|
||||
* dumper.representer = representer;
|
||||
* dumper.resolver = resolver;
|
||||
* dumper.dump(node);
|
||||
* --------------------
|
||||
*/
|
||||
struct Dumper
{
    unittest
    {
        //Dump a single node to memory.
        auto node = Node([1, 2, 3, 4, 5]);
        Dumper(new MemoryStream()).dump(node);
    }

    unittest
    {
        //Dump multiple documents in one call.
        auto node1 = Node([1, 2, 3, 4, 5]);
        auto node2 = Node("This document contains only one string");
        Dumper(new MemoryStream()).dump(node1, node2);
    }

    unittest
    {
        //Dump to an explicitly constructed stream.
        import std.stream;
        auto stream = new MemoryStream();
        auto node = Node([1, 2, 3, 4, 5]);
        Dumper(stream).dump(node);
    }

    unittest
    {
        //Dump with user-supplied representer and resolver.
        auto node = Node([1, 2, 3, 4, 5]);
        auto representer = new Representer();
        auto resolver = new Resolver();
        auto dumper = Dumper(new MemoryStream());
        dumper.representer = representer;
        dumper.resolver = resolver;
        dumper.dump(node);
    }

    private:
        ///Resolver to resolve tags.
        Resolver resolver_;
        ///Representer to represent data types.
        Representer representer_;

        ///Stream to write to.
        ///NOTE(review): the stream is never closed by Dumper itself -
        ///confirm callers are responsible for closing it.
        Stream stream_;

        ///Write scalars in canonical form?
        bool canonical_;
        ///Indentation width.
        int indent_ = 2;
        ///Preferred text width.
        uint textWidth_ = 80;
        ///Line break to use.
        LineBreak lineBreak_ = LineBreak.Unix;
        ///Character encoding to use.
        Encoding encoding_ = Encoding.UTF_8;
        ///YAML version string.
        string YAMLVersion_ = "1.1";
        ///Tag directives to use.
        TagDirective[] tags_ = null;
        ///Always write document start?
        Flag!"explicitStart" explicitStart_ = No.explicitStart;
        ///Always write document end?
        Flag!"explicitEnd" explicitEnd_ = No.explicitEnd;

        ///Name of the output file or stream, used in error messages.
        string name_ = "<unknown>";

    public:
        //A Dumper must always be bound to a stream; disable default
        //construction and comparisons.
        @disable this();
        @disable bool opEquals(ref Dumper);
        @disable int opCmp(ref Dumper);

        /**
         * Construct a Dumper writing to a file.
         *
         * Params:  filename = File name to write to.
         *
         * Throws:  YAMLException if the file can not be dumped to (e.g. cannot be opened).
         */
        this(string filename) @safe
        {
            name_ = filename;
            try{this(new File(filename, FileMode.OutNew));}
            catch(StreamException e)
            {
                throw new YAMLException("Unable to open file " ~ filename ~
                                        " for YAML dumping: " ~ e.msg);
            }
        }

        ///Construct a Dumper writing to a _stream. This is useful to e.g. write to memory.
        this(Stream stream) pure @safe
        {
            //Default resolver/representer; replaceable via the setters below.
            resolver_    = new Resolver();
            representer_ = new Representer();
            stream_ = stream;
        }

        ///Destroy the Dumper.
        pure @safe nothrow ~this()
        {
            //NOTE(review): only releases the version string; the stream and
            //resolver/representer are left to the GC/caller.
            YAMLVersion_ = null;
        }

        ///Set stream _name. Used in debugging messages.
        @property void name(string name) pure @safe nothrow
        {
            name_ = name;
        }

        ///Specify custom Resolver to use.
        @property void resolver(Resolver resolver) @trusted
        {
            //Destroy the previously owned resolver before taking the new one.
            clear(resolver_);
            resolver_ = resolver;
        }

        ///Specify custom Representer to use.
        @property void representer(Representer representer) @trusted
        {
            //Destroy the previously owned representer before taking the new one.
            clear(representer_);
            representer_ = representer;
        }

        ///Write scalars in _canonical form?
        @property void canonical(bool canonical) pure @safe nothrow
        {
            canonical_ = canonical;
        }

        ///Set indentation width. 2 by default. Must not be zero.
        //Note: the setter takes uint but the field is declared int.
        @property void indent(uint indent) pure @safe nothrow
        in
        {
            assert(indent != 0, "Can't use zero YAML indent width");
        }
        body
        {
            indent_ = indent;
        }

        ///Set preferred text _width.
        @property void textWidth(uint width) pure @safe nothrow
        {
            textWidth_ = width;
        }

        ///Set line break to use. Unix by default.
        @property void lineBreak(LineBreak lineBreak) pure @safe nothrow
        {
            lineBreak_ = lineBreak;
        }

        ///Set character _encoding to use. UTF-8 by default.
        @property void encoding(Encoding encoding) pure @safe nothrow
        {
            encoding_ = encoding;
        }

        ///Always explicitly write document start?
        @property void explicitStart(bool explicit) pure @safe nothrow
        {
            explicitStart_ = explicit ? Yes.explicitStart : No.explicitStart;
        }

        ///Always explicitly write document end?
        @property void explicitEnd(bool explicit) pure @safe nothrow
        {
            explicitEnd_ = explicit ? Yes.explicitEnd : No.explicitEnd;
        }

        ///Specify YAML version string. "1.1" by default.
        @property void YAMLVersion(string YAMLVersion) pure @safe nothrow
        {
            YAMLVersion_ = YAMLVersion;
        }

        /**
         * Specify tag directives.
         *
         * A tag directive specifies a shorthand notation for specifying _tags.
         * Each tag directive associates a handle with a prefix. This allows for
         * compact tag notation.
         *
         * Each handle specified MUST start and end with a '!' character
         * (a single character "!" handle is allowed as well).
         *
         * Only alphanumeric characters, '-', and '__' may be used in handles.
         *
         * Each prefix MUST not be empty.
         *
         * The "!!" handle is used for default YAML _tags with prefix
         * "tag:yaml.org,2002:". This can be overridden.
         *
         * Params:  tags = Tag directives (keys are handles, values are prefixes).
         *
         * Example:
         * --------------------
         * Dumper dumper = Dumper("file.yaml");
         * string[string] directives;
         * directives["!short!"] = "tag:long.org,2011:";
         * //This will emit tags starting with "tag:long.org,2011"
         * //with a "!short!" prefix instead.
         * dumper.tagDirectives(directives);
         * dumper.dump(Node("foo"));
         * --------------------
         */
        @property void tagDirectives(string[string] tags) pure @trusted
        {
            TagDirective[] t;
            foreach(handle, prefix; tags)
            {
                //Handles and prefixes are validated with asserts (programmer
                //errors), not exceptions.
                assert(handle.length >= 1 && handle[0] == '!' && handle[$ - 1] == '!',
                       "A tag handle is empty or does not start and end with a "
                       "'!' character : " ~ handle);
                assert(prefix.length >= 1, "A tag prefix is empty");
                t ~= TagDirective(handle, prefix);
            }
            tags_ = t;
        }

        /**
         * Dump one or more YAML _documents to the file/stream.
         *
         * Note that while you can call dump() multiple times on the same
         * dumper, you will end up writing multiple YAML "files" to the same
         * file/stream.
         *
         * Params:  documents = Documents to _dump (root nodes of the _documents).
         *
         * Throws:  YAMLException on error (e.g. invalid nodes,
         *          unable to write to file/stream).
         */
        void dump(Node[] documents ...) @trusted
        {
            try
            {
                //A fresh emitter/serializer pair per call; repeated calls
                //therefore produce separate YAML streams in the same output.
                auto emitter = Emitter(stream_, canonical_, indent_, textWidth_, lineBreak_);
                auto serializer = Serializer(emitter, resolver_, encoding_, explicitStart_,
                                             explicitEnd_, YAMLVersion_, tags_);
                foreach(ref document; documents)
                {
                    representer_.represent(serializer, document);
                }
            }
            catch(YAMLException e)
            {
                throw new YAMLException("Unable to dump YAML to stream "
                                        ~ name_ ~ " : " ~ e.msg);
            }
        }

    package:
        /*
         * Emit specified events. Used for debugging/testing.
         *
         * Params:  events = Events to emit.
         *
         * Throws:  YAMLException if unable to emit.
         */
        void emit(Event[] events) @system
        {
            try
            {
                auto emitter = Emitter(stream_, canonical_, indent_, textWidth_, lineBreak_);
                foreach(ref event; events)
                {
                    emitter.emit(event);
                }
            }
            catch(YAMLException e)
            {
                throw new YAMLException("Unable to emit YAML to stream "
                                        ~ name_ ~ " : " ~ e.msg);
            }
        }
}
|
1692
source/dyaml/emitter.d
Normal file
1692
source/dyaml/emitter.d
Normal file
File diff suppressed because it is too large
Load diff
19
source/dyaml/encoding.d
Normal file
19
source/dyaml/encoding.d
Normal file
|
@ -0,0 +1,19 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
module dyaml.encoding;
|
||||
|
||||
|
||||
///Text encodings supported by D:YAML.
enum Encoding : ubyte
{
    ///Unicode UTF-8
    UTF_8,
    ///Unicode UTF-16
    UTF_16,
    ///Unicode UTF-32
    UTF_32
}
|
61
source/dyaml/escapes.d
Normal file
61
source/dyaml/escapes.d
Normal file
|
@ -0,0 +1,61 @@
|
|||
|
||||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
module dyaml.escapes;
|
||||
|
||||
|
||||
package:
|
||||
|
||||
///Translation table from YAML escapes to dchars.
immutable dchar[dchar] fromEscapes;
///Translation table from dchars to YAML escapes.
immutable dchar[dchar] toEscapes;
///Translation table from prefixes of escaped hexadecimal format characters to their lengths.
immutable uint[dchar] escapeHexCodes;


//Immutable associative arrays cannot be initialized at compile time,
//so the tables are filled in by this module constructor.
static this()
{
    fromEscapes =
        ['0':  '\0',
         'a':  '\x07',
         'b':  '\x08',
         't':  '\x09',
         '\t': '\x09',
         'n':  '\x0A',
         'v':  '\x0B',
         'f':  '\x0C',
         'r':  '\x0D',
         'e':  '\x1B',
         ' ':  '\x20',
         '\"': '\"',
         '\\': '\\',
         'N':  '\u0085',
         '_':  '\xA0',
         'L':  '\u2028',
         'P':  '\u2029'];

    //Inverse of fromEscapes, minus the alternative spellings
    //('\t', ' ', '0' have no entry here).
    toEscapes =
        ['\0':     '0',
         '\x07':   'a',
         '\x08':   'b',
         '\x09':   't',
         '\x0A':   'n',
         '\x0B':   'v',
         '\x0C':   'f',
         '\x0D':   'r',
         '\x1B':   'e',
         '\"':     '\"',
         '\\':     '\\',
         '\u0085': 'N',
         '\xA0':   '_',
         '\u2028': 'L',
         '\u2029': 'P'];

    //\xXX, \uXXXX and \UXXXXXXXX escapes and their hex digit counts.
    escapeHexCodes = ['x': 2, 'u': 4, 'U': 8];
}
|
||||
|
239
source/dyaml/event.d
Normal file
239
source/dyaml/event.d
Normal file
|
@ -0,0 +1,239 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* YAML events.
|
||||
* Code based on PyYAML: http://www.pyyaml.org
|
||||
*/
|
||||
module dyaml.event;
|
||||
|
||||
import std.array;
|
||||
import std.conv;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.encoding;
|
||||
import dyaml.exception;
|
||||
import dyaml.reader;
|
||||
import dyaml.tag;
|
||||
import dyaml.tagdirective;
|
||||
import dyaml.style;
|
||||
|
||||
|
||||
package:
|
||||
///Event types.
enum EventID : ubyte
{
    Invalid = 0,     /// Invalid (uninitialized) event.
    StreamStart,     /// Stream start
    StreamEnd,       /// Stream end
    DocumentStart,   /// Document start
    DocumentEnd,     /// Document end
    Alias,           /// Alias
    Scalar,          /// Scalar
    SequenceStart,   /// Sequence start
    SequenceEnd,     /// Sequence end
    MappingStart,    /// Mapping start
    MappingEnd       /// Mapping end
}
|
||||
|
||||
/**
 * YAML event produced by parser.
 *
 * 48 bytes on 64bit.
 */
struct Event
{
    @disable int opCmp(ref Event);

    ///Value of the event, if any.
    string value;
    ///Start position of the event in file/stream.
    Mark startMark;
    ///End position of the event in file/stream.
    Mark endMark;
    //anchor/tag and tagDirectives overlap to save space: tagDirectives is
    //only meaningful for DocumentStart, which has no anchor or tag.
    union
    {
        struct
        {
            ///Anchor of the event, if any.
            Anchor anchor;
            ///Tag of the event, if any.
            Tag tag;
        }
        ///Tag directives, if this is a DocumentStart.
        //TagDirectives tagDirectives;
        TagDirective[] tagDirectives;
    }
    ///Event type.
    EventID id = EventID.Invalid;
    ///Style of scalar event, if this is a scalar event.
    ScalarStyle scalarStyle = ScalarStyle.Invalid;
    //implicit and explicitDocument overlap: a scalar event never is a
    //document event and vice versa.
    union
    {
        ///Should the tag be implicitly resolved?
        bool implicit;
        /**
         * Is this document event explicit?
         *
         * Used if this is a DocumentStart or DocumentEnd.
         */
        bool explicitDocument;
    }
    ///TODO figure this out - Unknown, used by PyYAML with Scalar events.
    bool implicit_2;
    ///Encoding of the stream, if this is a StreamStart.
    Encoding encoding;
    ///Collection style, if this is a SequenceStart or MappingStart.
    CollectionStyle collectionStyle = CollectionStyle.Invalid;

    ///Is this a null (uninitialized) event?
    @property bool isNull() const pure @system nothrow {return id == EventID.Invalid;}

    ///Get string representation of the token ID.
    @property string idString() const @system {return to!string(id);}

    //Guard the size claim in the ddoc above.
    static assert(Event.sizeof <= 48, "Event struct larger than expected");
}
|
||||
|
||||
/**
 * Construct a simple event.
 *
 * Params:  start  = Start position of the event in the file/stream.
 *          end    = End position of the event in the file/stream.
 *          anchor = Anchor, if this is an alias event.
 */
Event event(EventID id)(const Mark start, const Mark end, const Anchor anchor = Anchor())
    pure @trusted nothrow
{
    Event result;
    result.startMark = start;
    result.endMark   = end;
    result.anchor    = anchor;
    result.id        = id;
    return result;
}
|
||||
|
||||
/**
 * Construct a collection (mapping or sequence) start event.
 *
 * Params:  start    = Start position of the event in the file/stream.
 *          end      = End position of the event in the file/stream.
 *          anchor   = Anchor of the sequence, if any.
 *          tag      = Tag of the sequence, if specified.
 *          implicit = Should the tag be implicitly resolved?
 */
Event collectionStartEvent(EventID id)
    (const Mark start, const Mark end, const Anchor anchor, const Tag tag,
     const bool implicit, const CollectionStyle style) pure @trusted nothrow
{
    //NOTE(review): the assert also admits SequenceEnd/MappingEnd even though
    //this builds *start* events - no alias below instantiates those, but
    //confirm whether they should be allowed here.
    static assert(id == EventID.SequenceStart || id == EventID.SequenceEnd ||
                  id == EventID.MappingStart || id == EventID.MappingEnd);
    Event result;
    result.startMark       = start;
    result.endMark         = end;
    result.anchor          = anchor;
    result.tag             = tag;
    result.id              = id;
    result.implicit        = implicit;
    result.collectionStyle = style;
    return result;
}
|
||||
|
||||
/**
 * Construct a stream start event.
 *
 * Params:  start    = Start position of the event in the file/stream.
 *          end      = End position of the event in the file/stream.
 *          encoding = Encoding of the stream.
 */
Event streamStartEvent(const Mark start, const Mark end, const Encoding encoding)
    pure @trusted nothrow
{
    Event result;
    result.startMark = start;
    result.endMark   = end;
    result.id        = EventID.StreamStart;
    result.encoding  = encoding;
    return result;
}
|
||||
|
||||
///Aliases for simple events (no tag/style payload beyond the optional anchor).
alias event!(EventID.StreamEnd) streamEndEvent;
alias event!(EventID.Alias) aliasEvent;
alias event!(EventID.SequenceEnd) sequenceEndEvent;
alias event!(EventID.MappingEnd) mappingEndEvent;

///Aliases for collection start events.
alias collectionStartEvent!(EventID.SequenceStart) sequenceStartEvent;
alias collectionStartEvent!(EventID.MappingStart) mappingStartEvent;
||||
|
||||
/**
 * Construct a document start event.
 *
 * Params: start         = Start position of the event in the file/stream.
 *         end           = End position of the event in the file/stream.
 *         explicit      = Is this an explicit document start?
 *         YAMLVersion   = YAML version string of the document.
 *         tagDirectives = Tag directives of the document.
 */
Event documentStartEvent(const Mark start, const Mark end, const bool explicit, string YAMLVersion,
                         TagDirective[] tagDirectives) pure @trusted nothrow
{
    Event e;
    e.id               = EventID.DocumentStart;
    //The YAML version string is stored in the generic value slot.
    e.value            = YAMLVersion;
    e.startMark        = start;
    e.endMark          = end;
    e.explicitDocument = explicit;
    e.tagDirectives    = tagDirectives;
    return e;
}
|
||||
|
||||
/**
 * Construct a document end event.
 *
 * Params: start    = Start position of the event in the file/stream.
 *         end      = End position of the event in the file/stream.
 *         explicit = Is this an explicit document end?
 */
Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pure @trusted nothrow
{
    Event e;
    e.id               = EventID.DocumentEnd;
    e.startMark        = start;
    e.endMark          = end;
    e.explicitDocument = explicit;
    return e;
}
|
||||
|
||||
/**
 * Construct a scalar event.
 *
 * Params: start    = Start position of the event in the file/stream.
 *         end      = End position of the event in the file/stream.
 *         anchor   = Anchor of the scalar, if any.
 *         tag      = Tag of the scalar, if specified.
 *         implicit = Should the tag be implicitly resolved?
 *         value    = String value of the scalar.
 *         style    = Scalar style.
 */
Event scalarEvent(const Mark start, const Mark end, const Anchor anchor, const Tag tag,
                  const Tuple!(bool, bool) implicit, const string value,
                  const ScalarStyle style = ScalarStyle.Invalid) pure @trusted nothrow
{
    Event e;
    e.id          = EventID.Scalar;
    e.value       = value;
    e.startMark   = start;
    e.endMark     = end;
    e.anchor      = anchor;
    e.tag         = tag;
    e.scalarStyle = style;
    //The two implicit-resolution flags travel as a tuple; unpack them here.
    e.implicit    = implicit[0];
    e.implicit_2  = implicit[1];
    return e;
}
|
106
source/dyaml/exception.d
Normal file
106
source/dyaml/exception.d
Normal file
|
@ -0,0 +1,106 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Exceptions thrown by D:YAML and _exception related code.
|
||||
module dyaml.exception;
|
||||
|
||||
|
||||
import std.algorithm;
|
||||
import std.array;
|
||||
import std.string;
|
||||
import std.conv;
|
||||
|
||||
alias to!string str;
|
||||
|
||||
|
||||
///Base class for all exceptions thrown by D:YAML.
class YAMLException : Exception
{
    ///Construct a YAMLException with specified message and position where it was thrown.
    ///
    ///Params: msg  = Message describing the error.
    ///        file = Source file where the exception was thrown (defaults to caller's file).
    ///        line = Source line where the exception was thrown (defaults to caller's line).
    public this(string msg, string file = __FILE__, int line = __LINE__)
        @trusted nothrow
    {
        super(msg, file, line);
    }
}
|
||||
|
||||
///Position in a YAML stream, used for error messages.
struct Mark
{
    private:
        ///Line number (saturated at ushort.max to keep the struct at 4 bytes).
        ushort line_;
        ///Column number (saturated at ushort.max to keep the struct at 4 bytes).
        ushort column_;

    public:
        ///Construct a Mark with specified line and column in the file.
        this(const uint line, const uint column) pure @safe nothrow
        {
            //Saturate instead of overflowing; positions past 65535 lose precision.
            line_   = line   > ushort.max ? ushort.max : cast(ushort)line;
            column_ = column > ushort.max ? ushort.max : cast(ushort)column;
        }

        ///Get a string representation of the mark.
        string toString() const @trusted
        {
            //Line/column numbers start at zero internally, make them start at 1.
            string display(const ushort v)
            {
                return to!string(v + 1) ~ (v == ushort.max ? " or higher" : "");
            }
            return "line " ~ display(line_) ~ ",column " ~ display(column_);
        }
}

static assert(Mark.sizeof == 4, "Unexpected Mark size");
|
||||
|
||||
package:
|
||||
//Base class of YAML exceptions with marked positions of the problem.
abstract class MarkedYAMLException : YAMLException
{
    //Construct a MarkedYAMLException with specified context and problem.
    //
    //Params: context     = Description of where the error occurred (e.g. "While parsing a mapping").
    //        contextMark = Position of the context in the stream.
    //        problem     = Description of the actual problem.
    //        problemMark = Position of the problem in the stream.
    this(string context, Mark contextMark, string problem, Mark problemMark,
         string file = __FILE__, int line = __LINE__) @safe
    {
        //Omit the context mark when it equals the problem mark to avoid repetition.
        const msg = context ~ '\n' ~
                    (contextMark != problemMark ? contextMark.toString() ~ '\n' : "") ~
                    problem ~ '\n' ~ problemMark.toString() ~ '\n';
        super(msg, file, line);
    }

    //Construct a MarkedYAMLException with specified problem (no separate context).
    this(string problem, Mark problemMark, string file = __FILE__, int line = __LINE__)
        @safe
    {
        super(problem ~ '\n' ~ problemMark.toString(), file, line);
    }
}
|
||||
|
||||
//Constructors of YAML exceptions are mostly the same, so we use a mixin.
//
//Generates the standard (message, file, line) constructor forwarding to the base class.
template ExceptionCtors()
{
    public this(string msg, string file = __FILE__, int line = __LINE__)
        @safe nothrow
    {
        super(msg, file, line);
    }
}
|
||||
|
||||
//Constructors of marked YAML exceptions are mostly the same, so we use a mixin.
//
//Generates both MarkedYAMLException constructor forms (with and without context),
//forwarding to the base class.
template MarkedExceptionCtors()
{
    public:
        this(string context, Mark contextMark, string problem, Mark problemMark,
             string file = __FILE__, int line = __LINE__) @safe
        {
            super(context, contextMark, problem, problemMark,
                  file, line);
        }

        this(string problem, Mark problemMark, string file = __FILE__, int line = __LINE__)
            @safe
        {
            super(problem, problemMark, file, line);
        }
}
|
96
source/dyaml/fastcharsearch.d
Normal file
96
source/dyaml/fastcharsearch.d
Normal file
|
@ -0,0 +1,96 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
module dyaml.fastcharsearch;
|
||||
|
||||
|
||||
import std.algorithm;
|
||||
import std.conv;
|
||||
|
||||
|
||||
package:
|
||||
|
||||
/**
 * Mixin used for fast searching for a character in string.
 *
 * Creates a lookup table to quickly determine if a character
 * is present in the string. Size of the lookup table is limited;
 * any characters not represented in the table will be checked
 * by ordinary equality comparison.
 *
 * Params:  chars     = String to search in.
 *          tableSize = Maximum number of bytes used by the table.
 *
 * Generated method:
 *     bool canFind(dchar c)
 *
 *     Determines if a character is in the string.
 */
template FastCharSearch(dstring chars, uint tableSize = 256)
{
    //The table and canFind() are generated at compile time by searchCode().
    private mixin(searchCode!(chars, tableSize)());
}
|
||||
|
||||
///Generate the search table and the canFind method.
///
///CTFE helper for FastCharSearch: returns D source code declaring a lookup
///table (for characters below tableSize) and a canFind() method that falls
///back to an ||-chain of comparisons for characters outside the table.
string searchCode(dstring chars, uint tableSize)() @trusted
{
    const tableSizeStr = to!string(tableSize);
    ubyte[tableSize] table;
    table[] = 0;

    //Characters that don't fit in the table.
    dchar[] specialChars;

    //Partition the searched characters into table slots and overflow list.
    foreach(c; chars)
    {
        if(c < tableSize){table[c] = 1;}
        else             {specialChars ~= c;}
    }

    //Emit the lookup table as a static immutable array literal.
    string tableCode()
    {
        string code = "static immutable ubyte table_[" ~ tableSizeStr ~ "] = [\n";
        foreach(c; table[0 .. $ - 1])
        {
            code ~= c ? "true,\n" : "false,\n";
        }
        code ~= table[$ - 1] ? "true\n" : "false\n";
        code ~= "];\n\n";
        return code;
    }

    //Emit the ||-chain comparing c against each out-of-table character.
    //NOTE(review): indexes specialChars[$ - 1]; safe only because it is called
    //under the specialChars.length guard below.
    string specialCharsCode()
    {
        string code;
        foreach(c; specialChars[0 .. $ - 1])
        {
            code ~= "cast(uint)c == " ~ to!string(cast(uint)c) ~ " || ";
        }
        code ~= "cast(uint)c == " ~ to!string(cast(uint)specialChars[$ - 1]);

        return code;
    }

    string code = tableSize ? tableCode() : "";

    code ~= "bool canFind(in dchar c) pure @safe nothrow\n"
            "{\n";

    if(tableSize)
    {
        code ~= "    if(c < " ~ tableSizeStr ~ ")\n"
                "    {\n"
                "        return cast(immutable(bool))table_[c];\n"
                "    }\n";
    }

    code ~= specialChars.length
            ? "    return " ~ specialCharsCode() ~ ";\n"
            : "    return false;";
    code ~= "}\n";

    return code;
}
|
91
source/dyaml/flags.d
Normal file
91
source/dyaml/flags.d
Normal file
|
@ -0,0 +1,91 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Compact storage of multiple boolean values.
|
||||
module dyaml.flags;
|
||||
|
||||
|
||||
import std.conv;
|
||||
|
||||
|
||||
package:
|
||||
|
||||
/**
|
||||
* Struct holding multiple named boolean values in a single byte.
|
||||
*
|
||||
* Can hold at most 8 values.
|
||||
*
|
||||
* Example:
|
||||
* --------------------
|
||||
* Flags!("empty", "multiline") flags;
|
||||
* assert(flags.empty == false && flags.multiline == false);
|
||||
* flags.multiline = true;
|
||||
* assert(flags.empty == false && flags.multiline == true);
|
||||
* flags.empty = true;
|
||||
* assert(flags.empty == true && flags.multiline == true);
|
||||
* flags.multiline = false;
|
||||
* assert(flags.empty == true && flags.multiline == false);
|
||||
* flags.empty = false;
|
||||
* assert(flags.empty == false && flags.multiline == false);
|
||||
* --------------------
|
||||
*/
|
||||
struct Flags(names ...) if(names.length <= 8)
{
    private:
        //Ordering comparison of flag sets is meaningless; forbid it.
        @disable int opCmp(ref Flags);

        ///Byte storing the flags, one bit per name, in declaration order.
        ubyte flags_;

        ///Generate a setter and a getter for each flag (CTFE string mixin).
        static string flags(string[] names ...) @trusted
        in
        {
            assert(names.length <= 8, "Flags struct can only hold 8 flags");
        }
        body
        {
            string result;
            foreach(index, name; names)
            {
                string istr = to!string(index);
                //Setter: set or clear bit `index`; getter: extract bit `index`.
                result ~= "\n"
                          "@property bool " ~ name ~ "(bool value) pure @safe nothrow\n"
                          "{\n"
                          "    flags_ = value ? flags_ | (1 <<" ~ istr ~ ")\n"
                          "                   : flags_ & (0xFF ^ (1 << " ~ istr ~"));\n"
                          "    return value;\n"
                          "}\n"
                          "\n"
                          "@property bool " ~ name ~ "() const pure @safe nothrow\n"
                          "{\n"
                          "    return (flags_ >> " ~ istr ~ ") & 1;\n"
                          "}\n";
            }
            return result;
        }

    public:
        ///Flag accessors.
        mixin(flags(names));
}
|
||||
//Exercise every setter/getter combination generated by the Flags mixin.
unittest
{
    import std.stdio;
    writeln("Flags unittest");

    Flags!("empty", "multiline") flags;
    assert(flags.empty == false && flags.multiline == false);
    flags.multiline = true;
    assert(flags.empty == false && flags.multiline == true);
    flags.empty = true;
    assert(flags.empty == true && flags.multiline == true);
    flags.multiline = false;
    assert(flags.empty == true && flags.multiline == false);
    flags.empty = false;
    assert(flags.empty == false && flags.multiline == false);
}
|
||||
|
32
source/dyaml/linebreak.d
Normal file
32
source/dyaml/linebreak.d
Normal file
|
@ -0,0 +1,32 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
module dyaml.linebreak;
|
||||
|
||||
|
||||
///Enumerates platform specific line breaks.
enum LineBreak
{
    ///Unix line break ("\n").
    Unix,
    ///Windows line break ("\r\n").
    Windows,
    ///Macintosh line break ("\r").
    Macintosh
}
|
||||
|
||||
package:
|
||||
|
||||
//Get line break string for specified line break.
//
//Params:  b = Line break type.
//
//Returns: The line break character sequence for b, matching the LineBreak
//         enum documentation (Unix "\n", Windows "\r\n", Macintosh "\r").
string lineBreak(in LineBreak b) pure @safe nothrow
{
    final switch(b)
    {
        case LineBreak.Unix:      return "\n";
        //Fixed: Windows ("\r\n") and Macintosh ("\r") were previously swapped,
        //contradicting the LineBreak enum's own documentation.
        case LineBreak.Windows:   return "\r\n";
        case LineBreak.Macintosh: return "\r";
    }
}
|
334
source/dyaml/loader.d
Normal file
334
source/dyaml/loader.d
Normal file
|
@ -0,0 +1,334 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* Class used to load YAML documents.
|
||||
*/
|
||||
module dyaml.loader;
|
||||
|
||||
|
||||
import std.exception;
|
||||
import std.stream;
|
||||
|
||||
import dyaml.composer;
|
||||
import dyaml.constructor;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
import dyaml.node;
|
||||
import dyaml.parser;
|
||||
import dyaml.reader;
|
||||
import dyaml.resolver;
|
||||
import dyaml.scanner;
|
||||
import dyaml.token;
|
||||
|
||||
|
||||
/**
|
||||
* Loads YAML documents from files or streams.
|
||||
*
|
||||
* User specified Constructor and/or Resolver can be used to support new
|
||||
* tags / data types.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* Load single YAML document from a file:
|
||||
* --------------------
|
||||
* auto rootNode = Loader("file.yaml").load();
|
||||
* ...
|
||||
* --------------------
|
||||
*
|
||||
* Load all YAML documents from a file:
|
||||
* --------------------
|
||||
* auto nodes = Loader("file.yaml").loadAll();
|
||||
* ...
|
||||
* --------------------
|
||||
*
|
||||
* Iterate over YAML documents in a file, lazily loading them:
|
||||
* --------------------
|
||||
* auto loader = Loader("file.yaml");
|
||||
*
|
||||
* foreach(ref node; loader)
|
||||
* {
|
||||
* ...
|
||||
* }
|
||||
* --------------------
|
||||
*
|
||||
* Load YAML from memory:
|
||||
* --------------------
|
||||
* import std.stream;
|
||||
* import std.stdio;
|
||||
*
|
||||
* string yaml_input = "red: '#ff0000'\n"
|
||||
* "green: '#00ff00'\n"
|
||||
* "blue: '#0000ff'";
|
||||
*
|
||||
* auto colors = Loader.fromString(yaml_input).load();
|
||||
*
|
||||
* foreach(string color, string value; colors)
|
||||
* {
|
||||
* writeln(color, " is ", value, " in HTML/CSS");
|
||||
* }
|
||||
* --------------------
|
||||
*
|
||||
* Use a custom constructor/resolver to support custom data types and/or implicit tags:
|
||||
* --------------------
|
||||
* auto constructor = new Constructor();
|
||||
* auto resolver = new Resolver();
|
||||
*
|
||||
* //Add constructor functions / resolver expressions here...
|
||||
*
|
||||
* auto loader = Loader("file.yaml");
|
||||
* loader.constructor = constructor;
|
||||
* loader.resolver = resolver;
|
||||
* auto rootNode = loader.load();
|
||||
* --------------------
|
||||
*/
|
||||
struct Loader
{
    private:
        ///Reads character data from a stream.
        Reader reader_;
        ///Processes character data to YAML tokens.
        Scanner scanner_;
        ///Processes tokens to YAML events.
        Parser parser_;
        ///Resolves tags (data types).
        Resolver resolver_;
        ///Constructs YAML data types.
        Constructor constructor_;
        ///Name of the input file or stream, used in error messages.
        string name_ = "<unknown>";
        ///Are we done loading? Set once load()/loadAll()/opApply has consumed the input.
        bool done_ = false;

    public:
        //A Loader always wraps a concrete input; default construction,
        //comparison and equality are meaningless, so forbid them.
        @disable this();
        @disable int opCmp(ref Loader);
        @disable bool opEquals(ref Loader);

        /**
         * Construct a Loader to load YAML from a file.
         *
         * Params:  filename = Name of the file to load from.
         *
         * Throws:  YAMLException if the file could not be opened or read.
         */
        this(string filename) pure @safe
        {
            name_ = filename;
            //Delegate to the Stream constructor; translate stream errors
            //into YAMLException with the filename for context.
            try{this(new File(filename));}
            catch(StreamException e)
            {
                throw new YAMLException("Unable to open file " ~ filename ~
                                        " for YAML loading: " ~ e.msg);
            }
        }

        /// Construct a Loader to load YAML from a string.
        ///
        /// Params:  data = String to load YAML from.
        ///
        /// Returns: Loader loading YAML from given string.
        static Loader fromString(string data)
        {
            return Loader(new MemoryStream(cast(char[])data));
        }
        unittest
        {
            assert(Loader.fromString("42").load().as!int == 42);
        }

        /**
         * Construct a Loader to load YAML from a _stream.
         *
         * Params:  stream = Stream to read from. Must be readable and seekable.
         *
         * Throws:  YAMLException if stream could not be read.
         */
        this(Stream stream) pure @safe
        {
            try
            {
                //Build the full pipeline: stream -> reader -> scanner -> parser,
                //plus default resolver/constructor (replaceable via properties).
                reader_      = new Reader(stream);
                scanner_     = new Scanner(reader_);
                parser_      = new Parser(scanner_);
                resolver_    = new Resolver();
                constructor_ = new Constructor();
            }
            catch(YAMLException e)
            {
                throw new YAMLException("Unable to open stream " ~ name_ ~
                                        " for YAML loading: " ~ e.msg);
            }
        }

        ///Destroy the Loader.
        @trusted ~this()
        {
            //Only the pipeline stages owned by this Loader are destroyed;
            //resolver_/constructor_ may be user-supplied and are left alone.
            clear(reader_);
            clear(scanner_);
            clear(parser_);
        }

        ///Set stream _name. Used in debugging messages.
        @property void name(string name) pure @safe nothrow
        {
            name_ = name;
        }

        ///Specify custom Resolver to use.
        @property void resolver(Resolver resolver) pure @safe nothrow
        {
            resolver_ = resolver;
        }

        ///Specify custom Constructor to use.
        @property void constructor(Constructor constructor) pure @safe nothrow
        {
            constructor_ = constructor;
        }

        /**
         * Load single YAML document.
         *
         * If none or more than one YAML document is found, this throws a YAMLException.
         *
         * This can only be called once; this is enforced by contract.
         *
         * Returns: Root node of the document.
         *
         * Throws:  YAMLException if there wasn't exactly one document
         *          or on a YAML parsing error.
         */
        Node load() @safe
        in
        {
            assert(!done_, "Loader: Trying to load YAML twice");
        }
        body
        {
            try
            {
                //Mark the loader consumed even if composing throws.
                scope(exit){done_ = true;}
                auto composer = new Composer(parser_, resolver_, constructor_);
                enforce(composer.checkNode(), new YAMLException("No YAML document to load"));
                //getSingleNode() itself rejects multi-document input.
                return composer.getSingleNode();
            }
            catch(YAMLException e)
            {
                throw new YAMLException("Unable to load YAML from stream " ~
                                        name_ ~ " : " ~ e.msg);
            }
        }

        /**
         * Load all YAML documents.
         *
         * This is just a shortcut that iterates over all documents and returns
         * them all at once. Calling loadAll after iterating over the node or
         * vice versa will not return any documents, as they have all been parsed
         * already.
         *
         * This can only be called once; this is enforced by contract.
         *
         * Returns: Array of root nodes of all documents in the file/stream.
         *
         * Throws:  YAMLException on a parsing error.
         */
        Node[] loadAll() @safe
        {
            Node[] nodes;
            foreach(ref node; this){nodes ~= node;}
            return nodes;
        }

        /**
         * Foreach over YAML documents.
         *
         * Parses documents lazily, when they are needed.
         *
         * Foreach over a Loader can only be used once; this is enforced by contract.
         *
         * Throws: YAMLException on a parsing error.
         */
        int opApply(int delegate(ref Node) dg) @trusted
        in
        {
            assert(!done_, "Loader: Trying to load YAML twice");
        }
        body
        {
            scope(exit){done_ = true;}
            try
            {
                auto composer = new Composer(parser_, resolver_, constructor_);

                int result = 0;
                while(composer.checkNode())
                {
                    auto node = composer.getNode();
                    result = dg(node);
                    //Nonzero result means the foreach body broke out early.
                    if(result){break;}
                }

                return result;
            }
            catch(YAMLException e)
            {
                throw new YAMLException("Unable to load YAML from stream " ~
                                        name_ ~ " : " ~ e.msg);
            }
        }

    package:
        //Scan and return all tokens. Used for debugging.
        Token[] scan() @safe
        {
            try
            {
                Token[] result;
                while(scanner_.checkToken()){result ~= scanner_.getToken();}
                return result;
            }
            catch(YAMLException e)
            {
                throw new YAMLException("Unable to scan YAML from stream " ~
                                        name_ ~ " : " ~ e.msg);
            }
        }

        //Parse and return all events. Used for debugging.
        immutable(Event)[] parse() @safe
        {
            try
            {
                immutable(Event)[] result;
                while(parser_.checkEvent()){result ~= parser_.getEvent();}
                return result;
            }
            catch(YAMLException e)
            {
                throw new YAMLException("Unable to parse YAML from stream " ~
                                        name_ ~ " : " ~ e.msg);
            }
        }
}
|
||||
|
||||
//Smoke test: load a small mapping from memory and iterate over its pairs.
unittest
{
    import std.stream;
    import std.stdio;

    string yaml_input = "red: '#ff0000'\n"
                        "green: '#00ff00'\n"
                        "blue: '#0000ff'";

    auto colors = Loader(new MemoryStream(cast(char[])yaml_input)).load();

    foreach(string color, string value; colors)
    {
        writeln(color, " is ", value, " in HTML/CSS");
    }
}
|
1809
source/dyaml/node.d
Normal file
1809
source/dyaml/node.d
Normal file
File diff suppressed because it is too large
Load diff
864
source/dyaml/parser.d
Normal file
864
source/dyaml/parser.d
Normal file
|
@ -0,0 +1,864 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* YAML parser.
|
||||
* Code based on PyYAML: http://www.pyyaml.org
|
||||
*/
|
||||
module dyaml.parser;
|
||||
|
||||
|
||||
import std.array;
|
||||
import std.container;
|
||||
import std.conv;
|
||||
import std.exception;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
import dyaml.scanner;
|
||||
import dyaml.style;
|
||||
import dyaml.token;
|
||||
import dyaml.tag;
|
||||
import dyaml.tagdirective;
|
||||
|
||||
|
||||
package:
|
||||
/**
|
||||
* The following YAML grammar is LL(1) and is parsed by a recursive descent
|
||||
* parser.
|
||||
*
|
||||
* stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
|
||||
* implicit_document ::= block_node DOCUMENT-END*
|
||||
* explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
|
||||
* block_node_or_indentless_sequence ::=
|
||||
* ALIAS
|
||||
* | properties (block_content | indentless_block_sequence)?
|
||||
* | block_content
|
||||
* | indentless_block_sequence
|
||||
* block_node ::= ALIAS
|
||||
* | properties block_content?
|
||||
* | block_content
|
||||
* flow_node ::= ALIAS
|
||||
* | properties flow_content?
|
||||
* | flow_content
|
||||
* properties ::= TAG ANCHOR? | ANCHOR TAG?
|
||||
* block_content ::= block_collection | flow_collection | SCALAR
|
||||
* flow_content ::= flow_collection | SCALAR
|
||||
* block_collection ::= block_sequence | block_mapping
|
||||
* flow_collection ::= flow_sequence | flow_mapping
|
||||
* block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
|
||||
* indentless_sequence ::= (BLOCK-ENTRY block_node?)+
|
||||
* block_mapping ::= BLOCK-MAPPING_START
|
||||
* ((KEY block_node_or_indentless_sequence?)?
|
||||
* (VALUE block_node_or_indentless_sequence?)?)*
|
||||
* BLOCK-END
|
||||
* flow_sequence ::= FLOW-SEQUENCE-START
|
||||
* (flow_sequence_entry FLOW-ENTRY)*
|
||||
* flow_sequence_entry?
|
||||
* FLOW-SEQUENCE-END
|
||||
* flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
||||
* flow_mapping ::= FLOW-MAPPING-START
|
||||
* (flow_mapping_entry FLOW-ENTRY)*
|
||||
* flow_mapping_entry?
|
||||
* FLOW-MAPPING-END
|
||||
* flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
|
||||
*
|
||||
* FIRST sets:
|
||||
*
|
||||
* stream: { STREAM-START }
|
||||
* explicit_document: { DIRECTIVE DOCUMENT-START }
|
||||
* implicit_document: FIRST(block_node)
|
||||
* block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
||||
* flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
||||
* block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
|
||||
* flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
|
||||
* block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
|
||||
* flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
||||
* block_sequence: { BLOCK-SEQUENCE-START }
|
||||
* block_mapping: { BLOCK-MAPPING-START }
|
||||
* block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
|
||||
* indentless_sequence: { ENTRY }
|
||||
* flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
|
||||
* flow_sequence: { FLOW-SEQUENCE-START }
|
||||
* flow_mapping: { FLOW-MAPPING-START }
|
||||
* flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
|
||||
* flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
|
||||
*/
|
||||
|
||||
|
||||
/**
 * Marked exception thrown at parser errors.
 *
 * See_Also: MarkedYAMLException
 */
class ParserException : MarkedYAMLException
{
    mixin MarkedExceptionCtors;
}

//Short local name for the exception type thrown throughout this module.
private alias ParserException Error;
|
||||
|
||||
///Generates events from tokens provided by a Scanner.
|
||||
final class Parser
|
||||
{
|
||||
private:
|
||||
///Default tag handle shortcuts and replacements.
|
||||
static TagDirective[] defaultTagDirectives_;
|
||||
static this()
|
||||
{
|
||||
defaultTagDirectives_ = [TagDirective("!", "!"), TagDirective("!!", "tag:yaml.org,2002:")];
|
||||
}
|
||||
|
||||
///Scanner providing YAML tokens.
|
||||
Scanner scanner_;
|
||||
|
||||
///Event produced by the most recent state.
|
||||
Event currentEvent_;
|
||||
|
||||
///YAML version string.
|
||||
string YAMLVersion_ = null;
|
||||
///Tag handle shortcuts and replacements.
|
||||
TagDirective[] tagDirectives_;
|
||||
|
||||
///Stack of states.
|
||||
Array!(Event delegate()) states_;
|
||||
///Stack of marks used to keep track of extents of e.g. YAML collections.
|
||||
Array!Mark marks_;
|
||||
|
||||
///Current state.
|
||||
Event delegate() state_;
|
||||
|
||||
public:
|
||||
        ///Construct a Parser using specified Scanner.
        ///
        ///Params: scanner = Scanner providing the token stream to parse.
        this(Scanner scanner) @trusted
        {
            //The grammar requires a STREAM-START token first.
            state_ = &parseStreamStart;
            scanner_ = scanner;
            //Preallocate to avoid reallocations during typical parses.
            states_.reserve(32);
            marks_.reserve(32);
        }
|
||||
|
||||
        ///Destroy the parser, releasing buffered event and parse-state storage.
        @trusted ~this()
        {
            clear(currentEvent_);
            clear(tagDirectives_);
            tagDirectives_ = null;
            clear(states_);
            clear(marks_);
        }
|
||||
|
||||
/**
|
||||
* Check if the next event is one of specified types.
|
||||
*
|
||||
* If no types are specified, checks if any events are left.
|
||||
*
|
||||
* Params: ids = Event IDs to check for.
|
||||
*
|
||||
* Returns: true if the next event is one of specified types,
|
||||
* or if there are any events left if no types specified.
|
||||
* false otherwise.
|
||||
*/
|
||||
bool checkEvent(EventID[] ids...) @trusted
|
||||
{
|
||||
//Check if the next event is one of specified types.
|
||||
if(currentEvent_.isNull && state_ !is null)
|
||||
{
|
||||
currentEvent_ = state_();
|
||||
}
|
||||
|
||||
if(!currentEvent_.isNull)
|
||||
{
|
||||
if(ids.length == 0){return true;}
|
||||
else
|
||||
{
|
||||
const nextId = currentEvent_.id;
|
||||
foreach(id; ids)
|
||||
{
|
||||
if(nextId == id){return true;}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the next event, but keep it in the queue.
|
||||
*
|
||||
* Must not be called if there are no events left.
|
||||
*/
|
||||
immutable(Event) peekEvent() @trusted
|
||||
{
|
||||
if(currentEvent_.isNull && state_ !is null)
|
||||
{
|
||||
currentEvent_ = state_();
|
||||
}
|
||||
if(!currentEvent_.isNull){return cast(immutable Event)currentEvent_;}
|
||||
assert(false, "No event left to peek");
|
||||
}
|
||||
|
||||
        /**
         * Return the next event, removing it from the queue.
         *
         * Must not be called if there are no events left.
         */
        immutable(Event) getEvent() @trusted
        {
            //Get the next event and proceed further.
            if(currentEvent_.isNull && state_ !is null)
            {
                currentEvent_ = state_();
            }

            if(!currentEvent_.isNull)
            {
                immutable Event result = cast(immutable Event)currentEvent_;
                //Invalidate the buffer so the next call advances the state machine.
                currentEvent_.id = EventID.Invalid;
                return result;
            }
            assert(false, "No event left to get");
        }
|
||||
|
||||
private:
|
||||
///Pop and return the newest state in states_.
|
||||
Event delegate() popState() @trusted
|
||||
{
|
||||
enforce(states_.length > 0,
|
||||
new YAMLException("Parser: Need to pop state but no states left to pop"));
|
||||
const result = states_.back;
|
||||
states_.length = states_.length - 1;
|
||||
return result;
|
||||
}
|
||||
|
||||
///Pop and return the newest mark in marks_.
|
||||
Mark popMark() @trusted
|
||||
{
|
||||
enforce(marks_.length > 0,
|
||||
new YAMLException("Parser: Need to pop mark but no marks left to pop"));
|
||||
const result = marks_.back;
|
||||
marks_.length = marks_.length - 1;
|
||||
return result;
|
||||
}
|
||||
|
||||
        /**
         * stream            ::= STREAM-START implicit_document? explicit_document* STREAM-END
         * implicit_document ::= block_node DOCUMENT-END*
         * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
         */

        ///Parse stream start: consume the STREAM-START token and emit its event.
        Event parseStreamStart() @safe
        {
            immutable token = scanner_.getToken();
            state_ = &parseImplicitDocumentStart;
            return streamStartEvent(token.startMark, token.endMark, token.encoding);
        }
|
||||
|
||||
        ///Parse implicit document start, unless explicit is detected: if so, parse explicit.
        Event parseImplicitDocumentStart() @trusted
        {
            //Parse an implicit document: no directive, "---" marker or stream end ahead.
            if(!scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
                                    TokenID.StreamEnd))
            {
                //Implicit documents get the default tag handles.
                tagDirectives_ = defaultTagDirectives_;
                immutable token = scanner_.peekToken();

                states_ ~= &parseDocumentEnd;
                state_ = &parseBlockNode;

                //Implicit start: zero-width event, no version or tag directives.
                return documentStartEvent(token.startMark, token.endMark, false, null, null);
            }
            return parseDocumentStart();
        }
|
||||
|
||||
        ///Parse explicit document start.
        Event parseDocumentStart() @trusted
        {
            //Parse any extra document end indicators ("...").
            while(scanner_.checkToken(TokenID.DocumentEnd)){scanner_.getToken();}

            //Parse an explicit document.
            if(!scanner_.checkToken(TokenID.StreamEnd))
            {
                const startMark = scanner_.peekToken().startMark;

                //%YAML / %TAG directives must be followed by "---".
                auto tagDirectives = processDirectives();
                enforce(scanner_.checkToken(TokenID.DocumentStart),
                        new Error("Expected document start but found " ~
                                  scanner_.peekToken().idString,
                                  scanner_.peekToken().startMark));

                const endMark = scanner_.getToken().endMark;
                states_ ~= &parseDocumentEnd;
                state_ = &parseDocumentContent;
                return documentStartEvent(startMark, endMark, true, YAMLVersion_, tagDirectives);
            }
            else
            {
                //Parse the end of the stream.
                immutable token = scanner_.getToken();
                //At stream end all pushed states/marks must have been consumed.
                assert(states_.length == 0);
                assert(marks_.length == 0);
                state_ = null;
                return streamEndEvent(token.startMark, token.endMark);
            }
        }
|
||||
|
||||
///Parse document end (explicit or implicit).
|
||||
Event parseDocumentEnd() @safe
|
||||
{
|
||||
Mark startMark = scanner_.peekToken().startMark;
|
||||
const bool explicit = scanner_.checkToken(TokenID.DocumentEnd);
|
||||
Mark endMark = explicit ? scanner_.getToken().endMark : startMark;
|
||||
|
||||
state_ = &parseDocumentStart;
|
||||
|
||||
return documentEndEvent(startMark, endMark, explicit);
|
||||
}
|
||||
|
||||
///Parse document content.
|
||||
Event parseDocumentContent() @safe
|
||||
{
|
||||
if(scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
|
||||
TokenID.DocumentEnd, TokenID.StreamEnd))
|
||||
{
|
||||
state_ = popState();
|
||||
return processEmptyScalar(scanner_.peekToken().startMark);
|
||||
}
|
||||
return parseBlockNode();
|
||||
}
|
||||
|
||||
    ///Process directives at the beginning of a document.
    ///
    ///Consumes all %YAML and %TAG directive tokens, records them in
    ///YAMLVersion_ / tagDirectives_, and returns the explicitly specified
    ///tag directives. Default tag handles that were not overridden are then
    ///appended to tagDirectives_ (but are not part of the returned slice).
    ///
    ///Throws: Error on a duplicate %YAML directive, an unsupported YAML
    ///        version, or a duplicate tag handle.
    TagDirective[] processDirectives() @system
    {
        //Destroy version and tag handles from previous document.
        YAMLVersion_ = null;
        tagDirectives_.length = 0;

        //Process directives.
        while(scanner_.checkToken(TokenID.Directive))
        {
            immutable token = scanner_.getToken();
            //Name and value are separated by '\0'.
            const parts = token.value.split("\0");
            const name = parts[0];
            if(name == "YAML")
            {
                enforce(YAMLVersion_ is null,
                        new Error("Duplicate YAML directive", token.startMark));
                //NOTE(review): despite the name, 'minor' holds the MAJOR version
                //component ("1" of "1.x") - only major version 1 is accepted.
                const minor = parts[1].split(".")[0];
                enforce(minor == "1",
                        new Error("Incompatible document (version 1.x is required)",
                                  token.startMark));
                YAMLVersion_ = parts[1];
            }
            else if(name == "TAG")
            {
                assert(parts.length == 3, "Tag directive stored incorrectly in a token");
                auto handle = parts[1];

                //Reject a handle that was already declared in this document.
                foreach(ref pair; tagDirectives_)
                {
                    //handle
                    const h = pair.handle;
                    enforce(h != handle, new Error("Duplicate tag handle: " ~ handle,
                                                   token.startMark));
                }
                tagDirectives_ ~= TagDirective(handle, parts[2]);
            }
        }

        //Slice captures only the explicitly specified directives; appending the
        //defaults below does not grow this slice.
        TagDirective[] value = tagDirectives_;

        //Add any default tag handles that haven't been overridden.
        foreach(ref defaultPair; defaultTagDirectives_)
        {
            bool found = false;
            foreach(ref pair; tagDirectives_) if(defaultPair.handle == pair.handle)
            {
                found = true;
                break;
            }
            if(!found){tagDirectives_ ~= defaultPair;}
        }

        return value;
    }
|
||||
|
||||
    /**
     * block_node_or_indentless_sequence ::= ALIAS
     *               | properties (block_content | indentless_block_sequence)?
     *               | block_content
     *               | indentless_block_sequence
     * block_node    ::= ALIAS
     *                   | properties block_content?
     *                   | block_content
     * flow_node     ::= ALIAS
     *                   | properties flow_content?
     *                   | flow_content
     * properties    ::= TAG ANCHOR? | ANCHOR TAG?
     * block_content     ::= block_collection | flow_collection | SCALAR
     * flow_content      ::= flow_collection | SCALAR
     * block_collection  ::= block_sequence | block_mapping
     * flow_collection   ::= flow_sequence | flow_mapping
     */

    ///Parse a node.
    ///
    ///Params:  block              = Allow block collections (sequences/mappings)
    ///                              to start here.
    ///         indentlessSequence = Allow an indentless block sequence to
    ///                              start here.
    ///
    ///Returns: An alias, scalar or collection-start event.
    ///
    ///Throws:  Error if no node content can be parsed at the current position.
    Event parseNode(const Flag!"block" block,
                    const Flag!"indentlessSequence" indentlessSequence = No.indentlessSequence) @safe
    {
        //An alias node is just a reference to a previous anchor.
        if(scanner_.checkToken(TokenID.Alias))
        {
            immutable token = scanner_.getToken();
            state_ = popState();
            return aliasEvent(token.startMark, token.endMark, Anchor(token.value));
        }

        string anchor = null;
        string tag = null;
        Mark startMark, endMark, tagMark;
        bool invalidMarks = true;

        //Get anchor/tag if detected. Return false otherwise.
        bool get(const TokenID id, const Flag!"first" first, ref string target)
        {
            if(!scanner_.checkToken(id)){return false;}
            invalidMarks = false;
            immutable token = scanner_.getToken();
            if(first){startMark = token.startMark;}
            if(id == TokenID.Tag){tagMark = token.startMark;}
            endMark = token.endMark;
            target = token.value;
            return true;
        }

        //Anchor and/or tag can be in any order.
        if(get(TokenID.Anchor, Yes.first, anchor)){get(TokenID.Tag, No.first, tag);}
        else if(get(TokenID.Tag, Yes.first, tag)) {get(TokenID.Anchor, No.first, anchor);}

        //Resolve tag handle/suffix to a full tag using the document's directives.
        if(tag !is null){tag = processTag(tag, startMark, tagMark);}

        //No anchor or tag seen: marks come from the upcoming token instead.
        if(invalidMarks)
        {
            startMark = endMark = scanner_.peekToken().startMark;
        }

        //A missing or "!" tag means the tag should be resolved implicitly.
        bool implicit = (tag is null || tag == "!");

        if(indentlessSequence && scanner_.checkToken(TokenID.BlockEntry))
        {
            state_ = &parseIndentlessSequenceEntry;
            return sequenceStartEvent
                (startMark, scanner_.peekToken().endMark, Anchor(anchor),
                 Tag(tag), implicit, CollectionStyle.Block);
        }

        if(scanner_.checkToken(TokenID.Scalar))
        {
            immutable token = scanner_.getToken();

            //implicit:   plain scalar without an explicit tag (or tagged "!").
            //implicit_2: non-plain scalar without any tag.
            implicit = (token.style == ScalarStyle.Plain && tag is null) || tag == "!";
            bool implicit_2 = (!implicit) && tag is null;
            state_ = popState();
            return scalarEvent(startMark, token.endMark, Anchor(anchor), Tag(tag),
                               tuple(implicit, implicit_2), token.value, token.style);
        }

        if(scanner_.checkToken(TokenID.FlowSequenceStart))
        {
            endMark = scanner_.peekToken().endMark;
            state_ = &parseFlowSequenceEntry!(Yes.first);
            return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
                                      implicit, CollectionStyle.Flow);
        }

        if(scanner_.checkToken(TokenID.FlowMappingStart))
        {
            endMark = scanner_.peekToken().endMark;
            state_ = &parseFlowMappingKey!(Yes.first);
            return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
                                     implicit, CollectionStyle.Flow);
        }

        if(block && scanner_.checkToken(TokenID.BlockSequenceStart))
        {
            endMark = scanner_.peekToken().endMark;
            state_ = &parseBlockSequenceEntry!(Yes.first);
            return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
                                      implicit, CollectionStyle.Block);
        }

        if(block && scanner_.checkToken(TokenID.BlockMappingStart))
        {
            endMark = scanner_.peekToken().endMark;
            state_ = &parseBlockMappingKey!(Yes.first);
            return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
                                     implicit, CollectionStyle.Block);
        }

        //NOTE(review): 'anchor != null' compares array contents (empty == null)
        //while 'tag !is null' compares identity - presumably intentional, but
        //the asymmetry is worth confirming.
        if(anchor != null || tag !is null)
        {
            state_ = popState();

            //PyYAML uses a tuple(implicit, false) for the second last arg here,
            //but the second bool is never used after that - so we don't use it.

            //Empty scalars are allowed even if a tag or an anchor is specified.
            return scalarEvent(startMark, endMark, Anchor(anchor), Tag(tag),
                               tuple(implicit, false) , "");
        }

        immutable token = scanner_.peekToken();
        throw new Error("While parsing a " ~ (block ? "block" : "flow") ~ " node",
                        startMark, "expected node content, but found: "
                        ~ token.idString, token.startMark);
    }
|
||||
|
||||
    /**
     * Process a tag string retrieved from a tag token.
     *
     * A non-empty handle is replaced with the prefix of the matching tag
     * directive; an empty handle means the suffix is already the full tag.
     *
     * Params:  tag       = Tag before processing ("handle\0suffix").
     *          startMark = Position of the node the tag belongs to.
     *          tagMark   = Position of the tag.
     *
     * Returns: The resolved tag string.
     *
     * Throws:  Error if the tag handle is not declared by any tag directive.
     */
    string processTag(const string tag, const Mark startMark, const Mark tagMark)
        const @trusted
    {
        //Tag handle and suffix are separated by '\0'.
        const parts = tag.split("\0");
        assert(parts.length == 2, "Tag data stored incorrectly in a token");
        const handle = parts[0];
        const suffix = parts[1];

        if(handle.length > 0)
        {
            //Linear search: the directive list is expected to be tiny.
            string replacement = null;
            foreach(ref pair; tagDirectives_)
            {
                if(pair.handle == handle)
                {
                    replacement = pair.prefix;
                    break;
                }
            }
            //handle must be in tagDirectives_
            enforce(replacement !is null,
                    new Error("While parsing a node", startMark,
                              "found undefined tag handle: " ~ handle, tagMark));
            return replacement ~ suffix;
        }
        return suffix;
    }
|
||||
|
||||
    ///Wrappers to parse nodes.
    ///Parse a node in block context (block collections allowed).
    Event parseBlockNode() @safe {return parseNode(Yes.block);}
    ///Parse a node in flow context (block collections not allowed).
    Event parseFlowNode() @safe {return parseNode(No.block);}
    ///Parse a block node, additionally allowing an indentless sequence.
    Event parseBlockNodeOrIndentlessSequence() @safe {return parseNode(Yes.block, Yes.indentlessSequence);}
|
||||
|
||||
    ///block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END

    ///Parse an entry of a block sequence. If first is true, this is the first entry.
    ///
    ///Throws: Error if the sequence is not properly terminated by a block end.
    Event parseBlockSequenceEntry(Flag!"first" first)() @trusted
    {
        //On the first entry, consume BLOCK-SEQUENCE-START and remember where it began.
        static if(first){marks_ ~= scanner_.getToken().startMark;}

        if(scanner_.checkToken(TokenID.BlockEntry))
        {
            immutable token = scanner_.getToken();
            //A non-empty entry follows: parse it, then return here for the next one.
            if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.BlockEnd))
            {
                states_~= &parseBlockSequenceEntry!(No.first);
                return parseBlockNode();
            }

            //"- " immediately followed by another entry or the end: empty entry.
            state_ = &parseBlockSequenceEntry!(No.first);
            return processEmptyScalar(token.endMark);
        }

        if(!scanner_.checkToken(TokenID.BlockEnd))
        {
            immutable token = scanner_.peekToken();
            throw new Error("While parsing a block collection", marks_.back,
                            "expected block end, but found " ~ token.idString,
                            token.startMark);
        }

        //Sequence finished: unwind state and the mark pushed on entry.
        state_ = popState();
        popMark();
        immutable token = scanner_.getToken();
        return sequenceEndEvent(token.startMark, token.endMark);
    }
|
||||
|
||||
    ///indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    ///Parse an entry of an indentless sequence.
    ///
    ///Unlike a block sequence there is no start/end token pair, so no mark is
    ///pushed and the sequence ends at the first non-entry token.
    Event parseIndentlessSequenceEntry() @trusted
    {
        if(scanner_.checkToken(TokenID.BlockEntry))
        {
            immutable token = scanner_.getToken();

            //A non-empty entry follows: parse it, then return here for the next one.
            if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.Key,
                                    TokenID.Value, TokenID.BlockEnd))
            {
                states_ ~= &parseIndentlessSequenceEntry;
                return parseBlockNode();
            }

            //"- " with no content: empty entry.
            state_ = &parseIndentlessSequenceEntry;
            return processEmptyScalar(token.endMark);
        }

        //No more entries; the end marks are zero-width (token is only peeked).
        state_ = popState();
        immutable token = scanner_.peekToken();
        return sequenceEndEvent(token.startMark, token.endMark);
    }
|
||||
|
||||
    /**
     * block_mapping     ::= BLOCK-MAPPING_START
     *                       ((KEY block_node_or_indentless_sequence?)?
     *                       (VALUE block_node_or_indentless_sequence?)?)*
     *                       BLOCK-END
     */

    ///Parse a key in a block mapping. If first is true, this is the first key.
    ///
    ///Throws: Error if the mapping is not properly terminated by a block end.
    Event parseBlockMappingKey(Flag!"first" first)() @trusted
    {
        //On the first key, consume BLOCK-MAPPING-START and remember where it began.
        static if(first){marks_ ~= scanner_.getToken().startMark;}

        if(scanner_.checkToken(TokenID.Key))
        {
            immutable token = scanner_.getToken();

            //A non-empty key follows: parse it, then parse its value.
            if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
            {
                states_ ~= &parseBlockMappingValue;
                return parseBlockNodeOrIndentlessSequence();
            }

            //"?" with no content: empty key; proceed straight to the value.
            state_ = &parseBlockMappingValue;
            return processEmptyScalar(token.endMark);
        }

        if(!scanner_.checkToken(TokenID.BlockEnd))
        {
            immutable token = scanner_.peekToken();
            throw new Error("While parsing a block mapping", marks_.back,
                            "expected block end, but found: " ~ token.idString,
                            token.startMark);
        }

        //Mapping finished: unwind state and the mark pushed on entry.
        state_ = popState();
        popMark();
        immutable token = scanner_.getToken();
        return mappingEndEvent(token.startMark, token.endMark);
    }
|
||||
|
||||
///Parse a value in a block mapping.
|
||||
Event parseBlockMappingValue() @trusted
|
||||
{
|
||||
if(scanner_.checkToken(TokenID.Value))
|
||||
{
|
||||
immutable token = scanner_.getToken();
|
||||
|
||||
if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
|
||||
{
|
||||
states_ ~= &parseBlockMappingKey!(No.first);
|
||||
return parseBlockNodeOrIndentlessSequence();
|
||||
}
|
||||
|
||||
state_ = &parseBlockMappingKey!(No.first);
|
||||
return processEmptyScalar(token.endMark);
|
||||
}
|
||||
|
||||
state_= &parseBlockMappingKey!(No.first);
|
||||
return processEmptyScalar(scanner_.peekToken().startMark);
|
||||
}
|
||||
|
||||
    /**
     * flow_sequence     ::= FLOW-SEQUENCE-START
     *                       (flow_sequence_entry FLOW-ENTRY)*
     *                       flow_sequence_entry?
     *                       FLOW-SEQUENCE-END
     * flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
     *
     * Note that while production rules for both flow_sequence_entry and
     * flow_mapping_entry are equal, their interpretations are different.
     * For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
     * generate an inline mapping (set syntax).
     */

    ///Parse an entry in a flow sequence. If first is true, this is the first entry.
    ///
    ///Throws: Error if entries are not separated by commas.
    Event parseFlowSequenceEntry(Flag!"first" first)() @trusted
    {
        //On the first entry, consume FLOW-SEQUENCE-START and remember where it began.
        static if(first){marks_ ~= scanner_.getToken().startMark;}

        if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
        {
            static if(!first)
            {
                //Entries after the first must be separated by ','.
                if(scanner_.checkToken(TokenID.FlowEntry))
                {
                    scanner_.getToken();
                }
                else
                {
                    immutable token = scanner_.peekToken;
                    throw new Error("While parsing a flow sequence", marks_.back,
                                    "expected ',' or ']', but got: " ~
                                    token.idString, token.startMark);
                }
            }

            if(scanner_.checkToken(TokenID.Key))
            {
                //'?' inside a flow sequence starts a single-pair inline mapping.
                immutable token = scanner_.peekToken();
                state_ = &parseFlowSequenceEntryMappingKey;
                return mappingStartEvent(token.startMark, token.endMark,
                                         Anchor(), Tag(), true, CollectionStyle.Flow);
            }
            else if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
            {
                states_ ~= &parseFlowSequenceEntry!(No.first);
                return parseFlowNode();
            }
        }

        //']' reached: the sequence is done; unwind state and mark.
        immutable token = scanner_.getToken();
        state_ = popState();
        popMark();
        return sequenceEndEvent(token.startMark, token.endMark);
    }
|
||||
|
||||
///Parse a key in flow context.
|
||||
Event parseFlowKey(in Event delegate() nextState) @trusted
|
||||
{
|
||||
immutable token = scanner_.getToken();
|
||||
|
||||
if(!scanner_.checkToken(TokenID.Value, TokenID.FlowEntry,
|
||||
TokenID.FlowSequenceEnd))
|
||||
{
|
||||
states_ ~= nextState;
|
||||
return parseFlowNode();
|
||||
}
|
||||
|
||||
state_ = nextState;
|
||||
return processEmptyScalar(token.endMark);
|
||||
}
|
||||
|
||||
///Parse a mapping key in an entry in a flow sequence.
|
||||
Event parseFlowSequenceEntryMappingKey() @safe
|
||||
{
|
||||
return parseFlowKey(&parseFlowSequenceEntryMappingValue);
|
||||
}
|
||||
|
||||
///Parse a mapping value in a flow context.
|
||||
Event parseFlowValue(TokenID checkId, in Event delegate() nextState)
|
||||
@trusted
|
||||
{
|
||||
if(scanner_.checkToken(TokenID.Value))
|
||||
{
|
||||
immutable token = scanner_.getToken();
|
||||
if(!scanner_.checkToken(TokenID.FlowEntry, checkId))
|
||||
{
|
||||
states_ ~= nextState;
|
||||
return parseFlowNode();
|
||||
}
|
||||
|
||||
state_ = nextState;
|
||||
return processEmptyScalar(token.endMark);
|
||||
}
|
||||
|
||||
state_ = nextState;
|
||||
return processEmptyScalar(scanner_.peekToken().startMark);
|
||||
}
|
||||
|
||||
///Parse a mapping value in an entry in a flow sequence.
|
||||
Event parseFlowSequenceEntryMappingValue() @safe
|
||||
{
|
||||
return parseFlowValue(TokenID.FlowSequenceEnd,
|
||||
&parseFlowSequenceEntryMappingEnd);
|
||||
}
|
||||
|
||||
///Parse end of a mapping in a flow sequence entry.
|
||||
Event parseFlowSequenceEntryMappingEnd() @safe
|
||||
{
|
||||
state_ = &parseFlowSequenceEntry!(No.first);
|
||||
immutable token = scanner_.peekToken();
|
||||
return mappingEndEvent(token.startMark, token.startMark);
|
||||
}
|
||||
|
||||
    /**
     * flow_mapping  ::= FLOW-MAPPING-START
     *                   (flow_mapping_entry FLOW-ENTRY)*
     *                   flow_mapping_entry?
     *                   FLOW-MAPPING-END
     * flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?
     */

    ///Parse a key in a flow mapping.
    ///
    ///Throws: Error if entries are not separated by commas.
    Event parseFlowMappingKey(Flag!"first" first)() @trusted
    {
        //On the first key, consume FLOW-MAPPING-START and remember where it began.
        static if(first){marks_ ~= scanner_.getToken().startMark;}

        if(!scanner_.checkToken(TokenID.FlowMappingEnd))
        {
            static if(!first)
            {
                //Entries after the first must be separated by ','.
                if(scanner_.checkToken(TokenID.FlowEntry))
                {
                    scanner_.getToken();
                }
                else
                {
                    immutable token = scanner_.peekToken;
                    throw new Error("While parsing a flow mapping", marks_.back,
                                    "expected ',' or '}', but got: " ~
                                    token.idString, token.startMark);
                }
            }

            if(scanner_.checkToken(TokenID.Key))
            {
                return parseFlowKey(&parseFlowMappingValue);
            }

            //An entry without '?': treat it as a key with an empty value.
            if(!scanner_.checkToken(TokenID.FlowMappingEnd))
            {
                states_ ~= &parseFlowMappingEmptyValue;
                return parseFlowNode();
            }
        }

        //'}' reached: the mapping is done; unwind state and mark.
        immutable token = scanner_.getToken();
        state_ = popState();
        popMark();
        return mappingEndEvent(token.startMark, token.endMark);
    }
|
||||
|
||||
    ///Parse a value in a flow mapping.
    Event parseFlowMappingValue()  @safe
    {
        return parseFlowValue(TokenID.FlowMappingEnd, &parseFlowMappingKey!(No.first));
    }

    ///Parse an empty value in a flow mapping.
    ///
    ///Used for entries written without '?': the key has no value node.
    Event parseFlowMappingEmptyValue() @safe
    {
        state_ = &parseFlowMappingKey!(No.first);
        return processEmptyScalar(scanner_.peekToken().startMark);
    }

    ///Return an empty scalar.
    ///
    ///Emits a zero-width, untagged, unanchored plain scalar at the given mark;
    ///used wherever the grammar allows a node to be omitted.
    Event processEmptyScalar(const Mark mark) const pure @safe nothrow
    {
        return scalarEvent(mark, mark, Anchor(), Tag(), tuple(true, false), "");
    }
|
||||
}
|
233
source/dyaml/queue.d
Normal file
233
source/dyaml/queue.d
Normal file
|
@ -0,0 +1,233 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
module dyaml.queue;
|
||||
|
||||
|
||||
///Queue collection.
|
||||
import core.stdc.stdlib;
|
||||
import core.memory;
|
||||
|
||||
import std.container;
|
||||
import std.traits;
|
||||
|
||||
|
||||
package:
|
||||
|
||||
/**
 * Simple queue implemented as a singly linked list with a tail pointer.
 *
 * Needed in some D:YAML code that needs a queue-like structure without too
 * much reallocation that goes with an array.
 *
 * This should be replaced once Phobos has a decent queue/linked list.
 *
 * Uses manual allocation through malloc/free.
 *
 * Also has some features uncommon for a queue, e.g. iteration.
 * Couldn't bother with implementing a range, as this is used only as
 * a placeholder until Phobos gets a decent replacement.
 */
struct Queue(T)
{
    private:
        ///Linked list node containing one element and pointer to the next node.
        struct Node
        {
            T payload_;
            Node* next_ = null;
        }

        ///Start of the linked list - first element added in time (end of the queue).
        Node* first_ = null;
        ///Last element of the linked list - last element added in time (start of the queue).
        Node* last_ = null;
        ///Cursor pointing to the current node in iteration.
        Node* cursor_ = null;
        ///Length of the queue.
        size_t length_ = 0;

    public:
        //Copying and comparing queues is disabled: nodes are malloc-owned
        //by exactly one instance.
        @disable void opAssign(ref Queue);
        @disable bool opEquals(ref Queue);
        @disable int opCmp(ref Queue);

        ///Destroy the queue, deallocating all its elements.
        @safe nothrow ~this()
        {
            while(!empty){pop();}
            cursor_ = last_ = first_ = null;
            length_ = 0;
        }

        ///Start iterating over the queue. Iteration does not consume elements.
        void startIteration() pure @safe nothrow
        {
            cursor_ = first_;
        }

        ///Get next element in the iteration and advance the cursor.
        ref const(T) next() pure @safe nothrow
        in
        {
            assert(!empty);
            assert(cursor_ !is null);
        }
        body
        {
            const previous = cursor_;
            cursor_ = cursor_.next_;
            return previous.payload_;
        }

        ///Are we done iterating?
        bool iterationOver() const pure @safe nothrow
        {
            return cursor_ is null;
        }

        ///Push new item to the queue.
        void push(T item) @trusted nothrow
        {
            Node* newLast = allocate!Node(item, cast(Node*)null);
            if(last_ !is null){last_.next_ = newLast;}
            if(first_ is null){first_ = newLast;}
            last_ = newLast;
            ++length_;
        }

        ///Insert a new item putting it to specified index in the linked list.
        void insert(T item, in size_t idx) @trusted nothrow
        in
        {
            assert(idx <= length_);
        }
        body
        {
            if(idx == 0)
            {
                //Add before the current first element - so this will be the next to pop.
                first_ = allocate!Node(item, first_);
                ++length_;
            }
            else if(idx == length_)
            {
                //Adding at the very end, so we can just push.
                push(item);
            }
            else
            {
                //Get the element before one we're inserting.
                Node* current = first_;
                foreach(i; 1 .. idx)
                {
                    current = current.next_;
                }

                //Insert a new node after current, and put current.next_ behind it.
                current.next_ = allocate!Node(item, current.next_);
                ++length_;
            }
        }

        ///Return the next element in the queue and remove it.
        T pop() @trusted nothrow
        in
        {
            assert(!empty, "Trying to pop an element from an empty queue");
        }
        body
        {
            //Copy the payload out before freeing the node that holds it.
            T result = peek();
            Node* temp = first_;
            first_ = first_.next_;
            free(temp);
            if(--length_ == 0)
            {
                assert(first_ is null);
                last_ = null;
            }
            //NOTE(review): cursor_ is not adjusted here; popping while an
            //iteration is in progress would leave it dangling - presumably
            //callers never interleave the two. Confirm.

            return result;
        }

        ///Return the next element in the queue.
        ref inout(T) peek() inout pure @safe nothrow
        in
        {
            assert(!empty, "Trying to peek at an element in an empty queue");
        }
        body
        {
            return first_.payload_;
        }

        ///Is the queue empty?
        @property bool empty() const pure @safe nothrow
        {
            return first_ is null;
        }

        ///Return number of elements in the queue.
        @property size_t length() const pure @safe nothrow
        {
            return length_;
        }
}
|
||||
|
||||
|
||||
private:
|
||||
|
||||
///Allocate a struct, passing arguments to its constructor or default initializer.
///
///Returns: Pointer to the malloc'd, initialized struct. Must be released
///         with the matching free() below.
T* allocate(T, Args...)(Args args) @system nothrow
{
    T* ptr = cast(T*)malloc(T.sizeof);
    //NOTE(review): malloc's result is not checked; on allocation failure this
    //dereferences null - presumably accepted for this internal helper. Confirm.
    *ptr = T(args);
    //The struct might contain references to GC-allocated memory, so tell the GC about it.
    static if(hasIndirections!T){GC.addRange(cast(void*)ptr, T.sizeof);}
    return ptr;
}
|
||||
|
||||
///Deallocate struct pointed at by specified pointer.
///
///Counterpart of allocate(): unregisters the memory from the GC, runs the
///destructor if the struct has one, then releases the malloc'd memory.
///
///Params: ptr = Pointer returned by allocate(). Must not be used afterwards.
void free(T)(T* ptr) @system nothrow
{
    //GC doesn't need to care about any references in this struct anymore.
    static if(hasIndirections!T){GC.removeRange(cast(void*)ptr);}
    static if(hasMember!(T, "__dtor")){clear(*ptr);}
    //Qualify through core.stdc.stdlib, which this module imports; the previous
    //"std.c.stdlib.free" qualification referenced a module this file never
    //imports. (Qualification is needed to bypass this template's own name.)
    core.stdc.stdlib.free(ptr);
}
|
||||
|
||||
unittest
{
    auto queue = Queue!int();
    assert(queue.empty);
    //Repeated push/pop must leave the queue empty every time (no leaks of state).
    foreach(i; 0 .. 65)
    {
        queue.push(5);
        assert(queue.pop() == 5);
        assert(queue.empty);
        assert(queue.length_ == 0);
    }

    int[] array = [1, -1, 2, -2, 3, -3, 4, -4, 5, -5];
    foreach(i; array)
    {
        queue.push(i);
    }

    //Expected contents after the three inserts below (front, middle, back).
    array = 42 ~ array[0 .. 3] ~ 42 ~ array[3 .. $] ~ 42;
    queue.insert(42, 3);
    queue.insert(42, 0);
    queue.insert(42, queue.length);

    //Drain the queue and check FIFO order including the inserted elements.
    int[] array2;
    while(!queue.empty)
    {
        array2 ~= queue.pop();
    }

    assert(array == array2);
}
|
702
source/dyaml/reader.d
Normal file
702
source/dyaml/reader.d
Normal file
|
@ -0,0 +1,702 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
module dyaml.reader;
|
||||
|
||||
|
||||
import core.stdc.stdlib;
|
||||
import core.stdc.string;
|
||||
import core.thread;
|
||||
|
||||
import std.algorithm;
|
||||
import std.conv;
|
||||
import std.exception;
|
||||
import std.stdio;
|
||||
import std.stream;
|
||||
import std.string;
|
||||
import std.system;
|
||||
import std.utf;
|
||||
|
||||
import dyaml.fastcharsearch;
|
||||
import dyaml.encoding;
|
||||
import dyaml.exception;
|
||||
|
||||
|
||||
package:
|
||||
|
||||
///Exception thrown at Reader errors.
class ReaderException : YAMLException
{
    ///Construct a ReaderException.
    ///
    ///Params:  msg  = Error description; prefixed with "Error reading stream: ".
    ///         file = File where the exception was constructed (call site default).
    ///         line = Line where the exception was constructed (call site default).
    this(string msg, string file = __FILE__, int line = __LINE__)
        @safe nothrow
    {
        super("Error reading stream: " ~ msg, file, line);
    }
}
|
||||
|
||||
///Lazily reads and decodes data from stream, only storing as much as needed at any moment.
|
||||
final class Reader
|
||||
{
|
||||
private:
|
||||
//Input stream.
|
||||
EndianStream stream_;
|
||||
//Allocated space for buffer_.
|
||||
dchar[] bufferAllocated_ = null;
|
||||
//Buffer of currently loaded characters.
|
||||
dchar[] buffer_ = null;
|
||||
//Current position within buffer. Only data after this position can be read.
|
||||
uint bufferOffset_ = 0;
|
||||
//Index of the current character in the stream.
|
||||
size_t charIndex_ = 0;
|
||||
//Current line in file.
|
||||
uint line_;
|
||||
//Current column in file.
|
||||
uint column_;
|
||||
//Decoder reading data from file and decoding it to UTF-32.
|
||||
UTFFastDecoder decoder_;
|
||||
|
||||
public:
|
||||
    /*
     * Construct a Reader.
     *
     * Params:  stream = Input stream. Must be readable and seekable.
     *
     * Throws:  ReaderException if the stream is invalid.
     */
    this(Stream stream) @trusted
    in
    {
        assert(stream.readable && stream.seekable,
               "Can't read YAML from a stream that is not readable and seekable");
    }
    body
    {
        //Wrap the stream for endian-aware reads; the decoder handles UTF detection.
        stream_ = new EndianStream(stream);
        decoder_ = UTFFastDecoder(stream_);
    }

    ///Destroy the Reader, releasing the manually allocated character buffer.
    @trusted nothrow ~this()
    {
        //Delete the buffer, if allocated.
        if(bufferAllocated_ is null){return;}
        free(bufferAllocated_.ptr);
        buffer_ = bufferAllocated_ = null;
    }
|
||||
|
||||
    /**
     * Get character at specified index relative to current position.
     *
     * Params:  index = Index of the character to get relative to current position
     *                  in the stream.
     *
     * Returns: Character at specified position.
     *
     * Throws:  ReaderException if trying to read past the end of the stream
     *          or if invalid data is read.
     */
    dchar peek(size_t index = 0) @trusted
    {
        //Lazily load more characters if the requested index is not buffered yet.
        if(buffer_.length < bufferOffset_ + index + 1)
        {
            updateBuffer(index + 1);
        }

        //updateBuffer stops at end of stream, so the index may still be out of range.
        if(buffer_.length <= bufferOffset_ + index)
        {
            throw new ReaderException("Trying to read past the end of the stream");
        }

        return buffer_[bufferOffset_ + index];
    }
|
||||
|
||||
    /**
     * Get specified number of characters starting at current position.
     *
     * Note: This gets only a "view" into the internal buffer,
     *       which WILL get invalidated after other Reader calls.
     *
     * Params:  length = Number of characters to get.
     *
     * Returns: Characters starting at current position or an empty slice if out of bounds.
     */
    const(dstring) prefix(size_t length) @safe
    {
        return slice(0, length);
    }

    /**
     * Get a slice view of the internal buffer.
     *
     * Note: This gets only a "view" into the internal buffer,
     *       which WILL get invalidated after other Reader calls.
     *
     * Params:  start = Start of the slice relative to current position.
     *          end   = End of the slice relative to current position.
     *
     * Returns: Slice into the internal buffer or an empty slice if out of bounds.
     */
    const(dstring) slice(size_t start, size_t end) @trusted
    {
        //Lazily load more characters if the requested end is not buffered yet.
        if(buffer_.length <= bufferOffset_ + end)
        {
            updateBuffer(end);
        }

        //Translate the relative range to absolute buffer positions and clamp
        //the end to what is actually available.
        end += bufferOffset_;
        start += bufferOffset_;
        end = min(buffer_.length, end);

        return end > start ? cast(dstring)buffer_[start .. end] : "";
    }
|
||||
|
||||
/**
|
||||
* Get the next character, moving stream position beyond it.
|
||||
*
|
||||
* Returns: Next character.
|
||||
*
|
||||
* Throws: ReaderException if trying to read past the end of the stream
|
||||
* or if invalid data is read.
|
||||
*/
|
||||
dchar get() @safe
|
||||
{
|
||||
const result = peek();
|
||||
forward();
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get specified number of characters, moving stream position beyond them.
|
||||
*
|
||||
* Params: length = Number or characters to get.
|
||||
*
|
||||
* Returns: Characters starting at current position.
|
||||
*
|
||||
* Throws: ReaderException if trying to read past the end of the stream
|
||||
* or if invalid data is read.
|
||||
*/
|
||||
dstring get(size_t length) @safe
|
||||
{
|
||||
auto result = prefix(length).idup;
|
||||
forward(length);
|
||||
return result;
|
||||
}
|
||||
|
||||
    /**
     * Move current position forward.
     *
     * Also maintains the line/column/charIndex bookkeeping, treating
     * '\n', NEL, LS, PS and lone '\r' as line breaks and not counting a BOM
     * as a column.
     *
     * Params:  length = Number of characters to move position forward.
     *
     * Throws:  ReaderException if trying to read past the end of the stream
     *          or if invalid data is read.
     */
    void forward(size_t length = 1) @trusted
    {
        //+1 so one character of lookahead (for the \r\n check below) is buffered.
        if(buffer_.length <= bufferOffset_ + length + 1)
        {
            updateBuffer(length + 1);
        }

        mixin FastCharSearch!"\n\u0085\u2028\u2029"d search;

        while(length > 0)
        {
            const c = buffer_[bufferOffset_];
            ++bufferOffset_;
            ++charIndex_;
            //New line. A '\r' only counts when NOT followed by '\n' ('\r\n' is
            //counted at the '\n').
            //NOTE(review): the buffer_[bufferOffset_] lookahead read assumes a
            //valid character (or sentinel) follows the last consumed one -
            //presumably guaranteed by the updateBuffer(length + 1) call above;
            //confirm for the very end of the stream.
            if(search.canFind(c) || (c == '\r' && buffer_[bufferOffset_] != '\n'))
            {
                ++line_;
                column_ = 0;
            }
            else if(c != '\uFEFF'){++column_;}
            --length;
        }
    }
|
||||
|
||||
///Get a string describing current stream position (line/column), used for error messages.
@property final Mark mark() const pure @safe nothrow {return Mark(line_, column_);}

///Get current line number.
@property final uint line() const pure @safe nothrow {return line_;}

///Get current column number (reset to 0 on every line break).
@property final uint column() const pure @safe nothrow {return column_;}

///Get index of the current character in the stream.
@property final size_t charIndex() const pure @safe nothrow {return charIndex_;}

///Get encoding of the input stream, as detected by the decoder.
@property final Encoding encoding() const pure @safe nothrow {return decoder_.encoding;}
|
||||
|
||||
private:
|
||||
/**
 * Update buffer to be able to read length characters after buffer offset.
 *
 * If there are not enough characters in the stream, it will get
 * as many as possible.
 *
 * Params:  length = Number of characters we need to read.
 *
 * Throws:  ReaderException if trying to read past the end of the stream
 *          or if invalid data is read.
 */
void updateBuffer(in size_t length) @system
{
    //Get rid of unneeded data in the buffer.
    if(bufferOffset_ > 0)
    {
        size_t bufferLength = buffer_.length - bufferOffset_;
        //Source and destination may overlap; memmove handles that.
        memmove(buffer_.ptr, buffer_.ptr + bufferOffset_,
                bufferLength * dchar.sizeof);
        buffer_ = buffer_[0 .. bufferLength];
        bufferOffset_ = 0;
    }

    //Load characters in batches of 512 dchars (2048 bytes).
    while(buffer_.length <= bufferOffset_ + length)
    {
        loadChars(512);

        if(decoder_.done)
        {
            //End of stream: NUL-terminate the buffer so scanning code can
            //rely on a terminator instead of explicit bounds checks.
            if(buffer_.length == 0 || buffer_[$ - 1] != '\0')
            {
                bufferReserve(buffer_.length + 1);
                buffer_ = bufferAllocated_[0 .. buffer_.length + 1];
                buffer_[$ - 1] = '\0';
            }
            break;
        }
    }
}
|
||||
|
||||
/**
 * Load more characters to the buffer.
 *
 * Params:  chars = Recommended number of characters to load.
 *                  More characters might be loaded.
 *                  Less will be loaded if not enough available.
 *
 * Throws:  ReaderException on Unicode decoding error,
 *          if nonprintable characters are detected, or
 *          if there is an error reading from the stream.
 */
void loadChars(size_t chars) @system
{
    const oldLength = buffer_.length;
    const oldPosition = stream_.position;

    //Grow the buffer by chars; the unfilled tail is trimmed below.
    bufferReserve(buffer_.length + chars);
    buffer_ = bufferAllocated_[0 .. buffer_.length + chars];
    scope(success)
    {
        //chars is decremented in the loop for every decoded character,
        //so $-chars cuts off exactly the part that was never filled.
        buffer_ = buffer_[0 .. $ - chars];
        enforce(printable(buffer_[oldLength .. $]),
                new ReaderException("Special unicode characters are not allowed"));
    }

    try for(size_t c = 0; chars && !decoder_.done;)
    {
        const slice = decoder_.getDChars(chars);
        buffer_[oldLength + c .. oldLength + c + slice.length] = slice[];
        c += slice.length;
        chars -= slice.length;
    }
    catch(Exception e)
    {
        handleLoadCharsException(e, oldPosition);
    }
}
|
||||
|
||||
//Handle an exception thrown in loadChars method of any Reader.
//
//UTFException and ReadException are translated into ReaderExceptions with
//added context (byte positions for decoding errors); any other exception
//type is rethrown unchanged. Uses cast-based dispatch instead of the
//rethrow-and-catch idiom, which is cheaper and clearer.
void handleLoadCharsException(Exception e, ulong oldPosition) @system
{
    if(auto utfException = cast(UTFException)e)
    {
        const position = stream_.position;
        throw new ReaderException(format("Unicode decoding error between bytes %s and %s : %s",
                                  oldPosition, position, utfException.msg));
    }
    if(auto readException = cast(ReadException)e)
    {
        throw new ReaderException(readException.msg);
    }
    throw e;
}
|
||||
|
||||
//Code shared by loadEntireFile methods.
void loadEntireFile_() @system
{
    //maxChars is an upper bound; decoding may produce fewer characters
    //(e.g. multi-byte UTF-8 sequences).
    const maxChars = decoder_.maxChars;
    bufferReserve(maxChars + 1);
    loadChars(maxChars);

    //NUL-terminate the buffer so scanning code can rely on a terminator.
    if(buffer_.length == 0 || buffer_[$ - 1] != '\0')
    {
        buffer_ = bufferAllocated_[0 .. buffer_.length + 1];
        buffer_[$ - 1] = '\0';
    }
}
|
||||
|
||||
//Ensure there is space for at least capacity characters in bufferAllocated_.
//
//Uses the C heap (malloc/realloc) so the buffer is invisible to the GC.
//buffer_ is re-sliced afterwards since realloc may move the allocation.
void bufferReserve(in size_t capacity) @system nothrow
{
    if(bufferAllocated_ !is null && bufferAllocated_.length >= capacity){return;}

    //Handle first allocation as well as reallocation.
    auto ptr = bufferAllocated_ !is null
               ? realloc(bufferAllocated_.ptr, capacity * dchar.sizeof)
               : malloc(capacity * dchar.sizeof);
    //A failed C allocation would otherwise surface as a null dereference
    //far away from the allocation site.
    assert(ptr !is null, "Reader: out of memory allocating character buffer");
    bufferAllocated_ = (cast(dchar*)ptr)[0 .. capacity];
    buffer_ = bufferAllocated_[0 .. buffer_.length];
}
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
alias UTFBlockDecoder!512 UTFFastDecoder;

///Decodes streams to UTF-32 in blocks.
struct UTFBlockDecoder(size_t bufferSize_) if (bufferSize_ % 2 == 0)
{
    private:
        //UTF-8 codepoint strides (0xFF are codepoints that can't start a sequence).
        static immutable ubyte[256] utf8Stride =
        [
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
            1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
            0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,
            0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,
            0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,
            0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,0xFF,
            2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
            2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,
            3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
            4,4,4,4,4,4,4,4,5,5,5,5,6,6,0xFF,0xFF,
        ];

        //Encoding of the input stream.
        Encoding encoding_;
        //Maximum number of characters that might be in the stream.
        size_t maxChars_;
        //Bytes available in the stream.
        size_t available_;
        //Input stream.
        EndianStream stream_;

        //Buffer used to store raw UTF-8 or UTF-16 code points
        //(a union - only one view is used depending on encoding_).
        union
        {
            char[bufferSize_] rawBuffer8_;
            wchar[bufferSize_ / 2] rawBuffer16_;
        }
        //Used space (in items) in rawBuffer8_/rawBuffer16_.
        size_t rawUsed_;

        //Space used by buffer_.
        dchar[bufferSize_] bufferSpace_;
        //Buffer of decoded, UTF-32 characters. This is a slice into bufferSpace_.
        dchar[] buffer_;

    public:
        ///Construct a UTFBlockDecoder decoding a stream.
        this(EndianStream stream) @system
        {
            stream_ = stream;
            available_ = stream_.available;

            //Handle files short enough not to have a BOM.
            if(available_ < 2)
            {
                encoding_ = Encoding.UTF_8;
                maxChars_ = 0;

                if(available_ == 1)
                {
                    bufferSpace_[0] = stream_.getc();
                    buffer_ = bufferSpace_[0 .. 1];
                    maxChars_ = 1;
                }
                return;
            }

            //readBOM will determine and set stream endianness.
            switch(stream_.readBOM(2))
            {
                case -1:
                    //readBOM() eats two more bytes in this case so get them back.
                    const wchar bytes = stream_.getcw();
                    rawBuffer8_[0 .. 2] = [cast(ubyte)(bytes % 256), cast(ubyte)(bytes / 256)];
                    rawUsed_ = 2;
                    goto case 0;
                case 0:
                    maxChars_ = available_;
                    encoding_ = Encoding.UTF_8;
                    break;
                case 1, 2:
                    maxChars_ = available_ / 2;
                    //readBOM() eats two more bytes in this case so get them back.
                    encoding_ = Encoding.UTF_16;
                    rawBuffer16_[0] = stream_.getcw();
                    rawUsed_ = 1;
                    enforce(available_ % 2 == 0,
                            new ReaderException("Odd byte count in an UTF-16 stream"));
                    break;
                case 3, 4:
                    maxChars_ = available_ / 4;
                    encoding_ = Encoding.UTF_32;
                    enforce(available_ % 4 == 0,
                            new ReaderException("Byte count in an UTF-32 stream not divisible by 4"));
                    break;
                default: assert(false, "Unknown UTF BOM");
            }
            available_ = stream_.available;
        }

        ///Get maximum number of characters that might be in the stream.
        @property size_t maxChars() const pure @safe nothrow {return maxChars_;}

        ///Get encoding we're decoding from.
        @property Encoding encoding() const pure @safe nothrow {return encoding_;}

        ///Are we done decoding?
        @property bool done() const pure @safe nothrow
        {
            return rawUsed_ == 0 && buffer_.length == 0 && available_ == 0;
        }

        ///Get next character.
        dchar getDChar() @system
        {
            if(buffer_.length)
            {
                const result = buffer_[0];
                buffer_ = buffer_[1 .. $];
                return result;
            }

            assert(available_ > 0 || rawUsed_ > 0);
            updateBuffer();
            return getDChar();
        }

        ///Get as many characters as possible, but at most maxChars. Slice returned will be invalidated in further calls.
        const(dchar[]) getDChars(size_t maxChars = size_t.max) @system
        {
            if(buffer_.length)
            {
                const slice = min(buffer_.length, maxChars);
                const result = buffer_[0 .. slice];
                buffer_ = buffer_[slice .. $];
                return result;
            }

            assert(available_ > 0 || rawUsed_ > 0);
            updateBuffer();
            return getDChars(maxChars);
        }

    private:
        //Read and decode characters from file and store them in the buffer.
        void updateBuffer() @system
        {
            assert(buffer_.length == 0);
            final switch(encoding_)
            {
                case Encoding.UTF_8:
                    const bytes = min(bufferSize_ - rawUsed_, available_);
                    //Current length of valid data in rawBuffer8_.
                    const rawLength = rawUsed_ + bytes;
                    stream_.readExact(rawBuffer8_.ptr + rawUsed_, bytes);
                    available_ -= bytes;
                    decodeRawBuffer(rawBuffer8_, rawLength);
                    break;
                case Encoding.UTF_16:
                    const words = min((bufferSize_ / 2) - rawUsed_, available_ / 2);
                    //Current length of valid data in rawBuffer16_.
                    const rawLength = rawUsed_ + words;
                    foreach(c; rawUsed_ .. rawLength)
                    {
                        stream_.read(rawBuffer16_[c]);
                        available_ -= 2;
                    }
                    decodeRawBuffer(rawBuffer16_, rawLength);
                    break;
                case Encoding.UTF_32:
                    //UTF-32 needs no decoding; read straight into bufferSpace_.
                    const chars = min(bufferSize_ / 4, available_ / 4);
                    foreach(c; 0 .. chars)
                    {
                        stream_.read(bufferSpace_[c]);
                        available_ -= 4;
                    }
                    buffer_ = bufferSpace_[0 .. chars];
                    break;
            }
        }

        //Decode contents of a UTF-8 or UTF-16 raw buffer.
        void decodeRawBuffer(C)(C[] buffer, const size_t length) pure @system
        {
            //End of part of rawBuffer8_ that contains
            //complete characters and can be decoded.
            const end = endOfLastUTFSequence(buffer, length);
            //If end is 0, there are no full UTF-8 chars.
            //This can happen at the end of file if there is an incomplete UTF-8 sequence.
            enforce(end > 0,
                    new ReaderException("Invalid UTF-8 character at the end of stream"));

            decodeUTF(buffer[0 .. end]);

            //After decoding, any code points not decoded go to the start of raw buffer.
            rawUsed_ = length - end;
            foreach(i; 0 .. rawUsed_){buffer[i] = buffer[i + end];}
        }

        //Determine the end of last UTF-8 or UTF-16 sequence in a raw buffer.
        size_t endOfLastUTFSequence(C)(const C[] buffer, const size_t max)
            pure @system nothrow
        {
            static if(is(C == char))
            {
                //Scan backwards for the last sequence start byte.
                for(long end = max - 1; end >= 0; --end)
                {
                    const s = utf8Stride[buffer[cast(size_t)end]];
                    if(s != 0xFF)
                    {
                        //If stride goes beyond end of the buffer (max), return end.
                        //Otherwise the last sequence ends at max, so we can return that.
                        //(Unless there is an invalid code point, which is
                        //caught at decoding)
                        return (s > max - end) ? cast(size_t)end : max;
                    }
                }
                return 0;
            }
            else
            {
                //UTF-16: walk forward by stride until a sequence would overrun max.
                size_t end = 0;
                while(end < max)
                {
                    const s = stride(buffer, end);
                    if(s + end > max){break;}
                    end += s;
                }
                return end;
            }
        }

        //Decode a UTF-8 or UTF-16 buffer (with no incomplete sequences at the end).
        void decodeUTF(C)(const C[] source) pure @system
        {
            size_t bufpos = 0;
            const srclength = source.length;
            for(size_t srcpos = 0; srcpos < srclength;)
            {
                const c = source[srcpos];
                //ASCII fast path; anything else goes through std.utf.decode.
                if(c < 0x80)
                {
                    bufferSpace_[bufpos++] = c;
                    ++srcpos;
                }
                else
                {
                    bufferSpace_[bufpos++] = decode(source, srcpos);
                }
            }
            buffer_ = bufferSpace_[0 .. bufpos];
        }
}
|
||||
|
||||
/**
 * Determine if all characters in an array are printable.
 *
 * Params:  chars = Characters to check.
 *
 * Returns: True if all the characters are printable, false otherwise.
 */
bool printable(const dchar[] chars) pure @safe nothrow
{
    //A single character is printable if it is a YAML-allowed control
    //character (tab, LF, CR, NEL) or falls in one of the printable ranges.
    static bool isPrintable(const dchar c) pure @safe nothrow
    {
        return c == 0x09 || c == 0x0A || c == 0x0D || c == 0x85 ||
               (c >= 0x20 && c <= 0x7E) ||
               (c >= 0xA0 && c <= '\uD7FF') ||
               (c >= '\uE000' && c <= '\uFFFD');
    }

    foreach(c; chars)
    {
        if(!isPrintable(c)){return false;}
    }
    return true;
}
|
||||
|
||||
//Unittests.
|
||||
|
||||
//Test that the reader detects encoding and endianness of UTF-16 input.
void testEndian(R)()
{
    writeln(typeid(R).toString() ~ ": endian unittest");
    void endian_test(ubyte[] data, Encoding encoding_expected, Endian endian_expected)
    {
        Reader reader = new R(new MemoryStream(data));
        assert(reader.encoding == encoding_expected);
        assert(reader.stream_.endian == endian_expected);
    }
    //FF FE / FE FF are the little/big endian UTF-16 byte order marks.
    ubyte[] little_endian_utf_16 = [0xFF, 0xFE, 0x7A, 0x00];
    ubyte[] big_endian_utf_16 = [0xFE, 0xFF, 0x00, 0x7A];
    endian_test(little_endian_utf_16, Encoding.UTF_16, Endian.littleEndian);
    endian_test(big_endian_utf_16, Encoding.UTF_16, Endian.bigEndian);
}
|
||||
|
||||
//Test peek(), prefix() and forward() on a small UTF-8 document.
void testPeekPrefixForward(R)()
{
    writeln(typeid(R).toString() ~ ": peek/prefix/forward unittest");
    ubyte[] data = ByteOrderMarks[BOM.UTF8] ~ cast(ubyte[])"data";
    Reader reader = new R(new MemoryStream(data));
    assert(reader.peek() == 'd');
    assert(reader.peek(1) == 'a');
    assert(reader.peek(2) == 't');
    assert(reader.peek(3) == 'a');
    //The buffer is NUL-terminated at the end of input.
    assert(reader.peek(4) == '\0');
    assert(reader.prefix(4) == "data");
    assert(reader.prefix(6) == "data\0");
    reader.forward(2);
    assert(reader.peek(1) == 'a');
    //Peeking past the end of input must throw.
    assert(collectException(reader.peek(3)));
}
|
||||
|
||||
//Test decoding of UTF-8, UTF-16 and UTF-32 input, each prefixed with its BOM.
void testUTF(R)()
{
    writeln(typeid(R).toString() ~ ": UTF formats unittest");
    dchar[] data = cast(dchar[])"data";
    void utf_test(T)(T[] data, BOM bom)
    {
        //Reinterpret the encoded array as raw bytes and prepend the BOM.
        ubyte[] bytes = ByteOrderMarks[bom] ~
                        (cast(ubyte*)data.ptr)[0 .. data.length * T.sizeof];
        Reader reader = new R(new MemoryStream(bytes));
        assert(reader.peek() == 'd');
        assert(reader.peek(1) == 'a');
        assert(reader.peek(2) == 't');
        assert(reader.peek(3) == 'a');
    }
    utf_test!char(to!(char[])(data), BOM.UTF8);
    //UTF-16/32 BOM must match the native endianness of the in-memory bytes.
    utf_test!wchar(to!(wchar[])(data), endian == Endian.bigEndian ? BOM.UTF16BE : BOM.UTF16LE);
    utf_test(data, endian == Endian.bigEndian ? BOM.UTF32BE : BOM.UTF32LE);
}
|
||||
|
||||
//Run all Reader unittests.
unittest
{
    testEndian!Reader();
    testPeekPrefixForward!Reader();
    testUTF!Reader();
}
|
700
source/dyaml/representer.d
Normal file
700
source/dyaml/representer.d
Normal file
|
@ -0,0 +1,700 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* YAML node _representer. Prepares YAML nodes for output. A tutorial can be
|
||||
* found $(LINK2 ../tutorials/custom_types.html, here).
|
||||
*
|
||||
* Code based on $(LINK2 http://www.pyyaml.org, PyYAML).
|
||||
*/
|
||||
module dyaml.representer;
|
||||
|
||||
|
||||
import std.algorithm;
|
||||
import std.array;
|
||||
import std.base64;
|
||||
import std.container;
|
||||
import std.conv;
|
||||
import std.datetime;
|
||||
import std.exception;
|
||||
import std.format;
|
||||
import std.math;
|
||||
import std.stream;
|
||||
import std.typecons;
|
||||
import std.string;
|
||||
|
||||
import dyaml.exception;
|
||||
import dyaml.node;
|
||||
import dyaml.serializer;
|
||||
import dyaml.style;
|
||||
import dyaml.tag;
|
||||
|
||||
|
||||
///Exception thrown on Representer errors.
class RepresenterException : YAMLException
{
    //Provides the standard exception constructors.
    mixin ExceptionCtors;
}
|
||||
|
||||
/**
 * Represents YAML nodes as scalar, sequence and mapping nodes ready for output.
 *
 * This class is used to add support for dumping of custom data types.
 *
 * It can also override default node formatting styles for output.
 */
final class Representer
{
    private:
        ///Representer functions indexed by types.
        Node function(ref Node, Representer)[TypeInfo] representers_;
        ///Default style for scalar nodes. Invalid means "choose automatically".
        ScalarStyle defaultScalarStyle_ = ScalarStyle.Invalid;
        ///Default style for collection nodes. Invalid means "choose automatically".
        CollectionStyle defaultCollectionStyle_ = CollectionStyle.Invalid;

    public:
        @disable bool opEquals(ref Representer);
        @disable int opCmp(ref Representer);

        /**
         * Construct a Representer.
         *
         * Params:  useDefaultRepresenters = Use default representer functions
         *                                   for default YAML types? This can be
         *                                   disabled to use custom representer
         *                                   functions for default types.
         */
        this(const Flag!"useDefaultRepresenters" useDefaultRepresenters = Yes.useDefaultRepresenters)
            @safe
        {
            if(!useDefaultRepresenters){return;}
            addRepresenter!YAMLNull(&representNull);
            addRepresenter!string(&representString);
            addRepresenter!(ubyte[])(&representBytes);
            addRepresenter!bool(&representBool);
            addRepresenter!long(&representLong);
            addRepresenter!real(&representReal);
            addRepresenter!(Node[])(&representNodes);
            addRepresenter!(Node.Pair[])(&representPairs);
            addRepresenter!SysTime(&representSysTime);
        }

        ///Destroy the Representer.
        pure @safe nothrow ~this()
        {
            clear(representers_);
            representers_ = null;
        }

        ///Set default _style for scalars. If style is $(D ScalarStyle.Invalid), the _style is chosen automatically.
        @property void defaultScalarStyle(ScalarStyle style) pure @safe nothrow
        {
            defaultScalarStyle_ = style;
        }

        ///Set default _style for collections. If style is $(D CollectionStyle.Invalid), the _style is chosen automatically.
        @property void defaultCollectionStyle(CollectionStyle style) pure @safe nothrow
        {
            defaultCollectionStyle_ = style;
        }

        /**
         * Add a function to represent nodes with a specific data type.
         *
         * The representer function takes references to a $(D Node) storing the data
         * type and to the $(D Representer). It returns the represented node and may
         * throw a $(D RepresenterException). See the example for more information.
         *
         *
         * Only one function may be specified for one data type. Default data
         * types already have representer functions unless disabled in the
         * $(D Representer) constructor.
         *
         *
         * Structs and classes must implement the $(D opCmp()) operator for D:YAML
         * support. The signature of the operator that must be implemented
         * is $(D const int opCmp(ref const MyStruct s)) for structs where
         * $(I MyStruct) is the struct type, and $(D int opCmp(Object o)) for
         * classes. Note that the class $(D opCmp()) should not alter the compared
         * values - it is not const for compatibility reasons.
         *
         * Params:  representer = Representer function to add.
         *
         * Examples:
         *
         * Representing a simple struct:
         * --------------------
         * import std.string;
         *
         * import yaml;
         *
         * struct MyStruct
         * {
         *     int x, y, z;
         *
         *     //Any D:YAML type must have a custom opCmp operator.
         *     //This is used for ordering in mappings.
         *     const int opCmp(ref const MyStruct s)
         *     {
         *         if(x != s.x){return x - s.x;}
         *         if(y != s.y){return y - s.y;}
         *         if(z != s.z){return z - s.z;}
         *         return 0;
         *     }
         * }
         *
         * Node representMyStruct(ref Node node, Representer representer)
         * {
         *     //The node is guaranteed to be MyStruct as we add representer for MyStruct.
         *     auto value = node.as!MyStruct;
         *     //Using custom scalar format, x:y:z.
         *     auto scalar = format("%s:%s:%s", value.x, value.y, value.z);
         *     //Representing as a scalar, with custom tag to specify this data type.
         *     return representer.representScalar("!mystruct.tag", scalar);
         * }
         *
         * void main()
         * {
         *     auto dumper = Dumper("file.yaml");
         *     auto representer = new Representer;
         *     representer.addRepresenter!MyStruct(&representMyStruct);
         *     dumper.representer = representer;
         *     dumper.dump(Node(MyStruct(1,2,3)));
         * }
         * --------------------
         *
         * Representing a class:
         * --------------------
         * import std.string;
         *
         * import yaml;
         *
         * class MyClass
         * {
         *     int x, y, z;
         *
         *     this(int x, int y, int z)
         *     {
         *         this.x = x;
         *         this.y = y;
         *         this.z = z;
         *     }
         *
         *     //Any D:YAML type must have a custom opCmp operator.
         *     //This is used for ordering in mappings.
         *     override int opCmp(Object o)
         *     {
         *         MyClass s = cast(MyClass)o;
         *         if(s is null){return -1;}
         *         if(x != s.x){return x - s.x;}
         *         if(y != s.y){return y - s.y;}
         *         if(z != s.z){return z - s.z;}
         *         return 0;
         *     }
         *
         *     ///Useful for Node.as!string .
         *     override string toString()
         *     {
         *         return format("MyClass(%s, %s, %s)", x, y, z);
         *     }
         * }
         *
         * //Same as representMyStruct.
         * Node representMyClass(ref Node node, Representer representer)
         * {
         *     //The node is guaranteed to be MyClass as we add representer for MyClass.
         *     auto value = node.as!MyClass;
         *     //Using custom scalar format, x:y:z.
         *     auto scalar = format("%s:%s:%s", value.x, value.y, value.z);
         *     //Representing as a scalar, with custom tag to specify this data type.
         *     return representer.representScalar("!myclass.tag", scalar);
         * }
         *
         * void main()
         * {
         *     auto dumper = Dumper("file.yaml");
         *     auto representer = new Representer;
         *     representer.addRepresenter!MyClass(&representMyClass);
         *     dumper.representer = representer;
         *     dumper.dump(Node(new MyClass(1,2,3)));
         * }
         * --------------------
         */
        void addRepresenter(T)(Node function(ref Node, Representer) representer) @trusted
        {
            assert((typeid(T) in representers_) is null,
                   "Representer function for data type " ~ typeid(T).toString() ~
                   " already specified. Can't specify another one");
            representers_[typeid(T)] = representer;
        }

        //If profiling shows a bottleneck on tag construction in these 3 methods,
        //we'll need to take Tag directly and have string based wrappers for
        //user code.

        /**
         * Represent a _scalar with specified _tag.
         *
         * This is used by representer functions that produce scalars.
         *
         * Params:  tag    = Tag of the _scalar.
         *          scalar = Scalar value.
         *          style  = Style of the _scalar. If invalid, default _style will be used.
         *                   If the node was loaded before, previous _style will always be used.
         *
         * Returns: The represented node.
         *
         * Example:
         * --------------------
         * struct MyStruct
         * {
         *     int x, y, z;
         *
         *     //Any D:YAML type must have a custom opCmp operator.
         *     //This is used for ordering in mappings.
         *     const int opCmp(ref const MyStruct s)
         *     {
         *         if(x != s.x){return x - s.x;}
         *         if(y != s.y){return y - s.y;}
         *         if(z != s.z){return z - s.z;}
         *         return 0;
         *     }
         * }
         *
         * Node representMyStruct(ref Node node, Representer representer)
         * {
         *     auto value = node.as!MyStruct;
         *     auto scalar = format("%s:%s:%s", value.x, value.y, value.z);
         *     return representer.representScalar("!mystruct.tag", scalar);
         * }
         * --------------------
         */
        Node representScalar(string tag, string scalar,
                             ScalarStyle style = ScalarStyle.Invalid) @safe
        {
            if(style == ScalarStyle.Invalid){style = defaultScalarStyle_;}
            return Node.rawNode(Node.Value(scalar), Mark(), Tag(tag), style,
                                CollectionStyle.Invalid);
        }

        /**
         * Represent a _sequence with specified _tag, representing children first.
         *
         * This is used by representer functions that produce sequences.
         *
         * Params:  tag      = Tag of the _sequence.
         *          sequence = Sequence of nodes.
         *          style    = Style of the _sequence. If invalid, default _style will be used.
         *                     If the node was loaded before, previous _style will always be used.
         *
         * Returns: The represented node.
         *
         * Throws:  $(D RepresenterException) if a child could not be represented.
         *
         * Example:
         * --------------------
         * struct MyStruct
         * {
         *     int x, y, z;
         *
         *     //Any D:YAML type must have a custom opCmp operator.
         *     //This is used for ordering in mappings.
         *     const int opCmp(ref const MyStruct s)
         *     {
         *         if(x != s.x){return x - s.x;}
         *         if(y != s.y){return y - s.y;}
         *         if(z != s.z){return z - s.z;}
         *         return 0;
         *     }
         * }
         *
         * Node representMyStruct(ref Node node, Representer representer)
         * {
         *     auto value = node.as!MyStruct;
         *     auto nodes = [Node(value.x), Node(value.y), Node(value.z)];
         *     //use flow style
         *     return representer.representSequence("!mystruct.tag", nodes,
         *                                          CollectionStyle.Flow);
         * }
         * --------------------
         */
        Node representSequence(string tag, Node[] sequence,
                               CollectionStyle style = CollectionStyle.Invalid) @trusted
        {
            Node[] value;
            value.length = sequence.length;

            //Flow style is only used if all children are plain scalars.
            auto bestStyle = CollectionStyle.Flow;
            foreach(idx, ref item; sequence)
            {
                value[idx] = representData(item);
                const isScalar = value[idx].isScalar;
                const s = value[idx].scalarStyle;
                if(!isScalar || (s != ScalarStyle.Invalid && s != ScalarStyle.Plain))
                {
                    bestStyle = CollectionStyle.Block;
                }
            }

            if(style == CollectionStyle.Invalid)
            {
                style = defaultCollectionStyle_ != CollectionStyle.Invalid
                        ? defaultCollectionStyle_
                        : bestStyle;
            }
            return Node.rawNode(Node.Value(value), Mark(), Tag(tag),
                                ScalarStyle.Invalid, style);
        }

        /**
         * Represent a mapping with specified _tag, representing children first.
         *
         * This is used by representer functions that produce mappings.
         *
         * Params:  tag   = Tag of the mapping.
         *          pairs = Key-value _pairs of the mapping.
         *          style = Style of the mapping. If invalid, default _style will be used.
         *                  If the node was loaded before, previous _style will always be used.
         *
         * Returns: The represented node.
         *
         * Throws:  $(D RepresenterException) if a child could not be represented.
         *
         * Example:
         * --------------------
         * struct MyStruct
         * {
         *     int x, y, z;
         *
         *     //Any D:YAML type must have a custom opCmp operator.
         *     //This is used for ordering in mappings.
         *     const int opCmp(ref const MyStruct s)
         *     {
         *         if(x != s.x){return x - s.x;}
         *         if(y != s.y){return y - s.y;}
         *         if(z != s.z){return z - s.z;}
         *         return 0;
         *     }
         * }
         *
         * Node representMyStruct(ref Node node, Representer representer)
         * {
         *     auto value = node.as!MyStruct;
         *     auto pairs = [Node.Pair("x", value.x),
         *                   Node.Pair("y", value.y),
         *                   Node.Pair("z", value.z)];
         *     return representer.representMapping("!mystruct.tag", pairs);
         * }
         * --------------------
         */
        Node representMapping(string tag, Node.Pair[] pairs,
                              CollectionStyle style = CollectionStyle.Invalid) @trusted
        {
            Node.Pair[] value;
            value.length = pairs.length;

            //Flow style is only used if all keys and values are plain scalars.
            auto bestStyle = CollectionStyle.Flow;
            foreach(idx, ref pair; pairs)
            {
                value[idx] = Node.Pair(representData(pair.key), representData(pair.value));
                const keyScalar = value[idx].key.isScalar;
                const valScalar = value[idx].value.isScalar;
                const keyStyle = value[idx].key.scalarStyle;
                const valStyle = value[idx].value.scalarStyle;
                if(!keyScalar ||
                   (keyStyle != ScalarStyle.Invalid && keyStyle != ScalarStyle.Plain))
                {
                    bestStyle = CollectionStyle.Block;
                }
                if(!valScalar ||
                   (valStyle != ScalarStyle.Invalid && valStyle != ScalarStyle.Plain))
                {
                    bestStyle = CollectionStyle.Block;
                }
            }

            if(style == CollectionStyle.Invalid)
            {
                style = defaultCollectionStyle_ != CollectionStyle.Invalid
                        ? defaultCollectionStyle_
                        : bestStyle;
            }
            return Node.rawNode(Node.Value(value), Mark(), Tag(tag),
                                ScalarStyle.Invalid, style);
        }

    package:
        //Represent a node based on its type, and return the represented result.
        Node representData(ref Node data) @system
        {
            //User types are wrapped in YAMLObject.
            auto type = data.isUserType ? data.as!YAMLObject.type : data.type;

            enforce((type in representers_) !is null,
                    new RepresenterException("No representer function for type "
                                             ~ type.toString() ~ " , cannot represent."));
            //Dispatch to the representer function registered for this type.
            Node result = representers_[type](data, this);

            //Override tag if specified.
            if(!data.tag_.isNull()){result.tag_ = data.tag_;}

            //Remember style if this was loaded before.
            if(data.scalarStyle != ScalarStyle.Invalid)
            {
                result.scalarStyle = data.scalarStyle;
            }
            if(data.collectionStyle != CollectionStyle.Invalid)
            {
                result.collectionStyle = data.collectionStyle;
            }
            return result;
        }

        //Represent a node, serializing with specified Serializer.
        void represent(ref Serializer serializer, ref Node node) @trusted
        {
            auto data = representData(node);
            serializer.serialize(data);
        }
}
|
||||
|
||||
|
||||
///Represent a _null _node as a _null YAML value.
|
||||
Node representNull(ref Node node, Representer representer) @safe
|
||||
{
|
||||
return representer.representScalar("tag:yaml.org,2002:null", "null");
|
||||
}
|
||||
|
||||
///Represent a string _node as a string scalar.
|
||||
Node representString(ref Node node, Representer representer) @safe
|
||||
{
|
||||
string value = node.as!string;
|
||||
return value is null
|
||||
? representNull(node, representer)
|
||||
: representer.representScalar("tag:yaml.org,2002:str", value);
|
||||
}
|
||||
|
||||
///Represent a bytes _node as a binary scalar.
|
||||
Node representBytes(ref Node node, Representer representer) @system
|
||||
{
|
||||
const ubyte[] value = node.as!(ubyte[]);
|
||||
if(value is null){return representNull(node, representer);}
|
||||
return representer.representScalar("tag:yaml.org,2002:binary",
|
||||
cast(string)Base64.encode(value),
|
||||
ScalarStyle.Literal);
|
||||
}
|
||||
|
||||
///Represent a bool _node as a bool scalar.
|
||||
Node representBool(ref Node node, Representer representer) @safe
|
||||
{
|
||||
return representer.representScalar("tag:yaml.org,2002:bool",
|
||||
node.as!bool ? "true" : "false");
|
||||
}
|
||||
|
||||
///Represent a long _node as an integer scalar.
|
||||
Node representLong(ref Node node, Representer representer) @system
|
||||
{
|
||||
return representer.representScalar("tag:yaml.org,2002:int",
|
||||
to!string(node.as!long));
|
||||
}
|
||||
|
||||
///Represent a real _node as a floating point scalar.
|
||||
Node representReal(ref Node node, Representer representer) @system
|
||||
{
|
||||
real f = node.as!real;
|
||||
string value = isNaN(f) ? ".nan":
|
||||
f == real.infinity ? ".inf":
|
||||
f == -1.0 * real.infinity ? "-.inf":
|
||||
{auto a = appender!string;
|
||||
formattedWrite(a, "%12f", f);
|
||||
return a.data.strip();}();
|
||||
|
||||
return representer.representScalar("tag:yaml.org,2002:float", value);
|
||||
}
|
||||
|
||||
///Represent a SysTime _node as a timestamp.
|
||||
Node representSysTime(ref Node node, Representer representer) @system
|
||||
{
|
||||
return representer.representScalar("tag:yaml.org,2002:timestamp",
|
||||
node.as!SysTime.toISOExtString());
|
||||
}
|
||||
|
||||
///Represent a sequence _node as sequence/set.
|
||||
Node representNodes(ref Node node, Representer representer) @safe
|
||||
{
|
||||
auto nodes = node.as!(Node[]);
|
||||
if(node.tag_ == Tag("tag:yaml.org,2002:set"))
|
||||
{
|
||||
///YAML sets are mapping with null values.
|
||||
Node.Pair[] pairs;
|
||||
pairs.length = nodes.length;
|
||||
Node dummy;
|
||||
foreach(idx, ref key; nodes)
|
||||
{
|
||||
pairs[idx] = Node.Pair(key, representNull(dummy, representer));
|
||||
}
|
||||
return representer.representMapping(node.tag_.get, pairs);
|
||||
}
|
||||
else
|
||||
{
|
||||
return representer.representSequence("tag:yaml.org,2002:seq", nodes);
|
||||
}
|
||||
}
|
||||
|
||||
///Represent a mapping _node as map/ordered map/pairs.
|
||||
Node representPairs(ref Node node, Representer representer) @system
|
||||
{
|
||||
auto pairs = node.as!(Node.Pair[]);
|
||||
|
||||
bool hasDuplicates(Node.Pair[] pairs)
|
||||
{
|
||||
//TODO this should be replaced by something with deterministic memory allocation.
|
||||
auto keys = redBlackTree!Node();
|
||||
scope(exit){clear(keys);}
|
||||
foreach(ref pair; pairs)
|
||||
{
|
||||
if(pair.key in keys){return true;}
|
||||
keys.insert(pair.key);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
Node[] mapToSequence(Node.Pair[] pairs)
|
||||
{
|
||||
Node[] nodes;
|
||||
nodes.length = pairs.length;
|
||||
foreach(idx, ref pair; pairs)
|
||||
{
|
||||
nodes[idx] = representer.representMapping("tag:yaml.org,2002:map", [pair]);
|
||||
}
|
||||
return nodes;
|
||||
}
|
||||
|
||||
if(node.tag_ == Tag("tag:yaml.org,2002:omap"))
|
||||
{
|
||||
enforce(!hasDuplicates(pairs),
|
||||
new RepresenterException("Duplicate entry in an ordered map"));
|
||||
return representer.representSequence(node.tag_.get, mapToSequence(pairs));
|
||||
}
|
||||
else if(node.tag_ == Tag("tag:yaml.org,2002:pairs"))
|
||||
{
|
||||
return representer.representSequence(node.tag_.get, mapToSequence(pairs));
|
||||
}
|
||||
else
|
||||
{
|
||||
enforce(!hasDuplicates(pairs),
|
||||
new RepresenterException("Duplicate entry in an unordered map"));
|
||||
return representer.representMapping("tag:yaml.org,2002:map", pairs);
|
||||
}
|
||||
}
|
||||
|
||||
//Unittests
|
||||
//These should really all be encapsulated in unittests.
|
||||
private:
|
||||
|
||||
import dyaml.dumper;
|
||||
|
||||
struct MyStruct
|
||||
{
|
||||
int x, y, z;
|
||||
|
||||
const int opCmp(ref const MyStruct s) const pure @safe nothrow
|
||||
{
|
||||
if(x != s.x){return x - s.x;}
|
||||
if(y != s.y){return y - s.y;}
|
||||
if(z != s.z){return z - s.z;}
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
Node representMyStruct(ref Node node, Representer representer) @system
|
||||
{
|
||||
//The node is guaranteed to be MyStruct as we add representer for MyStruct.
|
||||
auto value = node.as!MyStruct;
|
||||
//Using custom scalar format, x:y:z.
|
||||
auto scalar = format("%s:%s:%s", value.x, value.y, value.z);
|
||||
//Representing as a scalar, with custom tag to specify this data type.
|
||||
return representer.representScalar("!mystruct.tag", scalar);
|
||||
}
|
||||
|
||||
Node representMyStructSeq(ref Node node, Representer representer) @safe
|
||||
{
|
||||
auto value = node.as!MyStruct;
|
||||
auto nodes = [Node(value.x), Node(value.y), Node(value.z)];
|
||||
return representer.representSequence("!mystruct.tag", nodes);
|
||||
}
|
||||
|
||||
Node representMyStructMap(ref Node node, Representer representer) @safe
|
||||
{
|
||||
auto value = node.as!MyStruct;
|
||||
auto pairs = [Node.Pair("x", value.x),
|
||||
Node.Pair("y", value.y),
|
||||
Node.Pair("z", value.z)];
|
||||
return representer.representMapping("!mystruct.tag", pairs);
|
||||
}
|
||||
|
||||
class MyClass
|
||||
{
|
||||
int x, y, z;
|
||||
|
||||
this(int x, int y, int z) pure @safe nothrow
|
||||
{
|
||||
this.x = x;
|
||||
this.y = y;
|
||||
this.z = z;
|
||||
}
|
||||
|
||||
override int opCmp(Object o) pure @safe nothrow
|
||||
{
|
||||
MyClass s = cast(MyClass)o;
|
||||
if(s is null){return -1;}
|
||||
if(x != s.x){return x - s.x;}
|
||||
if(y != s.y){return y - s.y;}
|
||||
if(z != s.z){return z - s.z;}
|
||||
return 0;
|
||||
}
|
||||
|
||||
///Useful for Node.as!string .
|
||||
override string toString() @trusted
|
||||
{
|
||||
return format("MyClass(%s, %s, %s)", x, y, z);
|
||||
}
|
||||
}
|
||||
|
||||
//Same as representMyStruct.
|
||||
Node representMyClass(ref Node node, Representer representer) @system
|
||||
{
|
||||
//The node is guaranteed to be MyClass as we add representer for MyClass.
|
||||
auto value = node.as!MyClass;
|
||||
//Using custom scalar format, x:y:z.
|
||||
auto scalar = format("%s:%s:%s", value.x, value.y, value.z);
|
||||
//Representing as a scalar, with custom tag to specify this data type.
|
||||
return representer.representScalar("!myclass.tag", scalar);
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
foreach(r; [&representMyStruct,
|
||||
&representMyStructSeq,
|
||||
&representMyStructMap])
|
||||
{
|
||||
auto dumper = Dumper(new MemoryStream());
|
||||
auto representer = new Representer;
|
||||
representer.addRepresenter!MyStruct(r);
|
||||
dumper.representer = representer;
|
||||
dumper.dump(Node(MyStruct(1,2,3)));
|
||||
}
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
auto dumper = Dumper(new MemoryStream());
|
||||
auto representer = new Representer;
|
||||
representer.addRepresenter!MyClass(&representMyClass);
|
||||
dumper.representer = representer;
|
||||
dumper.dump(Node(new MyClass(1,2,3)));
|
||||
}
|
265
source/dyaml/resolver.d
Normal file
265
source/dyaml/resolver.d
Normal file
|
@ -0,0 +1,265 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* Implements a class that resolves YAML tags. This can be used to implicitly
|
||||
* resolve tags for custom data types, removing the need to explicitly
|
||||
* specify tags in YAML. A tutorial can be found
|
||||
* $(LINK2 ../tutorials/custom_types.html, here).
|
||||
*
|
||||
* Code based on $(LINK2 http://www.pyyaml.org, PyYAML).
|
||||
*/
|
||||
module dyaml.resolver;
|
||||
|
||||
|
||||
import std.conv;
|
||||
import std.regex;
|
||||
import std.stdio;
|
||||
import std.typecons;
|
||||
import std.utf;
|
||||
|
||||
import dyaml.node;
|
||||
import dyaml.exception;
|
||||
import dyaml.tag;
|
||||
|
||||
|
||||
/**
|
||||
* Resolves YAML tags (data types).
|
||||
*
|
||||
* Can be used to implicitly resolve custom data types of scalar values.
|
||||
*/
|
||||
final class Resolver
|
||||
{
|
||||
private:
|
||||
///Default tag to use for scalars.
|
||||
Tag defaultScalarTag_;
|
||||
///Default tag to use for sequences.
|
||||
Tag defaultSequenceTag_;
|
||||
///Default tag to use for mappings.
|
||||
Tag defaultMappingTag_;
|
||||
|
||||
/**
|
||||
* Arrays of scalar resolver tuples indexed by starting character of a scalar.
|
||||
*
|
||||
* Each tuple stores regular expression the scalar must match,
|
||||
* and tag to assign to it if it matches.
|
||||
*/
|
||||
Tuple!(Tag, Regex!char)[][dchar] yamlImplicitResolvers_;
|
||||
|
||||
public:
|
||||
@disable bool opEquals(ref Resolver);
|
||||
@disable int opCmp(ref Resolver);
|
||||
|
||||
/**
|
||||
* Construct a Resolver.
|
||||
*
|
||||
* If you don't want to implicitly resolve default YAML tags/data types,
|
||||
* you can use defaultImplicitResolvers to disable default resolvers.
|
||||
*
|
||||
* Params: defaultImplicitResolvers = Use default YAML implicit resolvers?
|
||||
*/
|
||||
this(Flag!"useDefaultImplicitResolvers" defaultImplicitResolvers = Yes.useDefaultImplicitResolvers)
|
||||
@safe
|
||||
{
|
||||
defaultScalarTag_ = Tag("tag:yaml.org,2002:str");
|
||||
defaultSequenceTag_ = Tag("tag:yaml.org,2002:seq");
|
||||
defaultMappingTag_ = Tag("tag:yaml.org,2002:map");
|
||||
if(defaultImplicitResolvers){addImplicitResolvers();}
|
||||
}
|
||||
|
||||
///Destroy the Resolver.
|
||||
pure @safe nothrow ~this()
|
||||
{
|
||||
clear(yamlImplicitResolvers_);
|
||||
yamlImplicitResolvers_ = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an implicit scalar resolver.
|
||||
*
|
||||
* If a scalar matches regexp and starts with any character in first,
|
||||
* its _tag is set to tag. If it matches more than one resolver _regexp
|
||||
* resolvers added _first override ones added later. Default resolvers
|
||||
* override any user specified resolvers, but they can be disabled in
|
||||
* Resolver constructor.
|
||||
*
|
||||
* If a scalar is not resolved to anything, it is assigned the default
|
||||
* YAML _tag for strings.
|
||||
*
|
||||
* Params: tag = Tag to resolve to.
|
||||
* regexp = Regular expression the scalar must match to have this _tag.
|
||||
* first = String of possible starting characters of the scalar.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* Resolve scalars starting with 'A' to !_tag :
|
||||
* --------------------
|
||||
* import std.regex;
|
||||
*
|
||||
* import yaml;
|
||||
*
|
||||
* void main()
|
||||
* {
|
||||
* auto loader = Loader("file.txt");
|
||||
* auto resolver = new Resolver();
|
||||
* resolver.addImplicitResolver("!tag", std.regex.regex("A.*"), "A");
|
||||
* loader.resolver = resolver;
|
||||
*
|
||||
* //Note that we have no constructor from tag "!tag", so we can't
|
||||
* //actually load anything that resolves to this tag.
|
||||
* //See Constructor API documentation and tutorial for more information.
|
||||
*
|
||||
* auto node = loader.load();
|
||||
* }
|
||||
* --------------------
|
||||
*/
|
||||
void addImplicitResolver(string tag, Regex!char regexp, string first)
|
||||
pure @safe
|
||||
{
|
||||
foreach(const dchar c; first)
|
||||
{
|
||||
if((c in yamlImplicitResolvers_) is null)
|
||||
{
|
||||
yamlImplicitResolvers_[c] = [];
|
||||
}
|
||||
yamlImplicitResolvers_[c] ~= tuple(Tag(tag), regexp);
|
||||
}
|
||||
}
|
||||
|
||||
package:
|
||||
/*
|
||||
* Resolve tag of a node.
|
||||
*
|
||||
* Params: kind = Type of the node.
|
||||
* tag = Explicit tag of the node, if any.
|
||||
* value = Value of the node, if any.
|
||||
* implicit = Should the node be implicitly resolved?
|
||||
*
|
||||
* If the tag is already specified and not non-specific, that tag will
|
||||
* be returned.
|
||||
*
|
||||
* Returns: Resolved tag.
|
||||
*/
|
||||
Tag resolve(const NodeID kind, const Tag tag, const string value,
|
||||
const bool implicit) @safe
|
||||
{
|
||||
if(!tag.isNull() && tag.get() != "!"){return tag;}
|
||||
|
||||
if(kind == NodeID.Scalar)
|
||||
{
|
||||
if(!implicit){return defaultScalarTag_;}
|
||||
|
||||
//Get the first char of the value.
|
||||
size_t dummy;
|
||||
const dchar first = value.length == 0 ? '\0' : decode(value, dummy);
|
||||
|
||||
auto resolvers = (first in yamlImplicitResolvers_) is null ?
|
||||
[] : yamlImplicitResolvers_[first];
|
||||
|
||||
//If regexp matches, return tag.
|
||||
foreach(resolver; resolvers) if(!(match(value, resolver[1]).empty))
|
||||
{
|
||||
return resolver[0];
|
||||
}
|
||||
return defaultScalarTag_;
|
||||
}
|
||||
else if(kind == NodeID.Sequence){return defaultSequenceTag_;}
|
||||
else if(kind == NodeID.Mapping) {return defaultMappingTag_;}
|
||||
assert(false, "This line of code should never be reached");
|
||||
}
|
||||
unittest
|
||||
{
|
||||
writeln("D:YAML Resolver unittest");
|
||||
|
||||
auto resolver = new Resolver();
|
||||
|
||||
bool tagMatch(string tag, string[] values)
|
||||
{
|
||||
Tag expected = Tag(tag);
|
||||
foreach(value; values)
|
||||
{
|
||||
Tag resolved = resolver.resolve(NodeID.Scalar, Tag(), value, true);
|
||||
if(expected != resolved)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
assert(tagMatch("tag:yaml.org,2002:bool",
|
||||
["yes", "NO", "True", "on"]));
|
||||
assert(tagMatch("tag:yaml.org,2002:float",
|
||||
["6.8523015e+5", "685.230_15e+03", "685_230.15",
|
||||
"190:20:30.15", "-.inf", ".NaN"]));
|
||||
assert(tagMatch("tag:yaml.org,2002:int",
|
||||
["685230", "+685_230", "02472256", "0x_0A_74_AE",
|
||||
"0b1010_0111_0100_1010_1110", "190:20:30"]));
|
||||
assert(tagMatch("tag:yaml.org,2002:merge", ["<<"]));
|
||||
assert(tagMatch("tag:yaml.org,2002:null", ["~", "null", ""]));
|
||||
assert(tagMatch("tag:yaml.org,2002:str",
|
||||
["abcd", "9a8b", "9.1adsf"]));
|
||||
assert(tagMatch("tag:yaml.org,2002:timestamp",
|
||||
["2001-12-15T02:59:43.1Z",
|
||||
"2001-12-14t21:59:43.10-05:00",
|
||||
"2001-12-14 21:59:43.10 -5",
|
||||
"2001-12-15 2:59:43.10",
|
||||
"2002-12-14"]));
|
||||
assert(tagMatch("tag:yaml.org,2002:value", ["="]));
|
||||
assert(tagMatch("tag:yaml.org,2002:yaml", ["!", "&", "*"]));
|
||||
}
|
||||
|
||||
///Return default scalar tag.
|
||||
@property Tag defaultScalarTag() const pure @safe nothrow {return defaultScalarTag_;}
|
||||
|
||||
///Return default sequence tag.
|
||||
@property Tag defaultSequenceTag() const pure @safe nothrow {return defaultSequenceTag_;}
|
||||
|
||||
///Return default mapping tag.
|
||||
@property Tag defaultMappingTag() const pure @safe nothrow {return defaultMappingTag_;}
|
||||
|
||||
private:
|
||||
///Add default implicit resolvers.
|
||||
void addImplicitResolvers() @safe
|
||||
{
|
||||
addImplicitResolver("tag:yaml.org,2002:bool",
|
||||
regex(r"^(?:yes|Yes|YES|no|No|NO|true|True|TRUE"
|
||||
"|false|False|FALSE|on|On|ON|off|Off|OFF)$"),
|
||||
"yYnNtTfFoO");
|
||||
addImplicitResolver("tag:yaml.org,2002:float",
|
||||
regex(r"^(?:[-+]?([0-9][0-9_]*)\\.[0-9_]*"
|
||||
"(?:[eE][-+][0-9]+)?|[-+]?(?:[0-9][0-9_]"
|
||||
"*)?\\.[0-9_]+(?:[eE][-+][0-9]+)?|[-+]?"
|
||||
"[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]"
|
||||
"*|[-+]?\\.(?:inf|Inf|INF)|\\."
|
||||
"(?:nan|NaN|NAN))$"),
|
||||
"-+0123456789.");
|
||||
addImplicitResolver("tag:yaml.org,2002:int",
|
||||
regex(r"^(?:[-+]?0b[0-1_]+"
|
||||
"|[-+]?0[0-7_]+"
|
||||
"|[-+]?(?:0|[1-9][0-9_]*)"
|
||||
"|[-+]?0x[0-9a-fA-F_]+"
|
||||
"|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$"),
|
||||
"-+0123456789");
|
||||
addImplicitResolver("tag:yaml.org,2002:merge", regex(r"^<<$"), "<");
|
||||
addImplicitResolver("tag:yaml.org,2002:null",
|
||||
regex(r"^$|^(?:~|null|Null|NULL)$"), "~nN\0");
|
||||
addImplicitResolver("tag:yaml.org,2002:timestamp",
|
||||
regex(r"^[0-9][0-9][0-9][0-9]-[0-9][0-9]-"
|
||||
"[0-9][0-9]|[0-9][0-9][0-9][0-9]-[0-9]"
|
||||
"[0-9]?-[0-9][0-9]?[Tt]|[ \t]+[0-9]"
|
||||
"[0-9]?:[0-9][0-9]:[0-9][0-9]"
|
||||
"(?:\\.[0-9]*)?(?:[ \t]*Z|[-+][0-9]"
|
||||
"[0-9]?(?::[0-9][0-9])?)?$"),
|
||||
"0123456789");
|
||||
addImplicitResolver("tag:yaml.org,2002:value", regex(r"^=$"), "=");
|
||||
|
||||
|
||||
//The following resolver is only for documentation purposes. It cannot work
|
||||
//because plain scalars cannot start with '!', '&', or '*'.
|
||||
addImplicitResolver("tag:yaml.org,2002:yaml", regex(r"^(?:!|&|\*)$"), "!&*");
|
||||
}
|
||||
}
|
1651
source/dyaml/scanner.d
Normal file
1651
source/dyaml/scanner.d
Normal file
File diff suppressed because it is too large
Load diff
230
source/dyaml/serializer.d
Normal file
230
source/dyaml/serializer.d
Normal file
|
@ -0,0 +1,230 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* YAML serializer.
|
||||
* Code based on PyYAML: http://www.pyyaml.org
|
||||
*/
|
||||
module dyaml.serializer;
|
||||
|
||||
|
||||
import std.array;
|
||||
import std.format;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.emitter;
|
||||
import dyaml.encoding;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
import dyaml.node;
|
||||
import dyaml.resolver;
|
||||
import dyaml.tag;
|
||||
import dyaml.tagdirective;
|
||||
import dyaml.token;
|
||||
|
||||
|
||||
package:
|
||||
|
||||
///Serializes represented YAML nodes, generating events which are then emitted by Emitter.
|
||||
struct Serializer
|
||||
{
|
||||
private:
|
||||
///Emitter to emit events produced.
|
||||
Emitter* emitter_;
|
||||
///Resolver used to determine which tags are automaticaly resolvable.
|
||||
Resolver resolver_;
|
||||
|
||||
///Do all document starts have to be specified explicitly?
|
||||
Flag!"explicitStart" explicitStart_;
|
||||
///Do all document ends have to be specified explicitly?
|
||||
Flag!"explicitEnd" explicitEnd_;
|
||||
///YAML version string.
|
||||
string YAMLVersion_;
|
||||
|
||||
///Tag directives to emit.
|
||||
TagDirective[] tagDirectives_;
|
||||
|
||||
//TODO Use something with more deterministic memory usage.
|
||||
///Nodes with assigned anchors.
|
||||
Anchor[Node] anchors_;
|
||||
///Nodes with assigned anchors that are already serialized.
|
||||
bool[Node] serializedNodes_;
|
||||
///ID of the last anchor generated.
|
||||
uint lastAnchorID_ = 0;
|
||||
|
||||
public:
|
||||
/**
|
||||
* Construct a Serializer.
|
||||
*
|
||||
* Params: emitter = Emitter to emit events produced.
|
||||
* resolver = Resolver used to determine which tags are automaticaly resolvable.
|
||||
* encoding = Character encoding to use.
|
||||
* explicitStart = Do all document starts have to be specified explicitly?
|
||||
* explicitEnd = Do all document ends have to be specified explicitly?
|
||||
* YAMLVersion = YAML version string.
|
||||
* tagDirectives = Tag directives to emit.
|
||||
*/
|
||||
this(ref Emitter emitter, Resolver resolver, Encoding encoding,
|
||||
const Flag!"explicitStart" explicitStart,
|
||||
const Flag!"explicitEnd" explicitEnd, string YAMLVersion,
|
||||
TagDirective[] tagDirectives) @trusted
|
||||
{
|
||||
emitter_ = &emitter;
|
||||
resolver_ = resolver;
|
||||
explicitStart_ = explicitStart;
|
||||
explicitEnd_ = explicitEnd;
|
||||
YAMLVersion_ = YAMLVersion;
|
||||
tagDirectives_ = tagDirectives;
|
||||
|
||||
emitter_.emit(streamStartEvent(Mark(), Mark(), encoding));
|
||||
}
|
||||
|
||||
///Destroy the Serializer.
|
||||
@safe ~this()
|
||||
{
|
||||
emitter_.emit(streamEndEvent(Mark(), Mark()));
|
||||
clear(YAMLVersion_);
|
||||
YAMLVersion_ = null;
|
||||
clear(serializedNodes_);
|
||||
serializedNodes_ = null;
|
||||
clear(anchors_);
|
||||
anchors_ = null;
|
||||
}
|
||||
|
||||
///Serialize a node, emitting it in the process.
|
||||
void serialize(ref Node node) @safe
|
||||
{
|
||||
emitter_.emit(documentStartEvent(Mark(), Mark(), explicitStart_,
|
||||
YAMLVersion_, tagDirectives_));
|
||||
anchorNode(node);
|
||||
serializeNode(node);
|
||||
emitter_.emit(documentEndEvent(Mark(), Mark(), explicitEnd_));
|
||||
clear(serializedNodes_);
|
||||
clear(anchors_);
|
||||
Anchor[Node] emptyAnchors;
|
||||
anchors_ = emptyAnchors;
|
||||
lastAnchorID_ = 0;
|
||||
}
|
||||
|
||||
private:
|
||||
/**
|
||||
* Determine if it's a good idea to add an anchor to a node.
|
||||
*
|
||||
* Used to prevent associating every single repeating scalar with an
|
||||
* anchor/alias - only nodes long enough can use anchors.
|
||||
*
|
||||
* Params: node = Node to check for anchorability.
|
||||
*
|
||||
* Returns: True if the node is anchorable, false otherwise.
|
||||
*/
|
||||
static bool anchorable(ref Node node) @safe
|
||||
{
|
||||
if(node.isScalar)
|
||||
{
|
||||
return node.isType!string ? node.as!string.length > 64 :
|
||||
node.isType!(ubyte[]) ? node.as!(ubyte[]).length > 64:
|
||||
false;
|
||||
}
|
||||
return node.length > 2;
|
||||
}
|
||||
|
||||
///Add an anchor to the node if it's anchorable and not anchored yet.
|
||||
void anchorNode(ref Node node) @safe
|
||||
{
|
||||
if(!anchorable(node)){return;}
|
||||
|
||||
if((node in anchors_) !is null)
|
||||
{
|
||||
if(anchors_[node].isNull())
|
||||
{
|
||||
anchors_[node] = generateAnchor();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
anchors_[node] = Anchor(null);
|
||||
if(node.isSequence) foreach(ref Node item; node)
|
||||
{
|
||||
anchorNode(item);
|
||||
}
|
||||
else if(node.isMapping) foreach(ref Node key, ref Node value; node)
|
||||
{
|
||||
anchorNode(key);
|
||||
anchorNode(value);
|
||||
}
|
||||
}
|
||||
|
||||
///Generate and return a new anchor.
|
||||
Anchor generateAnchor() @trusted
|
||||
{
|
||||
++lastAnchorID_;
|
||||
auto appender = appender!string;
|
||||
formattedWrite(appender, "id%03d", lastAnchorID_);
|
||||
return Anchor(appender.data);
|
||||
}
|
||||
|
||||
///Serialize a node and all its subnodes.
|
||||
void serializeNode(ref Node node) @trusted
|
||||
{
|
||||
//If the node has an anchor, emit an anchor (as aliasEvent) on the
|
||||
//first occurrence, save it in serializedNodes_, and emit an alias
|
||||
//if it reappears.
|
||||
Anchor aliased = Anchor(null);
|
||||
if(anchorable(node) && (node in anchors_) !is null)
|
||||
{
|
||||
aliased = anchors_[node];
|
||||
if((node in serializedNodes_) !is null)
|
||||
{
|
||||
emitter_.emit(aliasEvent(Mark(), Mark(), aliased));
|
||||
return;
|
||||
}
|
||||
serializedNodes_[node] = true;
|
||||
}
|
||||
|
||||
if(node.isScalar)
|
||||
{
|
||||
assert(node.isType!string, "Scalar node type must be string before serialized");
|
||||
auto value = node.as!string;
|
||||
const detectedTag = resolver_.resolve(NodeID.Scalar, Tag(null), value, true);
|
||||
const defaultTag = resolver_.resolve(NodeID.Scalar, Tag(null), value, false);
|
||||
bool isDetected = node.tag_ == detectedTag;
|
||||
bool isDefault = node.tag_ == defaultTag;
|
||||
|
||||
emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_,
|
||||
tuple(isDetected, isDefault), value, node.scalarStyle));
|
||||
return;
|
||||
}
|
||||
if(node.isSequence)
|
||||
{
|
||||
const defaultTag = resolver_.defaultSequenceTag;
|
||||
const implicit = node.tag_ == defaultTag;
|
||||
emitter_.emit(sequenceStartEvent(Mark(), Mark(), aliased, node.tag_,
|
||||
implicit, node.collectionStyle));
|
||||
foreach(ref Node item; node)
|
||||
{
|
||||
serializeNode(item);
|
||||
}
|
||||
emitter_.emit(sequenceEndEvent(Mark(), Mark()));
|
||||
return;
|
||||
}
|
||||
if(node.isMapping)
|
||||
{
|
||||
const defaultTag = resolver_.defaultMappingTag;
|
||||
const implicit = node.tag_ == defaultTag;
|
||||
emitter_.emit(mappingStartEvent(Mark(), Mark(), aliased, node.tag_,
|
||||
implicit, node.collectionStyle));
|
||||
foreach(ref Node key, ref Node value; node)
|
||||
{
|
||||
serializeNode(key);
|
||||
serializeNode(value);
|
||||
}
|
||||
emitter_.emit(mappingEndEvent(Mark(), Mark()));
|
||||
return;
|
||||
}
|
||||
assert(false, "This code should never be reached");
|
||||
}
|
||||
}
|
28
source/dyaml/style.d
Normal file
28
source/dyaml/style.d
Normal file
|
@ -0,0 +1,28 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///YAML node formatting styles.
|
||||
module dyaml.style;
|
||||
|
||||
|
||||
///Scalar styles.
|
||||
enum ScalarStyle : ubyte
|
||||
{
|
||||
Invalid = 0, /// Invalid (uninitialized) style
|
||||
Literal, /// | (Literal block style)
|
||||
Folded, /// > (Folded block style)
|
||||
Plain, /// Plain scalar
|
||||
SingleQuoted, /// Single quoted scalar
|
||||
DoubleQuoted /// Double quoted scalar
|
||||
}
|
||||
|
||||
///Collection styles.
|
||||
enum CollectionStyle : ubyte
|
||||
{
|
||||
Invalid = 0, /// Invalid (uninitialized) style
|
||||
Block, /// Block style.
|
||||
Flow /// Flow style.
|
||||
}
|
13
source/dyaml/tag.d
Normal file
13
source/dyaml/tag.d
Normal file
|
@ -0,0 +1,13 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///YAML tag.
|
||||
module dyaml.tag;
|
||||
|
||||
import dyaml.zerostring;
|
||||
|
||||
///YAML tag (data type) struct. Encapsulates a tag to save memory and speed up comparison.
|
||||
alias ZeroString!"Tag" Tag;
|
15
source/dyaml/tagdirective.d
Normal file
15
source/dyaml/tagdirective.d
Normal file
|
@ -0,0 +1,15 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Tag directives.
|
||||
module dyaml.tagdirective;
|
||||
|
||||
///Single tag directive. handle is the shortcut, prefix is the prefix that replaces it.
|
||||
struct TagDirective
|
||||
{
|
||||
string handle;
|
||||
string prefix;
|
||||
}
|
150
source/dyaml/token.d
Normal file
150
source/dyaml/token.d
Normal file
|
@ -0,0 +1,150 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* YAML tokens.
|
||||
* Code based on PyYAML: http://www.pyyaml.org
|
||||
*/
|
||||
module dyaml.token;
|
||||
|
||||
|
||||
import std.conv;
|
||||
|
||||
import dyaml.encoding;
|
||||
import dyaml.exception;
|
||||
import dyaml.reader;
|
||||
import dyaml.style;
|
||||
|
||||
|
||||
package:
|
||||
///Token types.
|
||||
enum TokenID : ubyte
|
||||
{
|
||||
Invalid = 0, /// Invalid (uninitialized) token
|
||||
Directive, /// DIRECTIVE
|
||||
DocumentStart, /// DOCUMENT-START
|
||||
DocumentEnd, /// DOCUMENT-END
|
||||
StreamStart, /// STREAM-START
|
||||
StreamEnd, /// STREAM-END
|
||||
BlockSequenceStart, /// BLOCK-SEQUENCE-START
|
||||
BlockMappingStart, /// BLOCK-MAPPING-START
|
||||
BlockEnd, /// BLOCK-END
|
||||
FlowSequenceStart, /// FLOW-SEQUENCE-START
|
||||
FlowMappingStart, /// FLOW-MAPPING-START
|
||||
FlowSequenceEnd, /// FLOW-SEQUENCE-END
|
||||
FlowMappingEnd, /// FLOW-MAPPING-END
|
||||
Key, /// KEY
|
||||
Value, /// VALUE
|
||||
BlockEntry, /// BLOCK-ENTRY
|
||||
FlowEntry, /// FLOW-ENTRY
|
||||
Alias, /// ALIAS
|
||||
Anchor, /// ANCHOR
|
||||
Tag, /// TAG
|
||||
Scalar /// SCALAR
|
||||
}
|
||||
|
||||
/**
|
||||
* Token produced by scanner.
|
||||
*
|
||||
* 32 bytes on 64-bit.
|
||||
*/
|
||||
struct Token
|
||||
{
|
||||
@disable int opCmp(ref Token);
|
||||
|
||||
///Value of the token, if any.
|
||||
string value;
|
||||
///Start position of the token in file/stream.
|
||||
Mark startMark;
|
||||
///End position of the token in file/stream.
|
||||
Mark endMark;
|
||||
///Token type.
|
||||
TokenID id;
|
||||
///Style of scalar token, if this is a scalar token.
|
||||
ScalarStyle style;
|
||||
///Encoding, if this is a stream start token.
|
||||
Encoding encoding;
|
||||
|
||||
///Get string representation of the token ID.
|
||||
@property string idString() const @trusted {return to!string(id);}
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a directive token.
|
||||
*
|
||||
* Params: start = Start position of the token.
|
||||
* end = End position of the token.
|
||||
* value = Value of the token.
|
||||
*/
|
||||
Token directiveToken(const Mark start, const Mark end, const string value) pure @safe nothrow
|
||||
{
|
||||
return Token(value, start, end, TokenID.Directive);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a simple (no value) token with specified type.
|
||||
*
|
||||
* Params: id = Type of the token.
|
||||
* start = Start position of the token.
|
||||
* end = End position of the token.
|
||||
*/
|
||||
Token simpleToken(TokenID id)(const Mark start, const Mark end) pure @safe nothrow
|
||||
{
|
||||
return Token(null, start, end, id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a stream start token.
|
||||
*
|
||||
* Params: start = Start position of the token.
|
||||
* end = End position of the token.
|
||||
* encoding = Encoding of the stream.
|
||||
*/
|
||||
Token streamStartToken(const Mark start, const Mark end, const Encoding encoding) pure @safe nothrow
|
||||
{
|
||||
return Token(null, start, end, TokenID.StreamStart, ScalarStyle.Invalid, encoding);
|
||||
}
|
||||
|
||||
///Aliases for construction of simple token types.
|
||||
alias simpleToken!(TokenID.StreamEnd) streamEndToken;
|
||||
alias simpleToken!(TokenID.BlockSequenceStart) blockSequenceStartToken;
|
||||
alias simpleToken!(TokenID.BlockMappingStart) blockMappingStartToken;
|
||||
alias simpleToken!(TokenID.BlockEnd) blockEndToken;
|
||||
alias simpleToken!(TokenID.Key) keyToken;
|
||||
alias simpleToken!(TokenID.Value) valueToken;
|
||||
alias simpleToken!(TokenID.BlockEntry) blockEntryToken;
|
||||
alias simpleToken!(TokenID.FlowEntry) flowEntryToken;
|
||||
|
||||
/**
|
||||
* Construct a simple token with value with specified type.
|
||||
*
|
||||
* Params: id = Type of the token.
|
||||
* start = Start position of the token.
|
||||
* end = End position of the token.
|
||||
* value = Value of the token.
|
||||
*/
|
||||
Token simpleValueToken(TokenID id)(const Mark start, const Mark end, string value) pure @safe nothrow
|
||||
{
|
||||
return Token(value, start, end, id);
|
||||
}
|
||||
|
||||
///Alias for construction of tag token.
|
||||
alias simpleValueToken!(TokenID.Tag) tagToken;
|
||||
alias simpleValueToken!(TokenID.Alias) aliasToken;
|
||||
alias simpleValueToken!(TokenID.Anchor) anchorToken;
|
||||
|
||||
/**
|
||||
* Construct a scalar token.
|
||||
*
|
||||
* Params: start = Start position of the token.
|
||||
* end = End position of the token.
|
||||
* value = Value of the token.
|
||||
* style = Style of the token.
|
||||
*/
|
||||
Token scalarToken(const Mark start, const Mark end, const string value, in ScalarStyle style) pure @safe nothrow
|
||||
{
|
||||
return Token(value, start, end, TokenID.Scalar, style);
|
||||
}
|
81
source/dyaml/zerostring.d
Normal file
81
source/dyaml/zerostring.d
Normal file
|
@ -0,0 +1,81 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Zero terminated string.
|
||||
module dyaml.zerostring;
|
||||
|
||||
import core.stdc.string;
|
||||
|
||||
/**
|
||||
* Zero terminated string used to decrease data structure size.
|
||||
*
|
||||
* TypeName is used to differentiate types (better than simple alias).
|
||||
*/
|
||||
/**
 * Zero terminated string used to decrease data structure size.
 *
 * Instead of a D slice (pointer + length), only a single pointer to a
 * zero-terminated, GC-owned copy of the string is stored; the length is
 * recovered with strlen when needed.
 *
 * TypeName is used to differentiate types (better than simple alias).
 */
struct ZeroString(string TypeName)
{
    private:
        ///Zero terminated string. null means "no string" (see isNull).
        immutable(char)* str_ = null;

    public:
        //Disable the mutable-ref overload; ordering comparison is only
        //provided through the const-ref opCmp defined below.
        @disable int opCmp(ref ZeroString);

        ///Construct a string. A null or empty input yields a null ZeroString.
        this(const string str) pure nothrow @safe
        {
            if(str is null || str == "")
            {
                str_ = null;
                return;
            }

            //str ~ '\0' allocates a fresh GC array, so keeping its .ptr is
            //safe, and the appended terminator makes it usable with the
            //C strlen/strcmp calls below.
            str_ = (str ~ '\0').ptr;
        }

        ///Get the string as a D slice (without the terminating zero).
        ///Must not be called on a null ZeroString (checked by contract).
        @property string get() const nothrow @trusted
        in{assert(!isNull());}
        body
        {
            //@trusted: dereferences the raw pointer; strlen finds the
            //terminator written by the constructor.
            return cast(string)str_[0 .. strlen(str_)];
        }

        ///Test for equality with another string. Null-safe: two null
        ///strings are equal; a null and a non-null string are not.
        bool opEquals(const ZeroString str) const nothrow @trusted
        {
            return isNull ? str.isNull :
                   str.isNull ? false : (0 == strcmp(str_, str.str_));
        }

        ///Compute a hash. Must not be called on a null ZeroString.
        hash_t toHash() const nothrow @safe
        in{assert(!isNull);}
        body
        {
            //Hash the materialized slice via the getHash hack below.
            auto str = get();
            return getHash(str);
        }

        ///Compare with another string (strcmp ordering).
        ///Both strings must be non-null (checked by contract).
        int opCmp(const ref ZeroString str) const nothrow @trusted
        in{assert(!isNull && !str.isNull);}
        body
        {
            return strcmp(str_, str.str_);
        }

        ///Is this string null (invalid)?
        @property bool isNull() pure const nothrow @safe {return str_ is null;}

    private:
        ///Hack to allow toHash to be @safe.
        //
        //To remove this hack, need a typeid(string).getHash() replacement that does not take a pointer.
        hash_t getHash(ref string str) const nothrow @trusted
        {
            return typeid(string).getHash(&str);
        }
}
|
18
source/yaml.d
Normal file
18
source/yaml.d
Normal file
|
@ -0,0 +1,18 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Convenience module: publicly re-exports the D:YAML API (same module list
///as dyaml.all), so client code can simply write "import yaml;".
module yaml;

public import dyaml.constructor;
public import dyaml.dumper;
public import dyaml.encoding;
public import dyaml.exception;
public import dyaml.linebreak;
public import dyaml.loader;
public import dyaml.representer;
public import dyaml.resolver;
public import dyaml.style;
public import dyaml.node;
|
Loading…
Add table
Add a link
Reference in a new issue