make node types into enums and clean up code using them (#225)

* make node types into enums and clean up code using them

* add some tests for anchorable
Cameron Ross 2019-01-27 22:26:00 -03:30 committed by Basile-z
parent bbfe2bbb69
commit b63ea1aaae
9 changed files with 505 additions and 453 deletions
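For code that consumes D-YAML, the practical effect is that the old boolean properties on Node (isScalar, isSequence, isMapping, isNull, isInt, isFloat, isBool, isString, isTime, isBinary, isType!T) go away; callers now read node.type (a NodeType enum member) or the coarser node.nodeID (a NodeID enum member) and can use final switch to get compile-time coverage checks. The sketch below shows one possible migration. It is illustrative only and not part of this commit: the describe() helper and the sample document are invented here, and it assumes the library's usual import dyaml; package module and Loader.fromString, which are not shown in this diff.

    import std.stdio : writeln;
    import dyaml;

    // Hypothetical helper: report what kind of value a loaded node holds,
    // using the new enums instead of the removed is* properties.
    void describe(ref Node node)
    {
        // NodeID answers the old isScalar/isSequence/isMapping questions.
        if (node.nodeID == NodeID.mapping)
            writeln("mapping with ", node.length, " pairs");
        else if (node.nodeID == NodeID.sequence)
            writeln("sequence with ", node.length, " items");
        else
        {
            // NodeType distinguishes the scalar kinds; final switch makes the
            // compiler complain if a member is ever added to the enum.
            final switch (node.type)
            {
                case NodeType.integer: writeln("integer ", node.as!long);  break;
                case NodeType.decimal: writeln("decimal ", node.as!real);  break;
                case NodeType.boolean: writeln("boolean ", node.as!bool);  break;
                case NodeType.string:  writeln("string ", node.as!string); break;
                case NodeType.timestamp:
                case NodeType.binary:
                case NodeType.null_:
                case NodeType.merge:
                    writeln("other scalar, tag ", node.tag);
                    break;
                case NodeType.mapping:
                case NodeType.sequence:
                case NodeType.invalid:
                    break; // collections handled above; invalid means no value stored
            }
        }
    }

    void main()
    {
        auto root = Loader.fromString("{answer: 42, pi: 3.14, tags: [a, b]}").load();
        describe(root);           // mapping with 3 pairs
        describe(root["answer"]); // integer 42
        describe(root["tags"]);   // sequence with 2 items
    }

Because both enums now carry an explicit invalid member, a final switch forces callers to decide what an uninitialized node should do instead of silently falling through — the same pattern the library code below adopts with assert(0) and thrown NodeExceptions.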

View file

@@ -14,41 +14,41 @@ void main()
 JSONValue toJSON(Node node)
 {
     JSONValue output;
-    if (node.isSequence)
-    {
-        output = JSONValue(string[].init);
-        foreach (Node seqNode; node)
-        {
-            output.array ~= seqNode.toJSON();
-        }
-    }
-    else if (node.isMapping)
-    {
-        output = JSONValue(string[string].init);
-        foreach (Node keyNode, Node valueNode; node)
-        {
-            output[keyNode.as!string] = valueNode.toJSON();
-        }
-    }
-    else if (node.isString)
-    {
-        output = node.as!string;
-    }
-    else if (node.isInt)
-    {
-        output = node.as!long;
-    }
-    else if (node.isFloat)
-    {
-        output = node.as!real;
-    }
-    else if (node.isBool)
-    {
-        output = node.as!bool;
-    }
-    else if (node.isTime)
-    {
-        output = node.as!SysTime.toISOExtString();
-    }
+    final switch (node.type)
+    {
+        case NodeType.sequence:
+            output = JSONValue(string[].init);
+            foreach (Node seqNode; node)
+            {
+                output.array ~= seqNode.toJSON();
+            }
+            break;
+        case NodeType.mapping:
+            output = JSONValue(string[string].init);
+            foreach (Node keyNode, Node valueNode; node)
+            {
+                output[keyNode.as!string] = valueNode.toJSON();
+            }
+            break;
+        case NodeType.string:
+            output = node.as!string;
+            break;
+        case NodeType.integer:
+            output = node.as!long;
+            break;
+        case NodeType.decimal:
+            output = node.as!real;
+            break;
+        case NodeType.boolean:
+            output = node.as!bool;
+            break;
+        case NodeType.timestamp:
+            output = node.as!SysTime.toISOExtString();
+            break;
+        case NodeType.merge:
+        case NodeType.null_:
+        case NodeType.binary:
+        case NodeType.invalid:
+    }
     return output;
 }

View file

@@ -18,25 +18,36 @@ void extract(ref Node document) @safe
 {
     void crawl(ref Node root) @safe
     {
-        if(root.isScalar) switch(root.tag)
-        {
-            case "tag:yaml.org,2002:null": auto value = root.as!YAMLNull; break;
-            case "tag:yaml.org,2002:bool": auto value = root.as!bool; break;
-            case "tag:yaml.org,2002:int": auto value = root.as!long; break;
-            case "tag:yaml.org,2002:float": auto value = root.as!real; break;
-            case "tag:yaml.org,2002:binary": auto value = root.as!(ubyte[]); break;
-            case "tag:yaml.org,2002:timestamp": auto value = root.as!SysTime; break;
-            case "tag:yaml.org,2002:str": auto value = root.as!string; break;
-            default: writeln("Unrecognozed tag: ", root.tag);
-        }
-        else if(root.isSequence) foreach(ref Node node; root)
-        {
-            crawl(node);
-        }
-        else if(root.isMapping) foreach(ref Node key, ref Node value; root)
-        {
-            crawl(key);
-            crawl(value);
-        }
+        final switch (root.nodeID)
+        {
+            case NodeID.scalar:
+                switch(root.tag)
+                {
+                    case "tag:yaml.org,2002:null": auto value = root.as!YAMLNull; break;
+                    case "tag:yaml.org,2002:bool": auto value = root.as!bool; break;
+                    case "tag:yaml.org,2002:int": auto value = root.as!long; break;
+                    case "tag:yaml.org,2002:float": auto value = root.as!real; break;
+                    case "tag:yaml.org,2002:binary": auto value = root.as!(ubyte[]); break;
+                    case "tag:yaml.org,2002:timestamp": auto value = root.as!SysTime; break;
+                    case "tag:yaml.org,2002:str": auto value = root.as!string; break;
+                    default: writeln("Unrecognozed tag: ", root.tag);
+                }
+                break;
+            case NodeID.sequence:
+                foreach(ref Node node; root)
+                {
+                    crawl(node);
+                }
+                break;
+            case NodeID.mapping:
+                foreach(ref Node key, ref Node value; root)
+                {
+                    crawl(key);
+                    crawl(value);
+                }
+                break;
+            case NodeID.invalid:
+                assert(0);
+        }
     }
 }

View file

@@ -23,32 +23,30 @@ string statistics(ref Node document)
             tags[root.tag] = 0;
         }
         ++tags[root.tag];
-
-        if(root.isScalar)
+        final switch (root.nodeID)
         {
-            ++scalars;
-            return;
-        }
-        if(root.isSequence)
-        {
-            ++sequences;
-            seqItems += root.length;
-            foreach(ref Node node; root)
-            {
-                crawl(node);
-            }
-            return;
-        }
-        if(root.isMapping)
-        {
-            ++mappings;
-            mapPairs += root.length;
-            foreach(ref Node key, ref Node value; root)
-            {
-                crawl(key);
-                crawl(value);
-            }
-            return;
+            case NodeID.scalar:
+                ++scalars;
+                return;
+            case NodeID.sequence:
+                ++sequences;
+                seqItems += root.length;
+                foreach(ref Node node; root)
+                {
+                    crawl(node);
+                }
+                return;
+            case NodeID.mapping:
+                ++mappings;
+                mapPairs += root.length;
+                foreach(ref Node key, ref Node value; root)
+                {
+                    crawl(key);
+                    crawl(value);
+                }
+                return;
+            case NodeID.invalid:
+                assert(0);
         }
     }

View file

@@ -275,7 +275,7 @@ struct Composer
             throw new ConstructorException("While constructing a mapping, " ~
                             "expected a mapping or a list of " ~
                             "mappings for merging, but found: " ~
-                            node.type.toString() ~
+                            text(node.type) ~
                             " NOTE: line/column shows topmost parent " ~
                             "to which the content is being merged",
                             startMark, endMark);
@@ -284,38 +284,44 @@ struct Composer
         ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
         auto pairAppender = &(pairAppenders_[pairAppenderLevel]);
-        if(root.isMapping)
-        {
-            Node[] toMerge;
-            toMerge.reserve(root.length);
-            foreach(ref Node key, ref Node value; root)
-            {
-                if(key.isType!YAMLMerge)
-                {
-                    toMerge ~= value;
-                }
-                else
-                {
-                    auto temp = Node.Pair(key, value);
-                    pairAppender.put(temp);
-                }
-            }
-            foreach(node; toMerge)
-            {
-                pairAppender.put(flatten(node, startMark, endMark,
-                                         pairAppenderLevel + 1, nodeAppenderLevel));
-            }
-        }
-        //Must be a sequence of mappings.
-        else if(root.isSequence) foreach(ref Node node; root)
-        {
-            if(!node.isType!(Node.Pair[])){error(node);}
-            pairAppender.put(flatten(node, startMark, endMark,
-                                     pairAppenderLevel + 1, nodeAppenderLevel));
-        }
-        else
-        {
-            error(root);
-        }
+        final switch (root.nodeID)
+        {
+            case NodeID.mapping:
+                Node[] toMerge;
+                toMerge.reserve(root.length);
+                foreach (ref Node key, ref Node value; root)
+                {
+                    if(key.type == NodeType.merge)
+                    {
+                        toMerge ~= value;
+                    }
+                    else
+                    {
+                        auto temp = Node.Pair(key, value);
+                        pairAppender.put(temp);
+                    }
+                }
+                foreach (node; toMerge)
+                {
+                    pairAppender.put(flatten(node, startMark, endMark,
+                                             pairAppenderLevel + 1, nodeAppenderLevel));
+                }
+                break;
+            case NodeID.sequence:
+                foreach (ref Node node; root)
+                {
+                    if (node.nodeID != NodeID.mapping)
+                    {
+                        error(node);
+                    }
+                    pairAppender.put(flatten(node, startMark, endMark,
+                                             pairAppenderLevel + 1, nodeAppenderLevel));
+                }
+                break;
+            case NodeID.scalar:
+            case NodeID.invalid:
+                error(root);
+                break;
+        }

         auto flattened = pairAppender.data.dup;
@@ -345,7 +351,7 @@ struct Composer
                               composeNode(pairAppenderLevel + 1, nodeAppenderLevel));
                 //Need to flatten and merge the node referred by YAMLMerge.
-                if(pair.key.isType!YAMLMerge)
+                if(pair.key.type == NodeType.merge)
                 {
                     toMerge ~= tuple(pair.value, cast(Mark)parser_.front.endMark);
                 }

View file

@@ -464,7 +464,7 @@ Node.Pair[] getPairs(string type, const Node[] nodes) @safe
     pairs.reserve(nodes.length);
     foreach(node; nodes)
     {
-        enforce(node.isMapping && node.length == 1,
+        enforce(node.nodeID == NodeID.mapping && node.length == 1,
                 new Exception("While constructing " ~ type ~
                               ", expected a mapping with single element"));

View file

@@ -41,11 +41,12 @@ class NodeException : YAMLException
 }

 // Node kinds.
-package enum NodeID : ubyte
+enum NodeID : ubyte
 {
     scalar,
     sequence,
-    mapping
+    mapping,
+    invalid
 }

 /// Null YAML type. Used in nodes with _null values.
@@ -102,7 +103,8 @@ enum NodeType
     timestamp,
     string,
     mapping,
-    sequence
+    sequence,
+    invalid
 }

 /** YAML node.
@@ -289,7 +291,7 @@ struct Node
     {
         {
            auto node = Node(42);
-           assert(node.isScalar && !node.isSequence && !node.isMapping);
+           assert(node.nodeID == NodeID.scalar);
            assert(node.as!int == 42 && node.as!float == 42.0f && node.as!string == "42");
         }
@@ -302,7 +304,7 @@ struct Node
     {
        with(Node([1, 2, 3]))
        {
-           assert(!isScalar() && isSequence && !isMapping);
+           assert(nodeID == NodeID.sequence);
            assert(length == 3);
            assert(opIndex(2).as!int == 3);
        }
@@ -315,7 +317,7 @@ struct Node
        aa["2"] = 2;
        with(Node(aa))
        {
-           assert(!isScalar() && !isSequence && isMapping);
+           assert(nodeID == NodeID.mapping);
            assert(length == 2);
            assert(opIndex("2").as!int == 2);
        }
@@ -384,7 +386,7 @@ struct Node
     {
        with(Node(["1", "2"], [1, 2]))
        {
-           assert(!isScalar() && !isSequence && isMapping);
+           assert(nodeID == NodeID.mapping);
            assert(length == 2);
            assert(opIndex("2").as!int == 2);
        }
@@ -397,30 +399,6 @@ struct Node
            return value_.hasValue;
        }

-       /// Is this node a scalar value?
-       @property bool isScalar() const @safe nothrow
-       {
-           return !(isMapping || isSequence);
-       }
-
-       /// Is this node a sequence?
-       @property bool isSequence() const @safe nothrow
-       {
-           return isType!(Node[]);
-       }
-
-       /// Is this node a mapping?
-       @property bool isMapping() const @safe nothrow
-       {
-           return isType!(Pair[]);
-       }
-
-       /// Is this node null?
-       @property bool isNull() const @safe nothrow
-       {
-           return isType!YAMLNull;
-       }
-
        /// Return tag of the node.
        @property string tag() const @safe nothrow
        {
@@ -552,15 +530,16 @@ struct Node
            // If we're getting from a mapping and we're not getting Node.Pair[],
            // we're getting the default value.
-           if(isMapping){return this["="].get!( T, stringConversion);}
+           if(nodeID == NodeID.mapping){return this["="].get!( T, stringConversion);}

            static if(isSomeString!T)
            {
                static if(!stringConversion)
                {
-                   if(isString){return to!T(getValue!string);}
-                   throw new NodeException("Node stores unexpected type: " ~ type.toString() ~
-                                   ". Expected: " ~ typeid(T).toString(), startMark_);
+                   enforce(type == NodeType.string, new NodeException(
+                       "Node stores unexpected type: " ~ text(type) ~
+                       ". Expected: " ~ typeid(T).toString(), startMark_));
+                   return to!T(getValue!string);
                }
                else
                {
@@ -577,15 +556,28 @@ struct Node
            }
            else static if(isFloatingPoint!T)
            {
-               /// Can convert int to float.
-               if(isInt()) {return to!T(getValue!long);}
-               else if(isFloat()){return to!T(getValue!real);}
-               else throw new NodeException("Node stores unexpected type: " ~ type.toString() ~
-                               ". Expected: " ~ typeid(T).toString, startMark_);
+               final switch (type)
+               {
+                   case NodeType.integer:
+                       return to!T(getValue!long);
+                   case NodeType.decimal:
+                       return to!T(getValue!real);
+                   case NodeType.binary:
+                   case NodeType.string:
+                   case NodeType.boolean:
+                   case NodeType.null_:
+                   case NodeType.merge:
+                   case NodeType.invalid:
+                   case NodeType.timestamp:
+                   case NodeType.mapping:
+                   case NodeType.sequence:
+                       throw new NodeException("Node stores unexpected type: " ~ text(type) ~
+                                       ". Expected: " ~ typeid(T).toString, startMark_);
+               }
            }
            else static if(isIntegral!T)
            {
-               enforce(isInt(), new NodeException("Node stores unexpected type: " ~ type.toString() ~
+               enforce(type == NodeType.integer, new NodeException("Node stores unexpected type: " ~ text(type) ~
                                ". Expected: " ~ typeid(T).toString, startMark_));
                immutable temp = getValue!long;
                enforce(temp >= T.min && temp <= T.max,
@@ -593,7 +585,7 @@ struct Node
                        " out of range. Value: " ~ to!string(temp), startMark_));
                return temp.to!T;
            }
-           else throw new NodeException("Node stores unexpected type: " ~ type.toString() ~
+           else throw new NodeException("Node stores unexpected type: " ~ text(type) ~
                            ". Expected: " ~ typeid(T).toString, startMark_);
        }
    }
@@ -856,10 +848,17 @@ struct Node
     */
    @property size_t length() const @safe
    {
-       if(isSequence) { return getValue!(Node[]).length; }
-       else if(isMapping) { return getValue!(Pair[]).length; }
-       throw new NodeException("Trying to get length of a " ~ nodeTypeString ~ " node",
-                       startMark_);
+       final switch(nodeID)
+       {
+           case NodeID.sequence:
+               return getValue!(Node[]).length;
+           case NodeID.mapping:
+               return getValue!(Pair[]).length;
+           case NodeID.scalar:
+           case NodeID.invalid:
+               throw new NodeException("Trying to get length of a " ~ nodeTypeString ~ " node",
+                               startMark_);
+       }
    }

    @safe unittest
    {
@@ -892,27 +891,31 @@ struct Node
     */
    ref inout(Node) opIndex(T)(T index) inout @safe
    {
-       if(isSequence)
-       {
-           checkSequenceIndex(index);
-           static if(isIntegral!T)
-           {
-               return getValue!(Node[])[index];
-           }
-           assert(false);
-       }
-       else if(isMapping)
-       {
-           auto idx = findPair(index);
-           if(idx >= 0)
-           {
-               return getValue!(Pair[])[idx].value;
-           }
-
-           string msg = "Mapping index not found" ~ (isSomeString!T ? ": " ~ to!string(index) : "");
-           throw new NodeException(msg, startMark_);
-       }
-       throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_);
+       final switch (nodeID)
+       {
+           case NodeID.sequence:
+               checkSequenceIndex(index);
+               static if(isIntegral!T)
+               {
+                   return getValue!(Node[])[index];
+               }
+               else
+               {
+                   assert(false, "Only integers may index sequence nodes");
+               }
+           case NodeID.mapping:
+               auto idx = findPair(index);
+               if(idx >= 0)
+               {
+                   return getValue!(Pair[])[idx].value;
+               }
+
+               string msg = "Mapping index not found" ~ (isSomeString!T ? ": " ~ to!string(index) : "");
+               throw new NodeException(msg, startMark_);
+           case NodeID.scalar:
+           case NodeID.invalid:
+               throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_);
+       }
    }

    ///
    @safe unittest
@@ -1079,35 +1082,34 @@ struct Node
     */
    void opIndexAssign(K, V)(V value, K index)
    {
-       if(isSequence())
-       {
-           // This ensures K is integral.
-           checkSequenceIndex(index);
-           static if(isIntegral!K || is(Unqual!K == bool))
-           {
-               auto nodes = getValue!(Node[]);
-               static if(is(Unqual!V == Node)){nodes[index] = value;}
-               else {nodes[index] = Node(value);}
-               setValue(nodes);
-               return;
-           }
-           assert(false);
-       }
-       else if(isMapping())
-       {
-           const idx = findPair(index);
-           if(idx < 0){add(index, value);}
-           else
-           {
-               auto pairs = as!(Node.Pair[])();
-               static if(is(Unqual!V == Node)){pairs[idx].value = value;}
-               else {pairs[idx].value = Node(value);}
-               setValue(pairs);
-           }
-           return;
-       }
-       throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_);
+       final switch (nodeID)
+       {
+           case NodeID.sequence:
+               checkSequenceIndex(index);
+               static if(isIntegral!K || is(Unqual!K == bool))
+               {
+                   auto nodes = getValue!(Node[]);
+                   static if(is(Unqual!V == Node)){nodes[index] = value;}
+                   else {nodes[index] = Node(value);}
+                   setValue(nodes);
+                   return;
+               }
+               assert(false, "Only integers may index sequence nodes");
+           case NodeID.mapping:
+               const idx = findPair(index);
+               if(idx < 0){add(index, value);}
+               else
+               {
+                   auto pairs = as!(Node.Pair[])();
+                   static if(is(Unqual!V == Node)){pairs[idx].value = value;}
+                   else {pairs[idx].value = Node(value);}
+                   setValue(pairs);
+               }
+               return;
+           case NodeID.scalar:
+           case NodeID.invalid:
+               throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_);
+       }
    }

    @safe unittest
    {
@@ -1215,14 +1217,14 @@ struct Node
    }
    auto sequence()
    {
-       enforce(isSequence,
+       enforce(nodeID == NodeID.sequence,
                new NodeException("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node",
                                startMark_));
        return Range!(Node[])(get!(Node[]));
    }
    auto sequence() const
    {
-       enforce(isSequence,
+       enforce(nodeID == NodeID.sequence,
                new NodeException("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node",
                                startMark_));
        return Range!(const(Node)[])(get!(Node[]));
@@ -1305,14 +1307,14 @@ struct Node
    auto mapping()
    {
-       enforce(isMapping,
+       enforce(nodeID == NodeID.mapping,
                new NodeException("Trying to 'mapping'-iterate over a "
                                ~ nodeTypeString ~ " node", startMark_));
        return Range!(Node.Pair[])(get!(Node.Pair[]));
    }
    auto mapping() const
    {
-       enforce(isMapping,
+       enforce(nodeID == NodeID.mapping,
                new NodeException("Trying to 'mapping'-iterate over a "
                                ~ nodeTypeString ~ " node", startMark_));
        return Range!(const(Node.Pair)[])(get!(Node.Pair[]));
@@ -1350,7 +1352,7 @@ struct Node
     */
    auto mappingKeys(K = Node)() const
    {
-       enforce(isMapping,
+       enforce(nodeID == NodeID.mapping,
                new NodeException("Trying to 'mappingKeys'-iterate over a "
                                ~ nodeTypeString ~ " node", startMark_));
        static if (is(Unqual!K == Node))
@@ -1382,7 +1384,7 @@ struct Node
     */
    auto mappingValues(V = Node)() const
    {
-       enforce(isMapping,
+       enforce(nodeID == NodeID.mapping,
                new NodeException("Trying to 'mappingValues'-iterate over a "
                                ~ nodeTypeString ~ " node", startMark_));
        static if (is(Unqual!V == Node))
@@ -1415,7 +1417,7 @@ struct Node
     */
    int opApply(D)(D dg) if (isDelegate!D && (Parameters!D.length == 1))
    {
-       enforce(isSequence,
+       enforce(nodeID == NodeID.sequence,
                new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node",
                                startMark_));
@@ -1438,7 +1440,7 @@ struct Node
    /// ditto
    int opApply(D)(D dg) const if (isDelegate!D && (Parameters!D.length == 1))
    {
-       enforce(isSequence,
+       enforce(nodeID == NodeID.sequence,
                new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node",
                                startMark_));
@@ -1529,7 +1531,7 @@ struct Node
    {
        alias K = Parameters!DG[0];
        alias V = Parameters!DG[1];
-       enforce(isMapping,
+       enforce(nodeID == NodeID.mapping,
                new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node",
                                startMark_));
@@ -1566,7 +1568,7 @@ struct Node
    {
        alias K = Parameters!DG[0];
        alias V = Parameters!DG[1];
-       enforce(isMapping,
+       enforce(nodeID == NodeID.mapping,
                new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node",
                                startMark_));
@@ -1703,7 +1705,7 @@ struct Node
        {
            setValue(Node[].init);
        }
-       enforce(isSequence(),
+       enforce(nodeID == NodeID.sequence,
               new NodeException("Trying to add an element to a " ~ nodeTypeString ~ " node", startMark_));

        auto nodes = get!(Node[])();
@@ -1754,7 +1756,7 @@ struct Node
        {
            setValue(Node.Pair[].init);
        }
-       enforce(isMapping(),
+       enforce(nodeID == NodeID.mapping,
               new NodeException("Trying to add a key-value pair to a " ~
                                 nodeTypeString ~ " node",
                                 startMark_));
@@ -1802,7 +1804,7 @@ struct Node
    inout(Node*) opBinaryRight(string op, K)(K key) inout
        if (op == "in")
    {
-       enforce(isMapping, new NodeException("Trying to use 'in' on a " ~
+       enforce(nodeID == NodeID.mapping, new NodeException("Trying to use 'in' on a " ~
                                             nodeTypeString ~ " node", startMark_));

        auto idx = findPair(key);
@@ -1943,7 +1945,7 @@ struct Node
        if(!v1){return v2 ? -1 : 0;}
        if(!v2){return 1;}

-       const typeCmp = cmp(newType, rhs.newType);
+       const typeCmp = cmp(type, rhs.type);
        if(typeCmp != 0){return typeCmp;}

        static int compareCollections(T)(const ref Node lhs, const ref Node rhs)
@@ -1964,62 +1966,54 @@ struct Node
            return 0;
        }

-       if(isSequence){return compareCollections!(Node[])(this, rhs);}
-       if(isMapping) {return compareCollections!(Pair[])(this, rhs);}
-       if(isString)
-       {
-           return std.algorithm.cmp(getValue!string,
-                                    rhs.getValue!string);
-       }
-       if(isInt)
-       {
-           return cmp(getValue!long, rhs.getValue!long);
-       }
-       if(isBool)
-       {
-           const b1 = getValue!bool;
-           const b2 = rhs.getValue!bool;
-           return b1 ? b2 ? 0 : 1
-                     : b2 ? -1 : 0;
-       }
-       if(isBinary)
-       {
-           const b1 = getValue!(ubyte[]);
-           const b2 = rhs.getValue!(ubyte[]);
-           return std.algorithm.cmp(b1, b2);
-       }
-       if(isNull)
-       {
-           return 0;
-       }
-       // Floats need special handling for NaNs .
-       // We consider NaN to be lower than any float.
-       if(isFloat)
-       {
-           const r1 = getValue!real;
-           const r2 = rhs.getValue!real;
-           if(isNaN(r1))
-           {
-               return isNaN(r2) ? 0 : -1;
-           }
-           if(isNaN(r2))
-           {
-               return 1;
-           }
-           // Fuzzy equality.
-           if(r1 <= r2 + real.epsilon && r1 >= r2 - real.epsilon)
-           {
-               return 0;
-           }
-           return cmp(r1, r2);
-       }
-       else if(isTime)
-       {
-           const t1 = getValue!SysTime;
-           const t2 = rhs.getValue!SysTime;
-           return cmp(t1, t2);
-       }
-       assert(false, "Unknown type of node for comparison : " ~ type.toString());
+       final switch(type)
+       {
+           case NodeType.string:
+               return std.algorithm.cmp(getValue!string,
+                                        rhs.getValue!string);
+           case NodeType.integer:
+               return cmp(getValue!long, rhs.getValue!long);
+           case NodeType.boolean:
+               const b1 = getValue!bool;
+               const b2 = rhs.getValue!bool;
+               return b1 ? b2 ? 0 : 1
+                         : b2 ? -1 : 0;
+           case NodeType.binary:
+               const b1 = getValue!(ubyte[]);
+               const b2 = rhs.getValue!(ubyte[]);
+               return std.algorithm.cmp(b1, b2);
+           case NodeType.null_:
+               return 0;
+           case NodeType.decimal:
+               // Floats need special handling for NaNs.
+               // We consider NaN to be lower than any float.
+               const r1 = getValue!real;
+               const r2 = rhs.getValue!real;
+               if(isNaN(r1))
+               {
+                   return isNaN(r2) ? 0 : -1;
+               }
+               if(isNaN(r2))
+               {
+                   return 1;
+               }
+               // Fuzzy equality.
+               if(r1 <= r2 + real.epsilon && r1 >= r2 - real.epsilon)
+               {
+                   return 0;
+               }
+               return cmp(r1, r2);
+           case NodeType.timestamp:
+               const t1 = getValue!SysTime;
+               const t2 = rhs.getValue!SysTime;
+               return cmp(t1, t2);
+           case NodeType.mapping:
+               return compareCollections!(Pair[])(this, rhs);
+           case NodeType.sequence:
+               return compareCollections!(Node[])(this, rhs);
+           case NodeType.merge:
+               assert(false, "Cannot compare merge nodes");
+           case NodeType.invalid:
+               assert(false, "Cannot compare invalid nodes");
+       }
    }

    // Ensure opCmp is symmetric for collections
@@ -2054,56 +2048,8 @@ struct Node
        assert(Node(42).toHash() != Node(42, "some-tag").toHash());
    }

-   package:
-
-   // Get a string representation of the node tree. Used for debugging.
-   //
-   // Params: level = Level of the node in the tree.
-   //
-   // Returns: String representing the node tree.
-   @property string debugString(uint level = 0) const @safe
-   {
-       string indent;
-       foreach(i; 0 .. level){indent ~= " ";}
-
-       if(!isValid){return indent ~ "invalid";}
-       if(isSequence)
-       {
-           string result = indent ~ "sequence:\n";
-           foreach(ref node; get!(Node[]))
-           {
-               result ~= node.debugString(level + 1);
-           }
-           return result;
-       }
-       if(isMapping)
-       {
-           string result = indent ~ "mapping:\n";
-           foreach(ref pair; get!(Node.Pair[]))
-           {
-               result ~= indent ~ " pair\n";
-               result ~= pair.key.debugString(level + 2);
-               result ~= pair.value.debugString(level + 2);
-           }
-           return result;
-       }
-       if(isScalar)
-       {
-           return indent ~ "scalar(" ~
-                  (convertsTo!string ? get!string : type.toString()) ~ ")\n";
-       }
-       assert(false);
-   }
-
-   // Get type of the node value.
-   @property TypeInfo type() const @safe nothrow
-   {
-       return value_.type;
-   }
-
-   // Get type of the node value.
-   @property NodeType newType() const @safe nothrow
+   /// Get type of the node value.
+   @property NodeType type() const @safe nothrow
    {
        if (value_.type is typeid(bool))
        {
@@ -2145,49 +2091,88 @@ struct Node
        {
            return NodeType.decimal;
        }
+       else if (!value_.hasValue)
+       {
+           return NodeType.invalid;
+       }
        else assert(0, text(value_.type));
    }

-   public:
-
-   // Determine if the value stored by the node is of specified type.
-   //
-   // This only works for default YAML types, not for user defined types.
-   @property bool isType(T)() const
-   {
-       return this.type is typeid(Unqual!T);
-   }
-
-   // Is the value a bool?
-   alias isBool = isType!bool;
-
-   // Is the value a raw binary buffer?
-   alias isBinary = isType!(ubyte[]);
-
-   // Is the value an integer?
-   alias isInt = isType!long;
-
-   // Is the value a floating point number?
-   alias isFloat = isType!real;
-
-   // Is the value a string?
-   alias isString = isType!string;
-
-   // Is the value a timestamp?
-   alias isTime = isType!SysTime;
-
-   // Does given node have the same type as this node?
-   bool hasEqualType(const ref Node node) const @safe
-   {
-       return this.type is node.type;
-   }
-
-   // Return a string describing node type (sequence, mapping or scalar)
-   @property string nodeTypeString() const @safe nothrow
-   {
-       assert(isScalar || isSequence || isMapping, "Unknown node type");
-       return isScalar ? "scalar" :
-              isSequence ? "sequence" :
-              isMapping ? "mapping" : "";
-   }
+   /// Get the kind of node this is.
+   @property NodeID nodeID() const @safe nothrow
+   {
+       final switch (type)
+       {
+           case NodeType.sequence:
+               return NodeID.sequence;
+           case NodeType.mapping:
+               return NodeID.mapping;
+           case NodeType.boolean:
+           case NodeType.integer:
+           case NodeType.binary:
+           case NodeType.string:
+           case NodeType.timestamp:
+           case NodeType.null_:
+           case NodeType.merge:
+           case NodeType.decimal:
+               return NodeID.scalar;
+           case NodeType.invalid:
+               return NodeID.invalid;
+       }
+   }
+
+   package:
+
+   // Get a string representation of the node tree. Used for debugging.
+   //
+   // Params: level = Level of the node in the tree.
+   //
+   // Returns: String representing the node tree.
+   @property string debugString(uint level = 0) const @safe
+   {
+       string indent;
+       foreach(i; 0 .. level){indent ~= " ";}
+
+       final switch (nodeID)
+       {
+           case NodeID.invalid:
+               return indent ~ "invalid";
+           case NodeID.sequence:
+               string result = indent ~ "sequence:\n";
+               foreach(ref node; get!(Node[]))
+               {
+                   result ~= node.debugString(level + 1);
+               }
+               return result;
+           case NodeID.mapping:
+               string result = indent ~ "mapping:\n";
+               foreach(ref pair; get!(Node.Pair[]))
+               {
+                   result ~= indent ~ " pair\n";
+                   result ~= pair.key.debugString(level + 2);
+                   result ~= pair.value.debugString(level + 2);
+               }
+               return result;
+           case NodeID.scalar:
+               return indent ~ "scalar(" ~
+                      (convertsTo!string ? get!string : text(type)) ~ ")\n";
+       }
+   }
+
+   public:
+
+   @property string nodeTypeString() const @safe nothrow
+   {
+       final switch (nodeID)
+       {
+           case NodeID.mapping:
+               return "mapping";
+           case NodeID.sequence:
+               return "sequence";
+           case NodeID.scalar:
+               return "scalar";
+           case NodeID.invalid:
+               return "invalid";
+       }
+   }

    // Determine if the value can be converted to specified type.
@@ -2197,9 +2182,9 @@ struct Node
        // Every type allowed in Value should be convertible to string.
        static if(isSomeString!T) {return true;}
-       else static if(isFloatingPoint!T){return isInt() || isFloat();}
-       else static if(isIntegral!T) {return isInt();}
-       else static if(is(Unqual!T==bool)){return isBool();}
+       else static if(isFloatingPoint!T){return type.among!(NodeType.integer, NodeType.decimal);}
+       else static if(isIntegral!T) {return type == NodeType.integer;}
+       else static if(is(Unqual!T==bool)){return type == NodeType.boolean;}
        else {return false;}
    }

    /**
@@ -2209,14 +2194,14 @@ struct Node
     */
    void setStyle(CollectionStyle style) @safe
    {
-       enforce(!isValid || isSequence || isMapping, new NodeException(
+       enforce(!isValid || (nodeID.among(NodeID.mapping, NodeID.sequence)), new NodeException(
           "Cannot set collection style for non-collection nodes", startMark_));
        collectionStyle = style;
    }
    /// Ditto
    void setStyle(ScalarStyle style) @safe
    {
-       enforce(!isValid || (!isSequence && !isMapping), new NodeException(
+       enforce(!isValid || (nodeID == NodeID.scalar), new NodeException(
           "Cannot set scalar style for non-scalar nodes", startMark_));
        scalarStyle = style;
    }
@@ -2303,34 +2288,46 @@ struct Node
    }

private:
+   // Determine if the value stored by the node is of specified type.
+   //
+   // This only works for default YAML types, not for user defined types.
+   @property bool isType(T)() const
+   {
+       return value_.type is typeid(Unqual!T);
+   }
+
    // Implementation of contains() and containsKey().
    bool contains_(T, Flag!"key" key, string func)(T rhs) const
    {
-       static if(!key) if(isSequence)
-       {
-           foreach(ref node; getValue!(Node[]))
-           {
-               if(node == rhs){return true;}
-           }
-           return false;
-       }
-
-       if(isMapping)
-       {
-           return findPair!(T, key)(rhs) >= 0;
-       }
-
-       throw new NodeException("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node",
-                       startMark_);
+       final switch (nodeID)
+       {
+           case NodeID.mapping:
+               return findPair!(T, key)(rhs) >= 0;
+           case NodeID.sequence:
+               static if(!key)
+               {
+                   foreach(ref node; getValue!(Node[]))
+                   {
+                       if(node == rhs){return true;}
+                   }
+                   return false;
+               }
+               else
+               {
+                   throw new NodeException("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node",
+                                   startMark_);
+               }
+           case NodeID.scalar:
+           case NodeID.invalid:
+               throw new NodeException("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node",
+                               startMark_);
+       }
    }

    // Implementation of remove() and removeAt()
    void remove_(T, Flag!"key" key, string func)(T rhs)
    {
-       enforce(isSequence || isMapping,
-               new NodeException("Trying to " ~ func ~ "() from a " ~ nodeTypeString ~ " node",
-                               startMark_));
-
        static void removeElem(E, I)(ref Node node, I index)
        {
            auto elems = node.getValue!(E[]);
@@ -2339,32 +2336,36 @@ struct Node
            node.setValue(elems);
        }

-       if(isSequence())
-       {
-           static long getIndex(ref Node node, ref T rhs)
-           {
-               foreach(idx, ref elem; node.get!(Node[]))
-               {
-                   if(elem.convertsTo!T && elem.as!(T, No.stringConversion) == rhs)
-                   {
-                       return idx;
-                   }
-               }
-               return -1;
-           }
-
-           const index = select!key(rhs, getIndex(this, rhs));
-
-           // This throws if the index is not integral.
-           checkSequenceIndex(index);
-
-           static if(isIntegral!(typeof(index))){removeElem!Node(this, index);}
-           else {assert(false, "Non-integral sequence index");}
-       }
-       else if(isMapping())
-       {
-           const index = findPair!(T, key)(rhs);
-           if(index >= 0){removeElem!Pair(this, index);}
+       final switch (nodeID)
+       {
+           case NodeID.mapping:
+               const index = findPair!(T, key)(rhs);
+               if(index >= 0){removeElem!Pair(this, index);}
+               break;
+           case NodeID.sequence:
+               static long getIndex(ref Node node, ref T rhs)
+               {
+                   foreach(idx, ref elem; node.get!(Node[]))
+                   {
+                       if(elem.convertsTo!T && elem.as!(T, No.stringConversion) == rhs)
+                       {
+                           return idx;
+                       }
+                   }
+                   return -1;
+               }
+
+               const index = select!key(rhs, getIndex(this, rhs));
+
+               // This throws if the index is not integral.
+               checkSequenceIndex(index);
+
+               static if(isIntegral!(typeof(index))){removeElem!Node(this, index); break; }
+               else {assert(false, "Non-integral sequence index");}
+           case NodeID.scalar:
+           case NodeID.invalid:
+               throw new NodeException("Trying to " ~ func ~ "() from a " ~ nodeTypeString ~ " node",
+                               startMark_);
        }
    }
@@ -2380,10 +2381,10 @@ struct Node
            else {node = &pair.value;}

-           const bool typeMatch = (isFloatingPoint!T && (node.isInt || node.isFloat)) ||
-                                  (isIntegral!T && node.isInt) ||
-                                  (is(Unqual!T==bool) && node.isBool) ||
-                                  (isSomeString!T && node.isString) ||
+           const bool typeMatch = (isFloatingPoint!T && (node.type.among!(NodeType.integer, NodeType.decimal))) ||
+                                  (isIntegral!T && node.type == NodeType.integer) ||
+                                  (is(Unqual!T==bool) && node.type == NodeType.boolean) ||
+                                  (isSomeString!T && node.type == NodeType.string) ||
                                   (node.isType!T);
            if(typeMatch && *node == index)
            {
@@ -2396,7 +2397,7 @@ struct Node
    // Check if index is integral and in range.
    void checkSequenceIndex(T)(T index) const
    {
-       assert(isSequence,
+       assert(nodeID == NodeID.sequence,
              "checkSequenceIndex() called on a " ~ nodeTypeString ~ " node");

        static if(!isIntegral!T)

View file

@@ -43,7 +43,8 @@ class RepresenterException : YAMLException
 Node representData(const Node data, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe
 {
     Node result;
-    final switch(data.newType) {
+    final switch(data.type)
+    {
         case NodeType.null_:
             result = representNull();
             break;
@@ -73,17 +74,26 @@ Node representData(const Node data, ScalarStyle defaultScalarStyle, CollectionSt
         case NodeType.sequence:
             result = representNodes(data, defaultScalarStyle, defaultCollectionStyle);
             break;
+        case NodeType.invalid:
+            assert(0);
     }
-    if (result.isScalar && (result.scalarStyle == ScalarStyle.invalid))
+    final switch (result.nodeID)
     {
-        result.scalarStyle = defaultScalarStyle;
-    }
-    if ((result.isSequence || result.isMapping) && (defaultCollectionStyle != CollectionStyle.invalid))
-    {
-        result.collectionStyle = defaultCollectionStyle;
+        case NodeID.scalar:
+            if (result.scalarStyle == ScalarStyle.invalid)
+            {
+                result.scalarStyle = defaultScalarStyle;
+            }
+            break;
+        case NodeID.sequence, NodeID.mapping:
+            if (defaultCollectionStyle != CollectionStyle.invalid)
+            {
+                result.collectionStyle = defaultCollectionStyle;
+            }
+        case NodeID.invalid:
     }

     //Override tag if specified.
     if(data.tag_ !is null){result.tag_ = data.tag_;}
@@ -367,7 +377,7 @@ Node representNodes(const Node node, ScalarStyle defaultScalarStyle, CollectionS
     foreach(idx, item; nodes)
     {
         value[idx] = representData(item, defaultScalarStyle, defaultCollectionStyle);
-        const isScalar = value[idx].isScalar;
+        const isScalar = value[idx].nodeID == NodeID.scalar;
         const s = value[idx].scalarStyle;
         if(!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain))
         {
@@ -383,14 +393,14 @@ Node representNodes(const Node node, ScalarStyle defaultScalarStyle, CollectionS
 bool shouldUseBlockStyle(const Node value) @safe
 {
-    const isScalar = value.isScalar;
+    const isScalar = value.nodeID == NodeID.scalar;
     const s = value.scalarStyle;
     return (!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain));
 }
 bool shouldUseBlockStyle(const Node.Pair value) @safe
 {
-    const keyScalar = value.key.isScalar;
-    const valScalar = value.value.isScalar;
+    const keyScalar = value.key.nodeID == NodeID.scalar;
+    const valScalar = value.value.nodeID == NodeID.scalar;
     const keyStyle = value.key.scalarStyle;
     const valStyle = value.value.scalarStyle;
     if(!keyScalar ||

View file

@@ -113,15 +113,34 @@ struct Serializer(Range, CharType)
         */
        static bool anchorable(ref Node node) @safe
        {
-           if(node.isScalar)
+           if(node.nodeID == NodeID.scalar)
            {
-               return node.isType!string ? node.as!string.length > 64 :
-                      node.isType!(ubyte[]) ? node.as!(ubyte[]).length > 64:
-                      false;
+               return (node.type == NodeType.string) ? node.as!string.length > 64 :
+                      (node.type == NodeType.binary) ? node.as!(ubyte[]).length > 64 :
+                      false;
            }
            return node.length > 2;
        }

+       @safe unittest
+       {
+           import std.string : representation;
+           auto shortString = "not much";
+           auto longString = "A fairly long string that would be a good idea to add an anchor to";
+           auto node1 = Node(shortString);
+           auto node2 = Node(shortString.representation.dup);
+           auto node3 = Node(longString);
+           auto node4 = Node(longString.representation.dup);
+           auto node5 = Node([node1]);
+           auto node6 = Node([node1, node2, node3, node4]);
+           assert(!anchorable(node1));
+           assert(!anchorable(node2));
+           assert(anchorable(node3));
+           assert(anchorable(node4));
+           assert(!anchorable(node5));
+           assert(anchorable(node6));
+       }
+
        ///Add an anchor to the node if it's anchorable and not anchored yet.
        void anchorNode(ref Node node) @safe
        {
@@ -137,14 +156,24 @@ struct Serializer(Range, CharType)
            }

            anchors_[node] = null;
-           if(node.isSequence) foreach(ref Node item; node)
-           {
-               anchorNode(item);
-           }
-           else if(node.isMapping) foreach(ref Node key, ref Node value; node)
-           {
-               anchorNode(key);
-               anchorNode(value);
+           final switch (node.nodeID)
+           {
+               case NodeID.mapping:
+                   foreach(ref Node key, ref Node value; node)
+                   {
+                       anchorNode(key);
+                       anchorNode(value);
+                   }
+                   break;
+               case NodeID.sequence:
+                   foreach(ref Node item; node)
+                   {
+                       anchorNode(item);
+                   }
+                   break;
+               case NodeID.invalid:
+                   assert(0);
+               case NodeID.scalar:
            }
        }
@@ -174,45 +203,42 @@ struct Serializer(Range, CharType)
                }
                serializedNodes_[node] = true;
            }
-
-           if(node.isScalar)
-           {
-               assert(node.isType!string, "Scalar node type must be string before serialized");
-               auto value = node.as!string;
-               const detectedTag = resolver_.resolve(NodeID.scalar, null, value, true);
-               const bool isDetected = node.tag_ == detectedTag;
-
-               emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_,
-                             isDetected, value, node.scalarStyle));
-               return;
-           }
-           if(node.isSequence)
-           {
-               const defaultTag = resolver_.defaultSequenceTag;
-               const implicit = node.tag_ == defaultTag;
-               emitter_.emit(sequenceStartEvent(Mark(), Mark(), aliased, node.tag_,
-                             implicit, node.collectionStyle));
-               foreach(ref Node item; node)
-               {
-                   serializeNode(item);
-               }
-               emitter_.emit(sequenceEndEvent(Mark(), Mark()));
-               return;
-           }
-           if(node.isMapping)
-           {
-               const defaultTag = resolver_.defaultMappingTag;
-               const implicit = node.tag_ == defaultTag;
-               emitter_.emit(mappingStartEvent(Mark(), Mark(), aliased, node.tag_,
-                             implicit, node.collectionStyle));
-               foreach(ref Node key, ref Node value; node)
-               {
-                   serializeNode(key);
-                   serializeNode(value);
-               }
-               emitter_.emit(mappingEndEvent(Mark(), Mark()));
-               return;
-           }
-           assert(false, "This code should never be reached");
+           final switch (node.nodeID)
+           {
+               case NodeID.mapping:
+                   const defaultTag = resolver_.defaultMappingTag;
+                   const implicit = node.tag_ == defaultTag;
+                   emitter_.emit(mappingStartEvent(Mark(), Mark(), aliased, node.tag_,
+                                 implicit, node.collectionStyle));
+                   foreach(ref Node key, ref Node value; node)
+                   {
+                       serializeNode(key);
+                       serializeNode(value);
+                   }
+                   emitter_.emit(mappingEndEvent(Mark(), Mark()));
+                   return;
+               case NodeID.sequence:
+                   const defaultTag = resolver_.defaultSequenceTag;
+                   const implicit = node.tag_ == defaultTag;
+                   emitter_.emit(sequenceStartEvent(Mark(), Mark(), aliased, node.tag_,
+                                 implicit, node.collectionStyle));
+                   foreach(ref Node item; node)
+                   {
+                       serializeNode(item);
+                   }
+                   emitter_.emit(sequenceEndEvent(Mark(), Mark()));
+                   return;
+               case NodeID.scalar:
+                   assert(node.type == NodeType.string, "Scalar node type must be string before serialized");
+                   auto value = node.as!string;
+                   const detectedTag = resolver_.resolve(NodeID.scalar, null, value, true);
+                   const bool isDetected = node.tag_ == detectedTag;
+
+                   emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_,
+                                 isDetected, value, node.scalarStyle));
+                   return;
+               case NodeID.invalid:
+                   assert(0);
+           }
        }
 }

View file

@@ -39,10 +39,10 @@ void testImplicitResolver(string dataFilename, string detectFilename) @safe
     correctTag = readText(detectFilename).strip();

     node = Loader.fromFile(dataFilename).load();

-    assert(node.isSequence);
+    assert(node.nodeID == NodeID.sequence);
     foreach(ref Node scalar; node)
     {
-        assert(scalar.isScalar);
+        assert(scalar.nodeID == NodeID.scalar);
         assert(scalar.tag == correctTag);
     }
 }