make node types into enums and clean up code using them (#225)

* make node types into enums and clean up code using them

* add some tests for anchorable
Authored by Cameron Ross on 2019-01-27 22:26:00 -03:30; committed by Basile-z
parent bbfe2bbb69
commit b63ea1aaae
9 changed files with 505 additions and 453 deletions
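Aside (not part of the commit): a minimal sketch, in D, of the switch-based pattern this change moves callers to, assuming the public `Node.nodeID`, `Node.type`, `NodeID`, and `NodeType` API shown in the diffs below; the `describe` helper is hypothetical.

    import std.stdio : writeln;
    import dyaml;

    // Hypothetical helper: a final switch over the new NodeID enum forces every
    // node kind to be handled, so adding another enum member later becomes a
    // compile-time error instead of a silently missed else-if branch.
    void describe(Node node)
    {
        final switch (node.nodeID)
        {
            case NodeID.scalar:
                writeln("scalar of type ", node.type);
                break;
            case NodeID.sequence:
                writeln("sequence with ", node.length, " items");
                break;
            case NodeID.mapping:
                writeln("mapping with ", node.length, " pairs");
                break;
            case NodeID.invalid:
                writeln("uninitialized node");
                break;
        }
    }

    void main()
    {
        describe(Loader.fromString("[1, 2, 3]").load());
    }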

View file

@@ -14,41 +14,41 @@ void main()
 JSONValue toJSON(Node node)
 {
     JSONValue output;
-    if (node.isSequence)
+    final switch (node.type)
     {
+    case NodeType.sequence:
         output = JSONValue(string[].init);
         foreach (Node seqNode; node)
         {
             output.array ~= seqNode.toJSON();
         }
-    }
-    else if (node.isMapping)
-    {
+        break;
+    case NodeType.mapping:
         output = JSONValue(string[string].init);
         foreach (Node keyNode, Node valueNode; node)
         {
             output[keyNode.as!string] = valueNode.toJSON();
         }
-    }
-    else if (node.isString)
-    {
+        break;
+    case NodeType.string:
         output = node.as!string;
-    }
-    else if (node.isInt)
-    {
+        break;
+    case NodeType.integer:
         output = node.as!long;
-    }
-    else if (node.isFloat)
-    {
+        break;
+    case NodeType.decimal:
         output = node.as!real;
-    }
-    else if (node.isBool)
-    {
+        break;
+    case NodeType.boolean:
         output = node.as!bool;
-    }
-    else if (node.isTime)
-    {
+        break;
+    case NodeType.timestamp:
         output = node.as!SysTime.toISOExtString();
+        break;
+    case NodeType.merge:
+    case NodeType.null_:
+    case NodeType.binary:
+    case NodeType.invalid:
     }
     return output;
 }

View file

@@ -18,7 +18,10 @@ void extract(ref Node document) @safe
 {
     void crawl(ref Node root) @safe
     {
-        if(root.isScalar) switch(root.tag)
+        final switch (root.nodeID)
+        {
+        case NodeID.scalar:
+            switch(root.tag)
             {
                 case "tag:yaml.org,2002:null": auto value = root.as!YAMLNull; break;
                 case "tag:yaml.org,2002:bool": auto value = root.as!bool; break;
@@ -29,15 +32,23 @@ void extract(ref Node document) @safe
                 case "tag:yaml.org,2002:str": auto value = root.as!string; break;
                 default: writeln("Unrecognozed tag: ", root.tag);
             }
-        else if(root.isSequence) foreach(ref Node node; root)
+            break;
+        case NodeID.sequence:
+            foreach(ref Node node; root)
             {
                 crawl(node);
             }
-        else if(root.isMapping) foreach(ref Node key, ref Node value; root)
+            break;
+        case NodeID.mapping:
+            foreach(ref Node key, ref Node value; root)
             {
                 crawl(key);
                 crawl(value);
             }
+            break;
+        case NodeID.invalid:
+            assert(0);
+        }
     }
     crawl(document);

View file

@@ -23,14 +23,12 @@ string statistics(ref Node document)
             tags[root.tag] = 0;
         }
         ++tags[root.tag];
-        if(root.isScalar)
+        final switch (root.nodeID)
         {
+        case NodeID.scalar:
             ++scalars;
             return;
-        }
-        if(root.isSequence)
-        {
+        case NodeID.sequence:
             ++sequences;
             seqItems += root.length;
             foreach(ref Node node; root)
@@ -38,9 +36,7 @@ string statistics(ref Node document)
                 crawl(node);
             }
             return;
-        }
-        if(root.isMapping)
-        {
+        case NodeID.mapping:
             ++mappings;
             mapPairs += root.length;
             foreach(ref Node key, ref Node value; root)
@@ -49,6 +45,8 @@ string statistics(ref Node document)
                 crawl(value);
             }
             return;
+        case NodeID.invalid:
+            assert(0);
         }
     }

View file

@@ -275,7 +275,7 @@ struct Composer
                 throw new ConstructorException("While constructing a mapping, " ~
                                                "expected a mapping or a list of " ~
                                                "mappings for merging, but found: " ~
-                                               node.type.toString() ~
+                                               text(node.type) ~
                                                " NOTE: line/column shows topmost parent " ~
                                                "to which the content is being merged",
                                                startMark, endMark);
@@ -284,13 +284,14 @@ struct Composer
         ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
         auto pairAppender = &(pairAppenders_[pairAppenderLevel]);
-        if(root.isMapping)
+        final switch (root.nodeID)
         {
+        case NodeID.mapping:
             Node[] toMerge;
             toMerge.reserve(root.length);
             foreach (ref Node key, ref Node value; root)
             {
-                if(key.isType!YAMLMerge)
+                if(key.type == NodeType.merge)
                 {
                     toMerge ~= value;
                 }
@@ -305,17 +306,22 @@ struct Composer
                 pairAppender.put(flatten(node, startMark, endMark,
                                          pairAppenderLevel + 1, nodeAppenderLevel));
             }
-        }
-        //Must be a sequence of mappings.
-        else if(root.isSequence) foreach(ref Node node; root)
+            break;
+        case NodeID.sequence:
+            foreach (ref Node node; root)
             {
-                if(!node.isType!(Node.Pair[])){error(node);}
+                if (node.nodeID != NodeID.mapping)
+                {
+                    error(node);
+                }
                 pairAppender.put(flatten(node, startMark, endMark,
                                          pairAppenderLevel + 1, nodeAppenderLevel));
             }
-        else
-        {
+            break;
+        case NodeID.scalar:
+        case NodeID.invalid:
             error(root);
+            break;
         }
         auto flattened = pairAppender.data.dup;
@@ -345,7 +351,7 @@ struct Composer
                               composeNode(pairAppenderLevel + 1, nodeAppenderLevel));
             //Need to flatten and merge the node referred by YAMLMerge.
-            if(pair.key.isType!YAMLMerge)
+            if(pair.key.type == NodeType.merge)
             {
                 toMerge ~= tuple(pair.value, cast(Mark)parser_.front.endMark);
             }

View file

@@ -464,7 +464,7 @@ Node.Pair[] getPairs(string type, const Node[] nodes) @safe
     pairs.reserve(nodes.length);
     foreach(node; nodes)
     {
-        enforce(node.isMapping && node.length == 1,
+        enforce(node.nodeID == NodeID.mapping && node.length == 1,
                 new Exception("While constructing " ~ type ~
                               ", expected a mapping with single element"));

View file

@@ -41,11 +41,12 @@ class NodeException : YAMLException
 }
 // Node kinds.
-package enum NodeID : ubyte
+enum NodeID : ubyte
 {
     scalar,
     sequence,
-    mapping
+    mapping,
+    invalid
 }
 /// Null YAML type. Used in nodes with _null values.
@@ -102,7 +103,8 @@ enum NodeType
     timestamp,
     string,
     mapping,
-    sequence
+    sequence,
+    invalid
 }
 /** YAML node.
@@ -289,7 +291,7 @@ struct Node
 {
     {
         auto node = Node(42);
-        assert(node.isScalar && !node.isSequence && !node.isMapping);
+        assert(node.nodeID == NodeID.scalar);
         assert(node.as!int == 42 && node.as!float == 42.0f && node.as!string == "42");
     }
@@ -302,7 +304,7 @@ struct Node
     {
         with(Node([1, 2, 3]))
         {
-            assert(!isScalar() && isSequence && !isMapping);
+            assert(nodeID == NodeID.sequence);
            assert(length == 3);
             assert(opIndex(2).as!int == 3);
         }
@@ -315,7 +317,7 @@ struct Node
         aa["2"] = 2;
         with(Node(aa))
         {
-            assert(!isScalar() && !isSequence && isMapping);
+            assert(nodeID == NodeID.mapping);
             assert(length == 2);
             assert(opIndex("2").as!int == 2);
         }
@@ -384,7 +386,7 @@ struct Node
     {
         with(Node(["1", "2"], [1, 2]))
        {
-            assert(!isScalar() && !isSequence && isMapping);
+            assert(nodeID == NodeID.mapping);
             assert(length == 2);
             assert(opIndex("2").as!int == 2);
         }
@@ -397,30 +399,6 @@ struct Node
         return value_.hasValue;
     }
-    /// Is this node a scalar value?
-    @property bool isScalar() const @safe nothrow
-    {
-        return !(isMapping || isSequence);
-    }
-    /// Is this node a sequence?
-    @property bool isSequence() const @safe nothrow
-    {
-        return isType!(Node[]);
-    }
-    /// Is this node a mapping?
-    @property bool isMapping() const @safe nothrow
-    {
-        return isType!(Pair[]);
-    }
-    /// Is this node null?
-    @property bool isNull() const @safe nothrow
-    {
-        return isType!YAMLNull;
-    }
     /// Return tag of the node.
     @property string tag() const @safe nothrow
     {
@@ -552,15 +530,16 @@ struct Node
         // If we're getting from a mapping and we're not getting Node.Pair[],
         // we're getting the default value.
-        if(isMapping){return this["="].get!( T, stringConversion);}
+        if(nodeID == NodeID.mapping){return this["="].get!( T, stringConversion);}
         static if(isSomeString!T)
         {
             static if(!stringConversion)
             {
-                if(isString){return to!T(getValue!string);}
-                throw new NodeException("Node stores unexpected type: " ~ type.toString() ~
-                                        ". Expected: " ~ typeid(T).toString(), startMark_);
+                enforce(type == NodeType.string, new NodeException(
+                    "Node stores unexpected type: " ~ text(type) ~
+                    ". Expected: " ~ typeid(T).toString(), startMark_));
+                return to!T(getValue!string);
             }
             else
             {
@@ -577,15 +556,28 @@ struct Node
         }
         else static if(isFloatingPoint!T)
         {
-            /// Can convert int to float.
-            if(isInt()) {return to!T(getValue!long);}
-            else if(isFloat()){return to!T(getValue!real);}
-            else throw new NodeException("Node stores unexpected type: " ~ type.toString() ~
+            final switch (type)
+            {
+            case NodeType.integer:
+                return to!T(getValue!long);
+            case NodeType.decimal:
+                return to!T(getValue!real);
+            case NodeType.binary:
+            case NodeType.string:
+            case NodeType.boolean:
+            case NodeType.null_:
+            case NodeType.merge:
+            case NodeType.invalid:
+            case NodeType.timestamp:
+            case NodeType.mapping:
+            case NodeType.sequence:
+                throw new NodeException("Node stores unexpected type: " ~ text(type) ~
                                         ". Expected: " ~ typeid(T).toString, startMark_);
             }
+        }
         else static if(isIntegral!T)
         {
-            enforce(isInt(), new NodeException("Node stores unexpected type: " ~ type.toString() ~
+            enforce(type == NodeType.integer, new NodeException("Node stores unexpected type: " ~ text(type) ~
                                                ". Expected: " ~ typeid(T).toString, startMark_));
             immutable temp = getValue!long;
             enforce(temp >= T.min && temp <= T.max,
@@ -593,7 +585,7 @@ struct Node
                     " out of range. Value: " ~ to!string(temp), startMark_));
             return temp.to!T;
         }
-        else throw new NodeException("Node stores unexpected type: " ~ type.toString() ~
+        else throw new NodeException("Node stores unexpected type: " ~ text(type) ~
                                      ". Expected: " ~ typeid(T).toString, startMark_);
     }
 }
@@ -856,11 +848,18 @@ struct Node
      */
     @property size_t length() const @safe
     {
-        if(isSequence) { return getValue!(Node[]).length; }
-        else if(isMapping) { return getValue!(Pair[]).length; }
+        final switch(nodeID)
+        {
+        case NodeID.sequence:
+            return getValue!(Node[]).length;
+        case NodeID.mapping:
+            return getValue!(Pair[]).length;
+        case NodeID.scalar:
+        case NodeID.invalid:
             throw new NodeException("Trying to get length of a " ~ nodeTypeString ~ " node",
                                     startMark_);
         }
+    }
     @safe unittest
     {
         auto node = Node([1,2,3]);
@@ -892,17 +891,19 @@ struct Node
      */
     ref inout(Node) opIndex(T)(T index) inout @safe
     {
-        if(isSequence)
+        final switch (nodeID)
         {
+        case NodeID.sequence:
             checkSequenceIndex(index);
             static if(isIntegral!T)
             {
                 return getValue!(Node[])[index];
             }
-            assert(false);
-        }
-        else if(isMapping)
-        {
+            else
+            {
+                assert(false, "Only integers may index sequence nodes");
+            }
+        case NodeID.mapping:
             auto idx = findPair(index);
             if(idx >= 0)
             {
@@ -911,9 +912,11 @@ struct Node
             string msg = "Mapping index not found" ~ (isSomeString!T ? ": " ~ to!string(index) : "");
             throw new NodeException(msg, startMark_);
-        }
+        case NodeID.scalar:
+        case NodeID.invalid:
             throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_);
         }
+    }
     ///
     @safe unittest
     {
@@ -1079,9 +1082,9 @@ struct Node
      */
     void opIndexAssign(K, V)(V value, K index)
     {
-        if(isSequence())
+        final switch (nodeID)
         {
-            // This ensures K is integral.
+        case NodeID.sequence:
             checkSequenceIndex(index);
             static if(isIntegral!K || is(Unqual!K == bool))
             {
@@ -1091,10 +1094,8 @@ struct Node
                 setValue(nodes);
                 return;
             }
-            assert(false);
-        }
-        else if(isMapping())
-        {
+            assert(false, "Only integers may index sequence nodes");
+        case NodeID.mapping:
             const idx = findPair(index);
             if(idx < 0){add(index, value);}
             else
@@ -1105,10 +1106,11 @@ struct Node
                 setValue(pairs);
             }
             return;
-        }
+        case NodeID.scalar:
+        case NodeID.invalid:
             throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_);
         }
+    }
     @safe unittest
     {
         with(Node([1, 2, 3, 4, 3]))
@@ -1215,14 +1217,14 @@ struct Node
     }
     auto sequence()
     {
-        enforce(isSequence,
+        enforce(nodeID == NodeID.sequence,
                 new NodeException("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node",
                                   startMark_));
         return Range!(Node[])(get!(Node[]));
     }
     auto sequence() const
     {
-        enforce(isSequence,
+        enforce(nodeID == NodeID.sequence,
                 new NodeException("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node",
                                   startMark_));
         return Range!(const(Node)[])(get!(Node[]));
@@ -1305,14 +1307,14 @@ struct Node
     auto mapping()
     {
-        enforce(isMapping,
+        enforce(nodeID == NodeID.mapping,
                 new NodeException("Trying to 'mapping'-iterate over a "
                                   ~ nodeTypeString ~ " node", startMark_));
         return Range!(Node.Pair[])(get!(Node.Pair[]));
     }
     auto mapping() const
     {
-        enforce(isMapping,
+        enforce(nodeID == NodeID.mapping,
                 new NodeException("Trying to 'mapping'-iterate over a "
                                   ~ nodeTypeString ~ " node", startMark_));
         return Range!(const(Node.Pair)[])(get!(Node.Pair[]));
@@ -1350,7 +1352,7 @@ struct Node
      */
     auto mappingKeys(K = Node)() const
     {
-        enforce(isMapping,
+        enforce(nodeID == NodeID.mapping,
                 new NodeException("Trying to 'mappingKeys'-iterate over a "
                                   ~ nodeTypeString ~ " node", startMark_));
         static if (is(Unqual!K == Node))
@@ -1382,7 +1384,7 @@ struct Node
      */
     auto mappingValues(V = Node)() const
     {
-        enforce(isMapping,
+        enforce(nodeID == NodeID.mapping,
                 new NodeException("Trying to 'mappingValues'-iterate over a "
                                   ~ nodeTypeString ~ " node", startMark_));
         static if (is(Unqual!V == Node))
@@ -1415,7 +1417,7 @@ struct Node
      */
     int opApply(D)(D dg) if (isDelegate!D && (Parameters!D.length == 1))
     {
-        enforce(isSequence,
+        enforce(nodeID == NodeID.sequence,
                 new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node",
                                   startMark_));
@@ -1438,7 +1440,7 @@ struct Node
     /// ditto
     int opApply(D)(D dg) const if (isDelegate!D && (Parameters!D.length == 1))
     {
-        enforce(isSequence,
+        enforce(nodeID == NodeID.sequence,
                 new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node",
                                   startMark_));
@@ -1529,7 +1531,7 @@ struct Node
     {
         alias K = Parameters!DG[0];
         alias V = Parameters!DG[1];
-        enforce(isMapping,
+        enforce(nodeID == NodeID.mapping,
                 new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node",
                                   startMark_));
@@ -1566,7 +1568,7 @@ struct Node
     {
         alias K = Parameters!DG[0];
         alias V = Parameters!DG[1];
-        enforce(isMapping,
+        enforce(nodeID == NodeID.mapping,
                 new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node",
                                   startMark_));
@@ -1703,7 +1705,7 @@ struct Node
         {
             setValue(Node[].init);
         }
-        enforce(isSequence(),
+        enforce(nodeID == NodeID.sequence,
                 new NodeException("Trying to add an element to a " ~ nodeTypeString ~ " node", startMark_));
         auto nodes = get!(Node[])();
@@ -1754,7 +1756,7 @@ struct Node
         {
            setValue(Node.Pair[].init);
         }
-        enforce(isMapping(),
+        enforce(nodeID == NodeID.mapping,
                 new NodeException("Trying to add a key-value pair to a " ~
                                   nodeTypeString ~ " node",
                                   startMark_));
@@ -1802,7 +1804,7 @@ struct Node
     inout(Node*) opBinaryRight(string op, K)(K key) inout
         if (op == "in")
     {
-        enforce(isMapping, new NodeException("Trying to use 'in' on a " ~
+        enforce(nodeID == NodeID.mapping, new NodeException("Trying to use 'in' on a " ~
                                              nodeTypeString ~ " node", startMark_));
         auto idx = findPair(key);
@@ -1943,7 +1945,7 @@ struct Node
         if(!v1){return v2 ? -1 : 0;}
         if(!v2){return 1;}
-        const typeCmp = cmp(newType, rhs.newType);
+        const typeCmp = cmp(type, rhs.type);
         if(typeCmp != 0){return typeCmp;}
         static int compareCollections(T)(const ref Node lhs, const ref Node rhs)
@@ -1964,38 +1966,25 @@ struct Node
             return 0;
         }
-        if(isSequence){return compareCollections!(Node[])(this, rhs);}
-        if(isMapping) {return compareCollections!(Pair[])(this, rhs);}
-        if(isString)
+        final switch(type)
         {
+        case NodeType.string:
             return std.algorithm.cmp(getValue!string,
                                      rhs.getValue!string);
-        }
-        if(isInt)
-        {
+        case NodeType.integer:
             return cmp(getValue!long, rhs.getValue!long);
-        }
-        if(isBool)
-        {
+        case NodeType.boolean:
             const b1 = getValue!bool;
             const b2 = rhs.getValue!bool;
             return b1 ? b2 ? 0 : 1
                       : b2 ? -1 : 0;
-        }
-        if(isBinary)
-        {
+        case NodeType.binary:
             const b1 = getValue!(ubyte[]);
             const b2 = rhs.getValue!(ubyte[]);
             return std.algorithm.cmp(b1, b2);
-        }
-        if(isNull)
-        {
+        case NodeType.null_:
             return 0;
-        }
-        // Floats need special handling for NaNs .
-        // We consider NaN to be lower than any float.
-        if(isFloat)
-        {
+        case NodeType.decimal:
             const r1 = getValue!real;
             const r2 = rhs.getValue!real;
             if(isNaN(r1))
@@ -2012,14 +2001,19 @@ struct Node
                 return 0;
             }
             return cmp(r1, r2);
-        }
-        else if(isTime)
-        {
+        case NodeType.timestamp:
             const t1 = getValue!SysTime;
             const t2 = rhs.getValue!SysTime;
             return cmp(t1, t2);
+        case NodeType.mapping:
+            return compareCollections!(Pair[])(this, rhs);
+        case NodeType.sequence:
+            return compareCollections!(Node[])(this, rhs);
+        case NodeType.merge:
+            assert(false, "Cannot compare merge nodes");
+        case NodeType.invalid:
+            assert(false, "Cannot compare invalid nodes");
         }
-        assert(false, "Unknown type of node for comparison : " ~ type.toString());
     }
     // Ensure opCmp is symmetric for collections
@@ -2054,56 +2048,8 @@ struct Node
         assert(Node(42).toHash() != Node(42, "some-tag").toHash());
     }
-package:
-    // Get a string representation of the node tree. Used for debugging.
-    //
-    // Params: level = Level of the node in the tree.
-    //
-    // Returns: String representing the node tree.
-    @property string debugString(uint level = 0) const @safe
-    {
-        string indent;
-        foreach(i; 0 .. level){indent ~= " ";}
-        if(!isValid){return indent ~ "invalid";}
-        if(isSequence)
-        {
-            string result = indent ~ "sequence:\n";
-            foreach(ref node; get!(Node[]))
-            {
-                result ~= node.debugString(level + 1);
-            }
-            return result;
-        }
-        if(isMapping)
-        {
-            string result = indent ~ "mapping:\n";
-            foreach(ref pair; get!(Node.Pair[]))
-            {
-                result ~= indent ~ " pair\n";
-                result ~= pair.key.debugString(level + 2);
-                result ~= pair.value.debugString(level + 2);
-            }
-            return result;
-        }
-        if(isScalar)
-        {
-            return indent ~ "scalar(" ~
-                   (convertsTo!string ? get!string : type.toString()) ~ ")\n";
-        }
-        assert(false);
-    }
-    // Get type of the node value.
-    @property TypeInfo type() const @safe nothrow
-    {
-        return value_.type;
-    }
-    // Get type of the node value.
-    @property NodeType newType() const @safe nothrow
+    /// Get type of the node value.
+    @property NodeType type() const @safe nothrow
     {
         if (value_.type is typeid(bool))
         {
@@ -2145,49 +2091,88 @@ struct Node
         {
             return NodeType.decimal;
         }
+        else if (!value_.hasValue)
+        {
+            return NodeType.invalid;
+        }
         else assert(0, text(value_.type));
     }
-public:
-    // Determine if the value stored by the node is of specified type.
+    /// Get the kind of node this is.
+    @property NodeID nodeID() const @safe nothrow
+    {
+        final switch (type)
+        {
+        case NodeType.sequence:
+            return NodeID.sequence;
+        case NodeType.mapping:
+            return NodeID.mapping;
+        case NodeType.boolean:
+        case NodeType.integer:
+        case NodeType.binary:
+        case NodeType.string:
+        case NodeType.timestamp:
+        case NodeType.null_:
+        case NodeType.merge:
+        case NodeType.decimal:
+            return NodeID.scalar;
+        case NodeType.invalid:
+            return NodeID.invalid;
+        }
+    }
+package:
+    // Get a string representation of the node tree. Used for debugging.
     //
-    // This only works for default YAML types, not for user defined types.
-    @property bool isType(T)() const
+    // Params: level = Level of the node in the tree.
+    //
+    // Returns: String representing the node tree.
+    @property string debugString(uint level = 0) const @safe
     {
-        return this.type is typeid(Unqual!T);
+        string indent;
+        foreach(i; 0 .. level){indent ~= " ";}
+        final switch (nodeID)
+        {
+        case NodeID.invalid:
+            return indent ~ "invalid";
+        case NodeID.sequence:
+            string result = indent ~ "sequence:\n";
+            foreach(ref node; get!(Node[]))
+            {
+                result ~= node.debugString(level + 1);
+            }
+            return result;
+        case NodeID.mapping:
+            string result = indent ~ "mapping:\n";
+            foreach(ref pair; get!(Node.Pair[]))
+            {
+                result ~= indent ~ " pair\n";
+                result ~= pair.key.debugString(level + 2);
+                result ~= pair.value.debugString(level + 2);
+            }
+            return result;
+        case NodeID.scalar:
+            return indent ~ "scalar(" ~
+                (convertsTo!string ? get!string : text(type)) ~ ")\n";
        }
     }
-    // Is the value a bool?
-    alias isBool = isType!bool;
-    // Is the value a raw binary buffer?
-    alias isBinary = isType!(ubyte[]);
-    // Is the value an integer?
-    alias isInt = isType!long;
-    // Is the value a floating point number?
-    alias isFloat = isType!real;
-    // Is the value a string?
-    alias isString = isType!string;
-    // Is the value a timestamp?
-    alias isTime = isType!SysTime;
-    // Does given node have the same type as this node?
-    bool hasEqualType(const ref Node node) const @safe
-    {
-        return this.type is node.type;
-    }
-    // Return a string describing node type (sequence, mapping or scalar)
+public:
     @property string nodeTypeString() const @safe nothrow
     {
-        assert(isScalar || isSequence || isMapping, "Unknown node type");
-        return isScalar ? "scalar" :
-               isSequence ? "sequence" :
-               isMapping ? "mapping" : "";
+        final switch (nodeID)
+        {
+        case NodeID.mapping:
+            return "mapping";
+        case NodeID.sequence:
+            return "sequence";
+        case NodeID.scalar:
+            return "scalar";
+        case NodeID.invalid:
+            return "invalid";
+        }
     }
     // Determine if the value can be converted to specified type.
@@ -2197,9 +2182,9 @@ struct Node
         // Every type allowed in Value should be convertible to string.
         static if(isSomeString!T) {return true;}
-        else static if(isFloatingPoint!T){return isInt() || isFloat();}
-        else static if(isIntegral!T) {return isInt();}
-        else static if(is(Unqual!T==bool)){return isBool();}
+        else static if(isFloatingPoint!T){return type.among!(NodeType.integer, NodeType.decimal);}
+        else static if(isIntegral!T) {return type == NodeType.integer;}
+        else static if(is(Unqual!T==bool)){return type == NodeType.boolean;}
         else {return false;}
     }
     /**
@@ -2209,14 +2194,14 @@ struct Node
      */
     void setStyle(CollectionStyle style) @safe
     {
-        enforce(!isValid || isSequence || isMapping, new NodeException(
+        enforce(!isValid || (nodeID.among(NodeID.mapping, NodeID.sequence)), new NodeException(
                 "Cannot set collection style for non-collection nodes", startMark_));
         collectionStyle = style;
     }
     /// Ditto
     void setStyle(ScalarStyle style) @safe
     {
-        enforce(!isValid || (!isSequence && !isMapping), new NodeException(
+        enforce(!isValid || (nodeID == NodeID.scalar), new NodeException(
                 "Cannot set scalar style for non-scalar nodes", startMark_));
         scalarStyle = style;
     }
@@ -2303,10 +2288,23 @@ struct Node
     }
 private:
+    // Determine if the value stored by the node is of specified type.
+    //
+    // This only works for default YAML types, not for user defined types.
+    @property bool isType(T)() const
+    {
+        return value_.type is typeid(Unqual!T);
+    }
     // Implementation of contains() and containsKey().
     bool contains_(T, Flag!"key" key, string func)(T rhs) const
     {
-        static if(!key) if(isSequence)
+        final switch (nodeID)
         {
+        case NodeID.mapping:
+            return findPair!(T, key)(rhs) >= 0;
+        case NodeID.sequence:
+            static if(!key)
+            {
             foreach(ref node; getValue!(Node[]))
             {
@@ -2314,23 +2312,22 @@ struct Node
                 }
                 return false;
             }
-        if(isMapping)
+            else
             {
-            return findPair!(T, key)(rhs) >= 0;
-        }
                 throw new NodeException("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node",
                                         startMark_);
             }
+        case NodeID.scalar:
+        case NodeID.invalid:
+            throw new NodeException("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node",
+                                    startMark_);
+        }
+    }
     // Implementation of remove() and removeAt()
     void remove_(T, Flag!"key" key, string func)(T rhs)
     {
-        enforce(isSequence || isMapping,
-                new NodeException("Trying to " ~ func ~ "() from a " ~ nodeTypeString ~ " node",
-                                  startMark_));
         static void removeElem(E, I)(ref Node node, I index)
         {
             auto elems = node.getValue!(E[]);
@@ -2339,8 +2336,13 @@ struct Node
             node.setValue(elems);
         }
-        if(isSequence())
+        final switch (nodeID)
         {
+        case NodeID.mapping:
+            const index = findPair!(T, key)(rhs);
+            if(index >= 0){removeElem!Pair(this, index);}
+            break;
+        case NodeID.sequence:
             static long getIndex(ref Node node, ref T rhs)
             {
                 foreach(idx, ref elem; node.get!(Node[]))
@@ -2358,13 +2360,12 @@ struct Node
             // This throws if the index is not integral.
             checkSequenceIndex(index);
-            static if(isIntegral!(typeof(index))){removeElem!Node(this, index);}
+            static if(isIntegral!(typeof(index))){removeElem!Node(this, index); break; }
             else {assert(false, "Non-integral sequence index");}
-        }
-        else if(isMapping())
-        {
-            const index = findPair!(T, key)(rhs);
-            if(index >= 0){removeElem!Pair(this, index);}
+        case NodeID.scalar:
+        case NodeID.invalid:
+            throw new NodeException("Trying to " ~ func ~ "() from a " ~ nodeTypeString ~ " node",
+                                    startMark_);
         }
     }
@@ -2380,10 +2381,10 @@ struct Node
             else {node = &pair.value;}
-            const bool typeMatch = (isFloatingPoint!T && (node.isInt || node.isFloat)) ||
-                                   (isIntegral!T && node.isInt) ||
-                                   (is(Unqual!T==bool) && node.isBool) ||
-                                   (isSomeString!T && node.isString) ||
+            const bool typeMatch = (isFloatingPoint!T && (node.type.among!(NodeType.integer, NodeType.decimal))) ||
+                                   (isIntegral!T && node.type == NodeType.integer) ||
+                                   (is(Unqual!T==bool) && node.type == NodeType.boolean) ||
+                                   (isSomeString!T && node.type == NodeType.string) ||
                                    (node.isType!T);
             if(typeMatch && *node == index)
             {
@@ -2396,7 +2397,7 @@ struct Node
     // Check if index is integral and in range.
     void checkSequenceIndex(T)(T index) const
     {
-        assert(isSequence,
+        assert(nodeID == NodeID.sequence,
               "checkSequenceIndex() called on a " ~ nodeTypeString ~ " node");
        static if(!isIntegral!T)

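Aside (not part of the diff): with the stricter NodeType checks in the node module above, a mismatched conversion surfaces as a NodeException. A minimal sketch, assuming the dyaml `Loader`, `Node.get`, and `NodeException` API shown in this commit:

    import std.exception : collectException;
    import dyaml;

    void main()
    {
        auto root = Loader.fromString("foo: bar").load();

        // "bar" resolves to a string scalar, so asking for an integer fails the
        // NodeType.integer check and throws NodeException.
        auto err = collectException!NodeException(root["foo"].get!int);
        assert(err !is null);

        // The same value converts fine when the requested type matches.
        assert(root["foo"].as!string == "bar");
    }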
View file

@@ -43,7 +43,8 @@ class RepresenterException : YAMLException
 Node representData(const Node data, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe
 {
     Node result;
-    final switch(data.newType) {
+    final switch(data.type)
+    {
     case NodeType.null_:
         result = representNull();
         break;
@@ -73,17 +74,26 @@ Node representData(const Node data, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe
     case NodeType.sequence:
         result = representNodes(data, defaultScalarStyle, defaultCollectionStyle);
         break;
+    case NodeType.invalid:
+        assert(0);
     }
-    if (result.isScalar && (result.scalarStyle == ScalarStyle.invalid))
+    final switch (result.nodeID)
+    {
+    case NodeID.scalar:
+        if (result.scalarStyle == ScalarStyle.invalid)
         {
             result.scalarStyle = defaultScalarStyle;
         }
+        break;
-    if ((result.isSequence || result.isMapping) && (defaultCollectionStyle != CollectionStyle.invalid))
+    case NodeID.sequence, NodeID.mapping:
+        if (defaultCollectionStyle != CollectionStyle.invalid)
         {
             result.collectionStyle = defaultCollectionStyle;
         }
+    case NodeID.invalid:
+    }
     //Override tag if specified.
     if(data.tag_ !is null){result.tag_ = data.tag_;}
@@ -367,7 +377,7 @@ Node representNodes(const Node node, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe
     foreach(idx, item; nodes)
     {
         value[idx] = representData(item, defaultScalarStyle, defaultCollectionStyle);
-        const isScalar = value[idx].isScalar;
+        const isScalar = value[idx].nodeID == NodeID.scalar;
         const s = value[idx].scalarStyle;
         if(!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain))
         {
@@ -383,14 +393,14 @@ Node representNodes(const Node node, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe
 bool shouldUseBlockStyle(const Node value) @safe
 {
-    const isScalar = value.isScalar;
+    const isScalar = value.nodeID == NodeID.scalar;
     const s = value.scalarStyle;
     return (!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain));
 }
 bool shouldUseBlockStyle(const Node.Pair value) @safe
 {
-    const keyScalar = value.key.isScalar;
-    const valScalar = value.value.isScalar;
+    const keyScalar = value.key.nodeID == NodeID.scalar;
+    const valScalar = value.value.nodeID == NodeID.scalar;
     const keyStyle = value.key.scalarStyle;
     const valStyle = value.value.scalarStyle;
     if(!keyScalar ||

View file

@@ -113,15 +113,34 @@ struct Serializer(Range, CharType)
      */
     static bool anchorable(ref Node node) @safe
     {
-        if(node.isScalar)
+        if(node.nodeID == NodeID.scalar)
         {
-            return node.isType!string ? node.as!string.length > 64 :
-                   node.isType!(ubyte[]) ? node.as!(ubyte[]).length > 64:
+            return (node.type == NodeType.string) ? node.as!string.length > 64 :
+                   (node.type == NodeType.binary) ? node.as!(ubyte[]).length > 64 :
                                            false;
         }
         return node.length > 2;
     }
+    @safe unittest
+    {
+        import std.string : representation;
+        auto shortString = "not much";
+        auto longString = "A fairly long string that would be a good idea to add an anchor to";
+        auto node1 = Node(shortString);
+        auto node2 = Node(shortString.representation.dup);
+        auto node3 = Node(longString);
+        auto node4 = Node(longString.representation.dup);
+        auto node5 = Node([node1]);
+        auto node6 = Node([node1, node2, node3, node4]);
+        assert(!anchorable(node1));
+        assert(!anchorable(node2));
+        assert(anchorable(node3));
+        assert(anchorable(node4));
+        assert(!anchorable(node5));
+        assert(anchorable(node6));
+    }
     ///Add an anchor to the node if it's anchorable and not anchored yet.
     void anchorNode(ref Node node) @safe
     {
@@ -137,15 +156,25 @@ struct Serializer(Range, CharType)
         }
         anchors_[node] = null;
-        if(node.isSequence) foreach(ref Node item; node)
+        final switch (node.nodeID)
         {
-            anchorNode(item);
-        }
-        else if(node.isMapping) foreach(ref Node key, ref Node value; node)
-        {
+        case NodeID.mapping:
+            foreach(ref Node key, ref Node value; node)
+            {
                 anchorNode(key);
                 anchorNode(value);
             }
+            break;
+        case NodeID.sequence:
+            foreach(ref Node item; node)
+            {
+                anchorNode(item);
+            }
+            break;
+        case NodeID.invalid:
+            assert(0);
+        case NodeID.scalar:
+        }
     }
     ///Generate and return a new anchor.
@@ -174,33 +203,9 @@ struct Serializer(Range, CharType)
             }
             serializedNodes_[node] = true;
         }
-        if(node.isScalar)
-        {
-            assert(node.isType!string, "Scalar node type must be string before serialized");
-            auto value = node.as!string;
-            const detectedTag = resolver_.resolve(NodeID.scalar, null, value, true);
-            const bool isDetected = node.tag_ == detectedTag;
-            emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_,
-                          isDetected, value, node.scalarStyle));
-            return;
-        }
-        if(node.isSequence)
-        {
-            const defaultTag = resolver_.defaultSequenceTag;
-            const implicit = node.tag_ == defaultTag;
-            emitter_.emit(sequenceStartEvent(Mark(), Mark(), aliased, node.tag_,
-                          implicit, node.collectionStyle));
-            foreach(ref Node item; node)
-            {
-                serializeNode(item);
-            }
-            emitter_.emit(sequenceEndEvent(Mark(), Mark()));
-            return;
-        }
-        if(node.isMapping)
+        final switch (node.nodeID)
         {
+        case NodeID.mapping:
             const defaultTag = resolver_.defaultMappingTag;
             const implicit = node.tag_ == defaultTag;
             emitter_.emit(mappingStartEvent(Mark(), Mark(), aliased, node.tag_,
@@ -212,7 +217,28 @@ struct Serializer(Range, CharType)
             }
             emitter_.emit(mappingEndEvent(Mark(), Mark()));
             return;
+        case NodeID.sequence:
+            const defaultTag = resolver_.defaultSequenceTag;
+            const implicit = node.tag_ == defaultTag;
+            emitter_.emit(sequenceStartEvent(Mark(), Mark(), aliased, node.tag_,
+                          implicit, node.collectionStyle));
+            foreach(ref Node item; node)
+            {
+                serializeNode(item);
+            }
+            emitter_.emit(sequenceEndEvent(Mark(), Mark()));
+            return;
+        case NodeID.scalar:
+            assert(node.type == NodeType.string, "Scalar node type must be string before serialized");
+            auto value = node.as!string;
+            const detectedTag = resolver_.resolve(NodeID.scalar, null, value, true);
+            const bool isDetected = node.tag_ == detectedTag;
+            emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_,
+                          isDetected, value, node.scalarStyle));
+            return;
+        case NodeID.invalid:
+            assert(0);
         }
-        assert(false, "This code should never be reached");
     }
 }

View file

@@ -39,10 +39,10 @@ void testImplicitResolver(string dataFilename, string detectFilename) @safe
     correctTag = readText(detectFilename).strip();
     node = Loader.fromFile(dataFilename).load();
-    assert(node.isSequence);
+    assert(node.nodeID == NodeID.sequence);
     foreach(ref Node scalar; node)
     {
-        assert(scalar.isScalar);
+        assert(scalar.nodeID == NodeID.scalar);
         assert(scalar.tag == correctTag);
     }
 }