Rangify parser (#176)
Rangify parser merged-on-behalf-of: BBasile <BBasile@users.noreply.github.com>
parent 26eb0913f1
commit 040d19b9bc
@@ -96,21 +96,18 @@ final class Composer
     */
    bool checkNode() @safe
    {
-        //Drop the STREAM-START event.
-        if(parser_.checkEvent(EventID.StreamStart))
-        {
-            parser_.getEvent();
-        }
+        // If next event is stream start, skip it
+        parser_.skipOver!"a.id == b"(EventID.StreamStart);

         //True if there are more documents available.
-        return !parser_.checkEvent(EventID.StreamEnd);
+        return parser_.front.id != EventID.StreamEnd;
    }

    ///Get a YAML document as a node (the root of the document).
    Node getNode() @safe
    {
        //Get the root node of the next document.
-        assert(!parser_.checkEvent(EventID.StreamEnd),
+        assert(parser_.front.id != EventID.StreamEnd,
                "Trying to get a node from Composer when there is no node to " ~
                "get. use checkNode() to determine if there is a node.");

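The rewritten checkNode treats the parser as an input range: std.algorithm's skipOver drops the STREAM-START event when it is at the front, and front.id peeks at the next event without consuming it. A minimal self-contained sketch of the same idiom (not part of the patch), using a slice of stand-in EventID values in place of the real parser range:

import std.algorithm.searching : skipOver;
import std.range.primitives : front;

// Stand-in for dyaml's EventID; only the members needed for the sketch.
enum EventID { StreamStart, DocumentStart, Scalar, StreamEnd }

void main()
{
    // Stand-in for parser_: an input range (here a slice) of event IDs.
    auto events = [EventID.StreamStart, EventID.DocumentStart,
                   EventID.Scalar, EventID.StreamEnd];

    // Drop the STREAM-START marker only if it is the front element,
    // mirroring parser_.skipOver!"a.id == b"(EventID.StreamStart).
    events.skipOver(EventID.StreamStart);

    // checkNode: more documents are available while STREAM-END is not next.
    const hasNode = events.front != EventID.StreamEnd;
    assert(hasNode);
}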
@@ -120,25 +117,31 @@ final class Composer
    ///Get single YAML document, throwing if there is more than one document.
    Node getSingleNode() @safe
    {
-        assert(!parser_.checkEvent(EventID.StreamEnd),
+        assert(parser_.front.id != EventID.StreamEnd,
                "Trying to get a node from Composer when there is no node to " ~
                "get. use checkNode() to determine if there is a node.");

        Node document = composeDocument();

        //Ensure that the stream contains no more documents.
-        enforce(parser_.checkEvent(EventID.StreamEnd),
+        enforce(parser_.front.id == EventID.StreamEnd,
                new ComposerException("Expected single document in the stream, " ~
                                      "but found another document.",
-                                      parser_.getEvent().startMark));
+                                      parser_.front.startMark));

-        //Drop the STREAM-END event.
-        parser_.getEvent();
+        skipExpected(EventID.StreamEnd);
+        assert(parser_.empty, "Found event after stream end");

        return document;
    }

private:

+    void skipExpected(const EventID id) @safe
+    {
+        const foundExpected = parser_.skipOver!"a.id == b"(id);
+        assert(foundExpected, text("Expected ", id, " not found."));
+    }
    ///Ensure that appenders for specified nesting levels exist.
    ///
    ///Params: pairAppenderLevel = Current level in the pair appender stack.
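The new private skipExpected helper wraps the same skipOver call and asserts that the expected event really was at the front, so call sites such as composeDocument below shrink to a single line each. A rough stand-alone illustration of that contract (the Event struct and the slice of events are simplified stand-ins, not the library's types):

import std.algorithm.searching : skipOver;
import std.conv : text;

enum EventID { DocumentStart, Scalar, DocumentEnd }
struct Event { EventID id; }

// Same shape as the helper added above, but over a plain Event[] slice.
void skipExpected(ref Event[] events, const EventID id)
{
    // skipOver returns false when the front event does not match;
    // the helper turns that into an assertion failure with a readable message.
    const foundExpected = events.skipOver!((a, b) => a.id == b)(id);
    assert(foundExpected, text("Expected ", id, " not found."));
}

void main()
{
    auto events = [Event(EventID.DocumentStart), Event(EventID.Scalar),
                   Event(EventID.DocumentEnd)];
    skipExpected(events, EventID.DocumentStart); // consumes exactly one event
    assert(events.length == 2);
}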
@@ -159,14 +162,12 @@ final class Composer
    ///Compose a YAML document and return its root node.
    Node composeDocument() @safe
    {
-        //Drop the DOCUMENT-START event.
-        parser_.getEvent();
+        skipExpected(EventID.DocumentStart);

        //Compose the root node.
        Node node = composeNode(0, 0);

-        //Drop the DOCUMENT-END event.
-        parser_.getEvent();
+        skipExpected(EventID.DocumentEnd);

        anchors_.destroy();
        return node;
@@ -178,9 +179,10 @@ final class Composer
    /// nodeAppenderLevel = Current level of the node appender stack.
    Node composeNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe
    {
-        if(parser_.checkEvent(EventID.Alias))
+        if(parser_.front.id == EventID.Alias)
        {
-            const event = parser_.getEvent();
+            const event = parser_.front;
+            parser_.popFront();
            const anchor = event.anchor;
            enforce((anchor in anchors_) !is null,
                    new ComposerException("Found undefined alias: " ~ anchor,
@@ -196,7 +198,7 @@ final class Composer
            return anchors_[anchor];
        }

-        const event = parser_.peekEvent();
+        const event = parser_.front;
        const anchor = event.anchor;
        if((anchor !is null) && (anchor in anchors_) !is null)
        {
@@ -212,19 +214,19 @@ final class Composer
            anchors_[anchor] = Node();
        }

-        if(parser_.checkEvent(EventID.Scalar))
+        switch (parser_.front.id)
        {
-            result = composeScalarNode();
+            case EventID.Scalar:
+                result = composeScalarNode();
+                break;
+            case EventID.SequenceStart:
+                result = composeSequenceNode(pairAppenderLevel, nodeAppenderLevel);
+                break;
+            case EventID.MappingStart:
+                result = composeMappingNode(pairAppenderLevel, nodeAppenderLevel);
+                break;
+            default: assert(false, "This code should never be reached");
        }
-        else if(parser_.checkEvent(EventID.SequenceStart))
-        {
-            result = composeSequenceNode(pairAppenderLevel, nodeAppenderLevel);
-        }
-        else if(parser_.checkEvent(EventID.MappingStart))
-        {
-            result = composeMappingNode(pairAppenderLevel, nodeAppenderLevel);
-        }
-        else{assert(false, "This code should never be reached");}

        if(anchor !is null)
        {
@@ -236,7 +238,8 @@ final class Composer
    ///Compose a scalar node.
    Node composeScalarNode() @safe
    {
-        const event = parser_.getEvent();
+        const event = parser_.front;
+        parser_.popFront();
        const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value,
                                      event.implicit);

@@ -256,17 +259,19 @@ final class Composer
        ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
        auto nodeAppender = &(nodeAppenders_[nodeAppenderLevel]);

-        const startEvent = parser_.getEvent();
+        const startEvent = parser_.front;
+        parser_.popFront();
        const tag = resolver_.resolve(NodeID.Sequence, startEvent.tag, null,
                                      startEvent.implicit);

-        while(!parser_.checkEvent(EventID.SequenceEnd))
+        while(parser_.front.id != EventID.SequenceEnd)
        {
            nodeAppender.put(composeNode(pairAppenderLevel, nodeAppenderLevel + 1));
        }

-        Node node = constructor_.node(startEvent.startMark, parser_.getEvent().endMark,
+        Node node = constructor_.node(startEvent.startMark, parser_.front.endMark,
                                      tag, nodeAppender.data.dup, startEvent.collectionStyle);
+        parser_.popFront();
        nodeAppender.clear();

        return node;
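Several call sites in this patch replace the old one-shot parser_.getEvent() with the two-step range idiom: read parser_.front, then call parser_.popFront(). For reference, the same idiom can be packaged as a generic helper over any input range; this is only an illustration of the pattern, not something the patch adds:

import std.range.primitives : ElementType, empty, front, isInputRange, popFront;

/// Hypothetical helper: return the front element and advance the range,
/// i.e. what the removed getEvent() did in a single call.
ElementType!R takeFront(R)(ref R r)
    if (isInputRange!R)
{
    assert(!r.empty, "No element left");
    auto e = r.front;
    r.popFront();
    return e;
}

unittest
{
    auto xs = [1, 2, 3];
    assert(takeFront(xs) == 1);
    assert(xs == [2, 3]);
}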
@@ -351,13 +356,14 @@ final class Composer
        @safe
    {
        ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
-        const startEvent = parser_.getEvent();
+        const startEvent = parser_.front;
+        parser_.popFront();
        const tag = resolver_.resolve(NodeID.Mapping, startEvent.tag, null,
                                      startEvent.implicit);
        auto pairAppender = &(pairAppenders_[pairAppenderLevel]);

        Tuple!(Node, Mark)[] toMerge;
-        while(!parser_.checkEvent(EventID.MappingEnd))
+        while(parser_.front.id != EventID.MappingEnd)
        {
            auto pair = Node.Pair(composeNode(pairAppenderLevel + 1, nodeAppenderLevel),
                                  composeNode(pairAppenderLevel + 1, nodeAppenderLevel));
@@ -365,7 +371,7 @@ final class Composer
            //Need to flatten and merge the node referred by YAMLMerge.
            if(pair.key.isType!YAMLMerge)
            {
-                toMerge ~= tuple(pair.value, cast(Mark)parser_.peekEvent().endMark);
+                toMerge ~= tuple(pair.value, cast(Mark)parser_.front.endMark);
            }
            //Not YAMLMerge, just add the pair.
            else
@@ -383,10 +389,11 @@ final class Composer
            .uniq!((x,y) => x.key == y.key)
            .walkLength;
        enforce(numUnique == pairAppender.data.length,
-                new ComposerException("Duplicate key found in mapping", parser_.getEvent().startMark));
+                new ComposerException("Duplicate key found in mapping", parser_.front.startMark));

-        Node node = constructor_.node(startEvent.startMark, parser_.getEvent().endMark,
+        Node node = constructor_.node(startEvent.startMark, parser_.front.endMark,
                                      tag, pairAppender.data.dup, startEvent.collectionStyle);
+        parser_.popFront();

        pairAppender.clear();
        return node;

@@ -237,7 +237,8 @@ struct Dumper(Range)
     *
     * Throws: YAMLException if unable to emit.
     */
-    void emit(CharacterType = char)(Event[] events) @safe
+    void emit(CharacterType = char, T)(T events) @safe
+        if (isInputRange!T && is(ElementType!T == Event))
    {
        try
        {

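emit is now a template constrained on isInputRange!T && is(ElementType!T == Event), so any input range of Event is accepted: a plain Event[] still works, but so does a lazily produced range such as the parser returned by the reworked Loader.parse below. A reduced sketch of the same constraint shape, with a stand-in Event struct rather than dyaml's:

import std.algorithm.iteration : filter;
import std.range.primitives : ElementType, isInputRange;

struct Event { int id; }

// Accepts an Event[] array, a filtered view of one, a generator range, ...
void consume(T)(T events)
    if (isInputRange!T && is(ElementType!T == Event))
{
    foreach (e; events)
    {
        // emit each event here
    }
}

void main()
{
    Event[] all = [Event(1), Event(2), Event(3)];
    consume(all);                          // eager array still compiles
    consume(all.filter!(e => e.id != 2));  // and so does a lazy range
}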
@@ -317,22 +317,9 @@ struct Loader


    // Parse and return all events. Used for debugging.
-    Event[] parse() @safe
+    auto parse() @safe
    {
-        try
-        {
-            Event[] result;
-            while(parser_.checkEvent())
-            {
-                result ~= parser_.getEvent();
-            }
-            return result;
-        }
-        catch(YAMLException e)
-        {
-            throw new YAMLException("Unable to parse YAML from stream %s : %s "
-                                    .format(name_, e.msg));
-        }
+        return parser_;
    }

    // Construct default constructor/resolver if the user has not yet specified
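Loader.parse no longer eagerly builds an Event[] (and no longer rewraps errors in a new YAMLException here); it simply returns the parser, which after this patch is itself an input range of events, so callers iterate lazily and any parse error surfaces while iterating. Roughly how a caller might drive it now — the file name and the top-level dyaml import are illustrative assumptions, and parse() is, per its own comment, meant for debugging:

import dyaml;   // assumption: the usual package import for the library

void countEvents()
{
    size_t n;
    // parse() now hands back a lazy range of events rather than Event[].
    foreach (event; Loader.fromFile("example.yaml").parse())
    {
        ++n;    // each event is produced on demand as the loop advances
    }
    // n holds the number of events the stream produced
}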
@@ -142,78 +142,46 @@ final class Parser
    }

    /**
-     * Check if the next event is one of specified types.
-     *
-     * If no types are specified, checks if any events are left.
-     *
-     * Params: ids = Event IDs to check for.
-     *
-     * Returns: true if the next event is one of specified types,
-     *          or if there are any events left if no types specified.
-     *          false otherwise.
+     * Check if any events are left. May have side effects in some cases.
     */
-    bool checkEvent(EventID[] ids...) @safe
+    bool empty() @safe
    {
-        //Check if the next event is one of specified types.
-        if(currentEvent_.isNull && state_ !is null)
-        {
-            currentEvent_ = state_();
-        }
-
-        if(!currentEvent_.isNull)
-        {
-            if(ids.length == 0){return true;}
-            else
-            {
-                const nextId = currentEvent_.id;
-                foreach(id; ids)
-                {
-                    if(nextId == id){return true;}
-                }
-            }
-        }
-
-        return false;
+        ensureState();
+        return currentEvent_.isNull;
    }

    /**
-     * Return the next event, but keep it in the queue.
+     * Return the current event.
     *
     * Must not be called if there are no events left.
     */
-    Event peekEvent() @safe
+    Event front() @safe
    {
-        if(currentEvent_.isNull && state_ !is null)
-        {
-            currentEvent_ = state_();
-        }
-        if(!currentEvent_.isNull){return currentEvent_;}
-        assert(false, "No event left to peek");
+        ensureState();
+        assert(!currentEvent_.isNull, "No event left to peek");
+        return currentEvent_;
    }

    /**
-     * Return the next event, removing it from the queue.
+     * Skip to the next event.
     *
     * Must not be called if there are no events left.
     */
-    Event getEvent() @safe
+    void popFront() @safe
    {
-        //Get the next event and proceed further.
-        if(currentEvent_.isNull && state_ !is null)
-        {
-            currentEvent_ = state_();
-        }
-
-        if(!currentEvent_.isNull)
-        {
-            Event result = currentEvent_;
-            currentEvent_.id = EventID.Invalid;
-            return result;
-        }
-        assert(false, "No event left to get");
+        currentEvent_.id = EventID.Invalid;
+        ensureState();
    }

private:
+    /// If current event is invalid, load the next valid one if possible.
+    void ensureState() @safe
+    {
+        if(currentEvent_.isNull && state_ !is null)
+        {
+            currentEvent_ = state_();
+        }
+    }
    ///Pop and return the newest state in states_.
    Event delegate() @safe popState() @safe
    {

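With empty, front, and popFront in place (and the private ensureState helper lazily pulling the next event from the current state delegate), the Parser satisfies D's input range interface — that is what lets the Composer call skipOver on it and the tests zip over it. A generic sketch of that contract, independent of the library:

import std.range.primitives : empty, front, isInputRange, popFront;

/// Anything exposing empty/front/popFront can be consumed the way the
/// Composer now consumes the Parser.
void drain(R)(ref R r)
    if (isInputRange!R)
{
    while (!r.empty)
    {
        auto current = r.front; // peek, like the old peekEvent()
        r.popFront();           // advance, like the old getEvent() minus its return value
    }
}

unittest
{
    auto data = [10, 20, 30];
    drain(data);
    assert(data.empty);
}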
@@ -11,6 +11,7 @@ version(unittest)
{

    import dyaml.test.common;
+    import dyaml.test.emitter;
    import dyaml.token;


@@ -23,12 +24,7 @@ void testParser(string dataFilename, string canonicalFilename) @safe
    auto dataEvents = Loader.fromFile(dataFilename).parse();
    auto canonicalEvents = Loader.fromFile(canonicalFilename).parse();

-    assert(dataEvents.length == canonicalEvents.length);
-
-    foreach(e; 0 .. dataEvents.length)
-    {
-        assert(dataEvents[e].id == canonicalEvents[e].id);
-    }
+    compareEvents(dataEvents, canonicalEvents);
}


@@ -27,15 +27,11 @@ import dyaml.token;
/// events2 = Second event array to compare.
///
/// Returns: true if the events are equivalent, false otherwise.
-bool compareEvents(Event[] events1, Event[] events2) @safe
+bool compareEvents(T, U)(T events1, U events2)
+    if (isInputRange!T && isInputRange!U && is(ElementType!T == Event) && is(ElementType!U == Event))
{
-    if(events1.length != events2.length){return false;}
-
-    for(uint e; e < events1.length; ++e)
+    foreach (e1, e2; zip(events1, events2))
    {
-        auto e1 = events1[e];
-        auto e2 = events2[e];
-
        //Different event types.
        if(e1.id != e2.id){return false;}
        //Different anchor (if applicable).

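The templated compareEvents walks both event ranges in lockstep with std.range.zip; foreach unpacks each pair into e1 and e2. Note that zip's default stopping policy ends at the shorter range, so the explicit length check of the old array-based version is gone and only the per-event comparison remains. A small stand-alone sketch of the same pattern with a simplified Event:

import std.range : zip;

struct Event { int id; }

bool sameIDs(T, U)(T events1, U events2)
{
    // zip pairs the two ranges up; foreach expands each pair into e1/e2.
    foreach (e1, e2; zip(events1, events2))
    {
        if (e1.id != e2.id) { return false; }
    }
    return true; // trailing events in the longer range are not compared
}

unittest
{
    assert( sameIDs([Event(1), Event(2)], [Event(1), Event(2)]));
    assert(!sameIDs([Event(1), Event(2)], [Event(1), Event(3)]));
}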