range-ify Scanner

commit bc7e7f9593
parent aa9b4c8c2b
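This commit converts Scanner from an ad-hoc token API (checkToken/peekToken/getToken) into a standard D input range exposing empty/front/popFront, then rewrites every call site in Loader and Parser against the three range primitives, which also lets foreach iterate the scanner directly. A minimal standalone sketch of the new shape (stub Token and queue for illustration, not dyaml's actual types):

import std.exception : enforce;

struct Token { int id; } // stub; the real Token also carries marks, value, style, ...

struct TokenStream
{
    Token[] queue_;      // stands in for Scanner's internal token queue
    size_t tokensTaken_;

    /// Range primitive: any token-fetching work hangs off empty.
    bool empty() @safe { return queue_.length == 0; }

    /// Range primitive: peek at the current token without consuming it.
    const(Token) front() @safe
    {
        enforce(!empty, "No token left to peek");
        return queue_[0];
    }

    /// Range primitive: consume the current token.
    void popFront() @safe
    {
        ++tokensTaken_;
        queue_ = queue_[1 .. $];
    }
}

void main()
{
    auto tokens = TokenStream([Token(1), Token(2)]);
    foreach (t; tokens) {} // compiles because empty/front/popFront exist
}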
@@ -249,14 +249,14 @@ struct Loader
         return currentNode;
     }
     // Scan and return all tokens. Used for debugging.
-    Token[] scan() @safe
+    const(Token)[] scan() @safe
     {
         try
         {
-            Token[] result;
-            while(scanner_.checkToken())
+            const(Token)[] result;
+            foreach (token; scanner_)
             {
-                result ~= scanner_.getToken();
+                result ~= token;
             }
             return result;
         }
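The new scan() body works because foreach over a non-array aggregate lowers onto exactly the three primitives this commit adds. Roughly, and assuming the range is copyable (a sketch of the lowering, not literal compiler output):

import std.range.primitives;

void scanAll(R)(R scanner) if (isInputRange!R)
{
    // What `foreach (token; scanner)` boils down to:
    for (auto r = scanner; !r.empty; r.popFront())
    {
        auto token = r.front;
        // ... use token ...
    }
}

void main()
{
    scanAll([1, 2, 3]); // slices are input ranges too
}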
@@ -270,9 +270,12 @@ struct Loader
     // Scan all tokens, throwing them away. Used for benchmarking.
     void scanBench() @safe
     {
-        try while(scanner_.checkToken())
+        try
         {
-            scanner_.getToken();
+            while(!scanner_.empty)
+            {
+                scanner_.popFront();
+            }
         }
         catch(YAMLException e)
         {
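Since popFront returns nothing, the benchmarking loop becomes a pure drain; the token-fetching work now happens inside empty. The same shape generically (a sketch):

import std.range.primitives;

/// Drain a range, mirroring the new scanBench loop.
void drain(R)(ref R r) if (isInputRange!R)
{
    while (!r.empty)
    {
        r.popFront();
    }
}

void main()
{
    auto xs = [10, 20, 30];
    drain(xs);
    assert(xs.empty);
}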
@@ -222,7 +222,8 @@ final class Parser
     ///Parse stream start.
     Event parseStreamStart() @safe
     {
-        const token = scanner_.getToken();
+        const token = scanner_.front;
+        scanner_.popFront();
         state_ = &parseImplicitDocumentStart;
         return streamStartEvent(token.startMark, token.endMark);
     }
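From here on, every former getToken() call becomes the two-step front-then-popFront seen in parseStreamStart. Where that pairing repeats, it could be factored into a helper; a sketch (takeFront is hypothetical, the commit keeps the two calls inline):

import std.range.primitives;

/// Hypothetical helper: read the current element, then consume it --
/// the moral equivalent of the old getToken().
auto takeFront(R)(ref R r) if (isInputRange!R)
{
    auto e = r.front;
    r.popFront();
    return e;
}

void main()
{
    auto xs = [1, 2, 3];
    assert(xs.takeFront == 1);
    assert(xs == [2, 3]);
}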
@@ -231,11 +232,11 @@ final class Parser
     Event parseImplicitDocumentStart() @safe
     {
         // Parse an implicit document.
-        if(!scanner_.checkToken(TokenID.directive, TokenID.documentStart,
+        if(!scanner_.front.id.among!(TokenID.directive, TokenID.documentStart,
                                 TokenID.streamEnd))
         {
             tagDirectives_ = defaultTagDirectives_;
-            const token = scanner_.peekToken();
+            const token = scanner_.front;

             pushState(&parseDocumentEnd);
             state_ = &parseBlockNode;
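The variadic checkToken(a, b, ...) tests become front.id.among!(a, b, ...). std.algorithm.comparison.among returns the one-based index of the first match, or 0 for no match, so it works directly as a boolean. Both forms used in this commit, with TokenID stubbed here:

import std.algorithm.comparison : among;

enum TokenID { directive, documentStart, documentEnd, streamEnd, flowEntry, scalar }

void main()
{
    const id = TokenID.documentStart;
    // Compile-time candidates: the template form among!(...), as in parseImplicitDocumentStart.
    assert(id.among!(TokenID.directive, TokenID.documentStart, TokenID.streamEnd) == 2);
    // Runtime candidates: the ordinary call form, as in parseFlowValue below.
    const checkId = TokenID.scalar;
    assert(!TokenID.streamEnd.among(TokenID.flowEntry, checkId));
}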
@@ -249,20 +250,24 @@ final class Parser
     Event parseDocumentStart() @trusted
     {
         //Parse any extra document end indicators.
-        while(scanner_.checkToken(TokenID.documentEnd)){scanner_.getToken();}
+        while(scanner_.front.id == TokenID.documentEnd)
+        {
+            scanner_.popFront();
+        }

         //Parse an explicit document.
-        if(!scanner_.checkToken(TokenID.streamEnd))
+        if(scanner_.front.id != TokenID.streamEnd)
         {
-            const startMark = scanner_.peekToken().startMark;
+            const startMark = scanner_.front.startMark;

             auto tagDirectives = processDirectives();
-            enforce(scanner_.checkToken(TokenID.documentStart),
+            enforce(scanner_.front.id == TokenID.documentStart,
                     new ParserException("Expected document start but found " ~
-                                        scanner_.peekToken().idString,
-                                        scanner_.peekToken().startMark));
+                                        scanner_.front.idString,
+                                        scanner_.front.startMark));

-            const endMark = scanner_.getToken().endMark;
+            const endMark = scanner_.front.endMark;
+            scanner_.popFront();
             pushState(&parseDocumentEnd);
             state_ = &parseDocumentContent;
             return documentStartEvent(startMark, endMark, true, YAMLVersion_, tagDirectives);
@@ -270,7 +275,8 @@ final class Parser
         else
         {
             //Parse the end of the stream.
-            const token = scanner_.getToken();
+            const token = scanner_.front;
+            scanner_.popFront();
             assert(states_.data.length == 0);
             assert(marks_.data.length == 0);
             state_ = null;
@@ -281,9 +287,14 @@ final class Parser
     ///Parse document end (explicit or implicit).
     Event parseDocumentEnd() @safe
     {
-        Mark startMark = scanner_.peekToken().startMark;
-        const bool explicit = scanner_.checkToken(TokenID.documentEnd);
-        Mark endMark = explicit ? scanner_.getToken().endMark : startMark;
+        Mark startMark = scanner_.front.startMark;
+        const bool explicit = scanner_.front.id == TokenID.documentEnd;
+        Mark endMark = startMark;
+        if (explicit)
+        {
+            endMark = scanner_.front.endMark;
+            scanner_.popFront();
+        }

         state_ = &parseDocumentStart;

@@ -293,11 +304,11 @@ final class Parser
     ///Parse document content.
     Event parseDocumentContent() @safe
    {
-        if(scanner_.checkToken(TokenID.directive, TokenID.documentStart,
+        if(scanner_.front.id.among!(TokenID.directive, TokenID.documentStart,
                                TokenID.documentEnd, TokenID.streamEnd))
         {
             state_ = popState();
-            return processEmptyScalar(scanner_.peekToken().startMark);
+            return processEmptyScalar(scanner_.front.startMark);
         }
         return parseBlockNode();
     }
@@ -310,9 +321,10 @@ final class Parser
         tagDirectives_.length = 0;

         // Process directives.
-        while(scanner_.checkToken(TokenID.directive))
+        while(scanner_.front.id == TokenID.directive)
         {
-            const token = scanner_.getToken();
+            const token = scanner_.front;
+            scanner_.popFront();
             string value = token.value.idup;
             if(token.directive == DirectiveType.yaml)
             {
@@ -382,9 +394,10 @@ final class Parser
                    const Flag!"indentlessSequence" indentlessSequence = No.indentlessSequence)
         @trusted
     {
-        if(scanner_.checkToken(TokenID.alias_))
+        if(scanner_.front.id == TokenID.alias_)
         {
-            const token = scanner_.getToken();
+            const token = scanner_.front;
+            scanner_.popFront();
             state_ = popState();
             return aliasEvent(token.startMark, token.endMark,
                               cast(string)token.value);
@@ -400,9 +413,10 @@ final class Parser
         //Get anchor/tag if detected. Return false otherwise.
         bool get(const TokenID id, const Flag!"first" first, ref string target) @safe
         {
-            if(!scanner_.checkToken(id)){return false;}
+            if(scanner_.front.id != id){return false;}
             invalidMarks = false;
-            const token = scanner_.getToken();
+            const token = scanner_.front;
+            scanner_.popFront();
             if(first){startMark = token.startMark;}
             if(id == TokenID.tag)
             {
@@ -422,24 +436,25 @@ final class Parser

         if(invalidMarks)
         {
-            startMark = endMark = scanner_.peekToken().startMark;
+            startMark = endMark = scanner_.front.startMark;
         }

         bool implicit = (tag is null || tag == "!");

-        if(indentlessSequence && scanner_.checkToken(TokenID.blockEntry))
+        if(indentlessSequence && scanner_.front.id == TokenID.blockEntry)
         {
             state_ = &parseIndentlessSequenceEntry;
             return sequenceStartEvent
-                (startMark, scanner_.peekToken().endMark, anchor,
+                (startMark, scanner_.front.endMark, anchor,
                  tag, implicit, CollectionStyle.block);
         }

-        if(scanner_.checkToken(TokenID.scalar))
+        if(scanner_.front.id == TokenID.scalar)
         {
-            auto token = scanner_.getToken();
+            auto token = scanner_.front;
+            scanner_.popFront();
             auto value = token.style == ScalarStyle.doubleQuoted
-                ? handleDoubleQuotedScalarEscapes(token.value)
+                ? handleDoubleQuotedScalarEscapes(token.value.dup)
                 : cast(string)token.value;

             implicit = (token.style == ScalarStyle.plain && tag is null) || tag == "!";
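One subtlety in the scalar branch above: token.value gains a .dup. Presumably because front returns const(Token) where getToken returned a mutable Token, token.value is now a const slice, and the escape handler wants a buffer it may own. The constness issue in isolation (hypothetical handler, stub Token):

struct Token { const(char)[] value; }

// Hypothetical stand-in for handleDoubleQuotedScalarEscapes: takes char[].
string unescape(char[] buf) { return buf.idup; }

void main()
{
    const token = Token("hello");
    // unescape(token.value);           // error: cannot pass const(char)[] as char[]
    auto s = unescape(token.value.dup); // .dup yields a mutable char[] copy
}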
@@ -448,33 +463,33 @@ final class Parser
                               implicit, value, token.style);
         }

-        if(scanner_.checkToken(TokenID.flowSequenceStart))
+        if(scanner_.front.id == TokenID.flowSequenceStart)
         {
-            endMark = scanner_.peekToken().endMark;
+            endMark = scanner_.front.endMark;
             state_ = &parseFlowSequenceEntry!(Yes.first);
             return sequenceStartEvent(startMark, endMark, anchor, tag,
                                       implicit, CollectionStyle.flow);
         }

-        if(scanner_.checkToken(TokenID.flowMappingStart))
+        if(scanner_.front.id == TokenID.flowMappingStart)
         {
-            endMark = scanner_.peekToken().endMark;
+            endMark = scanner_.front.endMark;
             state_ = &parseFlowMappingKey!(Yes.first);
             return mappingStartEvent(startMark, endMark, anchor, tag,
                                      implicit, CollectionStyle.flow);
         }

-        if(block && scanner_.checkToken(TokenID.blockSequenceStart))
+        if(block && scanner_.front.id == TokenID.blockSequenceStart)
         {
-            endMark = scanner_.peekToken().endMark;
+            endMark = scanner_.front.endMark;
             state_ = &parseBlockSequenceEntry!(Yes.first);
             return sequenceStartEvent(startMark, endMark, anchor, tag,
                                       implicit, CollectionStyle.block);
         }

-        if(block && scanner_.checkToken(TokenID.blockMappingStart))
+        if(block && scanner_.front.id == TokenID.blockMappingStart)
         {
-            endMark = scanner_.peekToken().endMark;
+            endMark = scanner_.front.endMark;
             state_ = &parseBlockMappingKey!(Yes.first);
             return mappingStartEvent(startMark, endMark, anchor, tag,
                                      implicit, CollectionStyle.block);
@@ -492,7 +507,7 @@ final class Parser
                                      implicit , "");
         }

-        const token = scanner_.peekToken();
+        const token = scanner_.front;
         throw new ParserException("While parsing a " ~ (block ? "block" : "flow") ~ " node",
                                   startMark, "expected node content, but found: "
                                   ~ token.idString, token.startMark);
@@ -620,12 +635,17 @@ final class Parser
     ///Parse an entry of a block sequence. If first is true, this is the first entry.
     Event parseBlockSequenceEntry(Flag!"first" first)() @safe
     {
-        static if(first){pushMark(scanner_.getToken().startMark);}
+        static if(first)
+        {
+            pushMark(scanner_.front.startMark);
+            scanner_.popFront();
+        }

-        if(scanner_.checkToken(TokenID.blockEntry))
+        if(scanner_.front.id == TokenID.blockEntry)
         {
-            const token = scanner_.getToken();
-            if(!scanner_.checkToken(TokenID.blockEntry, TokenID.blockEnd))
+            const token = scanner_.front;
+            scanner_.popFront();
+            if(!scanner_.front.id.among!(TokenID.blockEntry, TokenID.blockEnd))
             {
                 pushState(&parseBlockSequenceEntry!(No.first));
                 return parseBlockNode();
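parseBlockSequenceEntry and its mapping/flow siblings below are templates over Flag!"first", so static if(first) compiles the mark-pushing prologue only into the Yes.first instantiation; the extra popFront is what forces the old one-liner to expand into a block. The Flag idiom in isolation (a sketch):

import std.typecons : Flag, No, Yes;

int[] trace;

void entry(Flag!"first" first)()
{
    static if (first)
    {
        trace ~= 0; // prologue only exists in the Yes.first instantiation
    }
    trace ~= 1;
}

void main()
{
    entry!(Yes.first)();
    entry!(No.first)();
    assert(trace == [0, 1, 1]);
}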
@@ -635,9 +655,9 @@ final class Parser
             return processEmptyScalar(token.endMark);
         }

-        if(!scanner_.checkToken(TokenID.blockEnd))
+        if(scanner_.front.id != TokenID.blockEnd)
         {
-            const token = scanner_.peekToken();
+            const token = scanner_.front;
             throw new ParserException("While parsing a block collection", marks_.data.back,
                                       "expected block end, but found " ~ token.idString,
                                       token.startMark);
@@ -645,7 +665,8 @@ final class Parser

         state_ = popState();
         popMark();
-        const token = scanner_.getToken();
+        const token = scanner_.front;
+        scanner_.popFront();
         return sequenceEndEvent(token.startMark, token.endMark);
     }

@@ -654,11 +675,12 @@ final class Parser
     ///Parse an entry of an indentless sequence.
     Event parseIndentlessSequenceEntry() @safe
     {
-        if(scanner_.checkToken(TokenID.blockEntry))
+        if(scanner_.front.id == TokenID.blockEntry)
         {
-            const token = scanner_.getToken();
+            const token = scanner_.front;
+            scanner_.popFront();

-            if(!scanner_.checkToken(TokenID.blockEntry, TokenID.key,
+            if(!scanner_.front.id.among!(TokenID.blockEntry, TokenID.key,
                                     TokenID.value, TokenID.blockEnd))
             {
                 pushState(&parseIndentlessSequenceEntry);
@@ -670,7 +692,7 @@ final class Parser
         }

         state_ = popState();
-        const token = scanner_.peekToken();
+        const token = scanner_.front;
         return sequenceEndEvent(token.startMark, token.endMark);
     }

@@ -684,13 +706,18 @@ final class Parser
     ///Parse a key in a block mapping. If first is true, this is the first key.
     Event parseBlockMappingKey(Flag!"first" first)() @safe
     {
-        static if(first){pushMark(scanner_.getToken().startMark);}
+        static if(first)
+        {
+            pushMark(scanner_.front.startMark);
+            scanner_.popFront();
+        }

-        if(scanner_.checkToken(TokenID.key))
+        if(scanner_.front.id == TokenID.key)
         {
-            const token = scanner_.getToken();
+            const token = scanner_.front;
+            scanner_.popFront();

-            if(!scanner_.checkToken(TokenID.key, TokenID.value, TokenID.blockEnd))
+            if(!scanner_.front.id.among!(TokenID.key, TokenID.value, TokenID.blockEnd))
             {
                 pushState(&parseBlockMappingValue);
                 return parseBlockNodeOrIndentlessSequence();
@@ -700,9 +727,9 @@ final class Parser
             return processEmptyScalar(token.endMark);
         }

-        if(!scanner_.checkToken(TokenID.blockEnd))
+        if(scanner_.front.id != TokenID.blockEnd)
         {
-            const token = scanner_.peekToken();
+            const token = scanner_.front;
             throw new ParserException("While parsing a block mapping", marks_.data.back,
                                       "expected block end, but found: " ~ token.idString,
                                       token.startMark);
@@ -710,18 +737,20 @@ final class Parser

         state_ = popState();
         popMark();
-        const token = scanner_.getToken();
+        const token = scanner_.front;
+        scanner_.popFront();
         return mappingEndEvent(token.startMark, token.endMark);
     }

     ///Parse a value in a block mapping.
     Event parseBlockMappingValue() @safe
     {
-        if(scanner_.checkToken(TokenID.value))
+        if(scanner_.front.id == TokenID.value)
         {
-            const token = scanner_.getToken();
+            const token = scanner_.front;
+            scanner_.popFront();

-            if(!scanner_.checkToken(TokenID.key, TokenID.value, TokenID.blockEnd))
+            if(!scanner_.front.id.among!(TokenID.key, TokenID.value, TokenID.blockEnd))
             {
                 pushState(&parseBlockMappingKey!(No.first));
                 return parseBlockNodeOrIndentlessSequence();
@@ -732,7 +761,7 @@ final class Parser
         }

         state_= &parseBlockMappingKey!(No.first);
-        return processEmptyScalar(scanner_.peekToken().startMark);
+        return processEmptyScalar(scanner_.front.startMark);
     }

     /**
@@ -751,40 +780,45 @@ final class Parser
     ///Parse an entry in a flow sequence. If first is true, this is the first entry.
     Event parseFlowSequenceEntry(Flag!"first" first)() @safe
     {
-        static if(first){pushMark(scanner_.getToken().startMark);}
+        static if(first)
+        {
+            pushMark(scanner_.front.startMark);
+            scanner_.popFront();
+        }

-        if(!scanner_.checkToken(TokenID.flowSequenceEnd))
+        if(scanner_.front.id != TokenID.flowSequenceEnd)
         {
             static if(!first)
             {
-                if(scanner_.checkToken(TokenID.flowEntry))
+                if(scanner_.front.id == TokenID.flowEntry)
                 {
-                    scanner_.getToken();
+                    scanner_.popFront();
                 }
                 else
                 {
-                    const token = scanner_.peekToken();
+                    const token = scanner_.front;
                     throw new ParserException("While parsing a flow sequence", marks_.data.back,
                                               "expected ',' or ']', but got: " ~
                                               token.idString, token.startMark);
                 }
             }

-            if(scanner_.checkToken(TokenID.key))
+            if(scanner_.front.id == TokenID.key)
             {
-                const token = scanner_.peekToken();
+                const token = scanner_.front;
                 state_ = &parseFlowSequenceEntryMappingKey;
                 return mappingStartEvent(token.startMark, token.endMark,
                                          null, null, true, CollectionStyle.flow);
             }
-            else if(!scanner_.checkToken(TokenID.flowSequenceEnd))
+            else if(scanner_.front.id != TokenID.flowSequenceEnd)
             {
                 pushState(&parseFlowSequenceEntry!(No.first));
                 return parseFlowNode();
             }
         }

-        const token = scanner_.getToken();
+        const token = scanner_.front;
+        scanner_.popFront();
         state_ = popState();
         popMark();
         return sequenceEndEvent(token.startMark, token.endMark);
@@ -793,9 +827,10 @@ final class Parser
     ///Parse a key in flow context.
     Event parseFlowKey(in Event delegate() @safe nextState) @safe
     {
-        const token = scanner_.getToken();
+        const token = scanner_.front;
+        scanner_.popFront();

-        if(!scanner_.checkToken(TokenID.value, TokenID.flowEntry,
+        if(!scanner_.front.id.among!(TokenID.value, TokenID.flowEntry,
                                 TokenID.flowSequenceEnd))
         {
             pushState(nextState);
@@ -816,10 +851,11 @@ final class Parser
     Event parseFlowValue(TokenID checkId, in Event delegate() @safe nextState)
         @safe
     {
-        if(scanner_.checkToken(TokenID.value))
+        if(scanner_.front.id == TokenID.value)
         {
-            const token = scanner_.getToken();
-            if(!scanner_.checkToken(TokenID.flowEntry, checkId))
+            const token = scanner_.front;
+            scanner_.popFront();
+            if(!scanner_.front.id.among(TokenID.flowEntry, checkId))
             {
                 pushState(nextState);
                 return parseFlowNode();
@@ -830,7 +866,7 @@ final class Parser
         }

         state_ = nextState;
-        return processEmptyScalar(scanner_.peekToken().startMark);
+        return processEmptyScalar(scanner_.front.startMark);
     }

     ///Parse a mapping value in an entry in a flow sequence.
@@ -844,7 +880,7 @@ final class Parser
     Event parseFlowSequenceEntryMappingEnd() @safe
     {
         state_ = &parseFlowSequenceEntry!(No.first);
-        const token = scanner_.peekToken();
+        const token = scanner_.front;
         return mappingEndEvent(token.startMark, token.startMark);
     }

@@ -859,38 +895,43 @@ final class Parser
     ///Parse a key in a flow mapping.
     Event parseFlowMappingKey(Flag!"first" first)() @safe
     {
-        static if(first){pushMark(scanner_.getToken().startMark);}
+        static if(first)
+        {
+            pushMark(scanner_.front.startMark);
+            scanner_.popFront();
+        }

-        if(!scanner_.checkToken(TokenID.flowMappingEnd))
+        if(scanner_.front.id != TokenID.flowMappingEnd)
         {
             static if(!first)
             {
-                if(scanner_.checkToken(TokenID.flowEntry))
+                if(scanner_.front.id == TokenID.flowEntry)
                 {
-                    scanner_.getToken();
+                    scanner_.popFront();
                 }
                 else
                 {
-                    const token = scanner_.peekToken();
+                    const token = scanner_.front;
                     throw new ParserException("While parsing a flow mapping", marks_.data.back,
                                               "expected ',' or '}', but got: " ~
                                               token.idString, token.startMark);
                 }
             }

-            if(scanner_.checkToken(TokenID.key))
+            if(scanner_.front.id == TokenID.key)
             {
                 return parseFlowKey(&parseFlowMappingValue);
             }

-            if(!scanner_.checkToken(TokenID.flowMappingEnd))
+            if(scanner_.front.id != TokenID.flowMappingEnd)
             {
                 pushState(&parseFlowMappingEmptyValue);
                 return parseFlowNode();
             }
         }

-        const token = scanner_.getToken();
+        const token = scanner_.front;
+        scanner_.popFront();
         state_ = popState();
         popMark();
         return mappingEndEvent(token.startMark, token.endMark);
@@ -906,7 +947,7 @@ final class Parser
     Event parseFlowMappingEmptyValue() @safe
     {
         state_ = &parseFlowMappingKey!(No.first);
-        return processEmptyScalar(scanner_.peekToken().startMark);
+        return processEmptyScalar(scanner_.front.startMark);
     }

     ///Return an empty scalar.
@@ -178,55 +178,28 @@ struct Scanner
         fetchStreamStart();
     }

-    /// Check if the next token is one of specified types.
-    ///
-    /// If no types are specified, checks if any tokens are left.
-    ///
-    /// Params: ids = Token IDs to check for.
-    ///
-    /// Returns: true if the next token is one of specified types, or if there are
-    /// any tokens left if no types specified, false otherwise.
-    bool checkToken(const TokenID[] ids ...) @safe
+    /// Advance to the next token
+    void popFront() @safe
     {
-        // Check if the next token is one of specified types.
-        while(needMoreTokens()) { fetchToken(); }
-        if(!tokens_.empty)
-        {
-            if(ids.length == 0) { return true; }
-            else
-            {
-                const nextId = tokens_.peek().id;
-                foreach(id; ids)
-                {
-                    if(nextId == id) { return true; }
-                }
-            }
-        }
-        return false;
-    }
-
-    /// Return the next token, but keep it in the queue.
-    ///
-    /// Must not be called if there are no tokens left.
-    ref const(Token) peekToken() @safe
-    {
-        while(needMoreTokens) { fetchToken(); }
-        if(!tokens_.empty) { return tokens_.peek(); }
-        assert(false, "No token left to peek");
-    }
-
-    /// Return the next token, removing it from the queue.
-    ///
-    /// Must not be called if there are no tokens left.
-    Token getToken() @safe
-    {
-        while(needMoreTokens){fetchToken();}
-        if(!tokens_.empty)
-        {
-            ++tokensTaken_;
-            return tokens_.pop();
-        }
-        assert(false, "No token left to get");
+        ++tokensTaken_;
+        tokens_.pop();
+    }
+
+    /// Return the current token
+    const(Token) front() @safe
+    {
+        enforce(!empty, "No token left to peek");
+        return tokens_.peek();
+    }
+
+    /// Return whether there are any more tokens left.
+    bool empty() @safe
+    {
+        while (needMoreTokens())
+        {
+            fetchToken();
+        }
+        return tokens_.empty;
    }

 private:
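A design note on the new primitives: all lookahead fetching now lives in empty, which is therefore the only member that does real work and must run (directly, or via front's enforce) before front/popFront. Because empty only fetches while needMoreTokens() says so, repeated calls are idempotent, as the range contract expects. Note also that front enforces rather than asserts, so peeking past the end now throws instead of tripping an assertion. A sketch of the lazy-fetch pattern (stub types, not the real Scanner):

struct Lazy
{
    int[] buf_;
    int next_ = 1;

    bool empty() @safe
    {
        while (buf_.length == 0 && next_ <= 3) // "needMoreTokens"
        {
            buf_ ~= next_++;                   // "fetchToken"
        }
        return buf_.length == 0;
    }

    int front() @safe { assert(!empty); return buf_[0]; }
    void popFront() @safe { buf_ = buf_[1 .. $]; }
}

void main()
{
    auto r = Lazy();
    int[] seen;
    foreach (x; r) seen ~= x;
    assert(seen == [1, 2, 3]);
}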