Stripped spaces.

Ferdinand Majerech 2014-07-26 16:43:02 +02:00
parent db7fecf960
commit 388b74b332
2 changed files with 39 additions and 39 deletions


@@ -31,7 +31,7 @@ package:
/**
* The following YAML grammar is LL(1) and is parsed by a recursive descent
* parser.
*
* stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
* implicit_document ::= block_node DOCUMENT-END*
* explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
@@ -67,9 +67,9 @@ package:
* flow_mapping_entry?
* FLOW-MAPPING-END
* flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
*
* FIRST sets:
*
* stream: { STREAM-START }
* explicit_document: { DIRECTIVE DOCUMENT-START }
* implicit_document: FIRST(block_node)
@@ -88,7 +88,7 @@ package:
* flow_mapping: { FLOW-MAPPING-START }
* flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
* flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
*/
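The FIRST sets above are what make the grammar LL(1): one token of lookahead is enough to pick a production. A minimal standalone D sketch, not code from this commit, of how such a set becomes a lookahead check (the TokenID enum here is a cut-down stand-in for the Scanner's token IDs):

    import std.algorithm : canFind;
    import std.stdio : writeln;

    enum TokenID { StreamStart, Directive, DocumentStart, Scalar, StreamEnd }

    void main()
    {
        // FIRST(explicit_document) = { DIRECTIVE DOCUMENT-START }, as listed above.
        const explicitDocumentFirst = [TokenID.Directive, TokenID.DocumentStart];
        const lookahead = TokenID.Scalar; // what a peekToken()-style call would return
        // The LL(1) decision: a lookahead in FIRST(explicit_document) means an
        // explicit document follows; anything else falls through to the implicit case.
        writeln(explicitDocumentFirst.canFind(lookahead) ? "explicit document"
                                                         : "implicit document");
    }

This mirrors how the parser below uses scanner_.checkToken with several TokenIDs to choose between productions.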
/**
@@ -104,7 +104,7 @@ class ParserException : MarkedYAMLException
private alias ParserException Error;
///Generates events from tokens provided by a Scanner.
final class Parser
{
private:
///Default tag handle shortcuts and replacements.
@@ -189,7 +189,7 @@ final class Parser
}
/**
* Return the next event, but keep it in the queue.
*
* Must not be called if there are no events left.
*/
@@ -229,7 +229,7 @@ final class Parser
///Pop and return the newest state in states_.
Event delegate() popState() @trusted
{
enforce(states_.length > 0,
new YAMLException("Parser: Need to pop state but no states left to pop"));
const result = states_.back;
states_.length = states_.length - 1;
@@ -239,7 +239,7 @@ final class Parser
///Pop and return the newest mark in marks_.
Mark popMark() @trusted
{
enforce(marks_.length > 0,
new YAMLException("Parser: Need to pop mark but no marks left to pop"));
const result = marks_.back;
marks_.length = marks_.length - 1;
@@ -260,19 +260,19 @@ final class Parser
return streamStartEvent(token.startMark, token.endMark, token.encoding);
}
-///Parse implicit document start, unless explicit is detected: if so, parse explicit.
+/// Parse implicit document start, unless explicit detected: if so, parse explicit.
Event parseImplicitDocumentStart() @trusted
{
-//Parse an implicit document.
+// Parse an implicit document.
if(!scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
TokenID.StreamEnd))
{
tagDirectives_ = defaultTagDirectives_;
immutable token = scanner_.peekToken();
states_ ~= &parseDocumentEnd;
state_ = &parseBlockNode;
return documentStartEvent(token.startMark, token.endMark, false, null, null);
}
return parseDocumentStart();
@@ -292,7 +292,7 @@ final class Parser
auto tagDirectives = processDirectives();
enforce(scanner_.checkToken(TokenID.DocumentStart),
new Error("Expected document start but found " ~
scanner_.peekToken().idString,
scanner_.peekToken().startMark));
const endMark = scanner_.getToken().endMark;
@@ -326,7 +326,7 @@ final class Parser
///Parse document content.
Event parseDocumentContent() @safe
{
if(scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
TokenID.DocumentEnd, TokenID.StreamEnd))
{
state_ = popState();
@@ -335,14 +335,14 @@ final class Parser
return parseBlockNode();
}
-///Process directives at the beginning of a document.
+/// Process directives at the beginning of a document.
TagDirective[] processDirectives() @system
{
-//Destroy version and tag handles from previous document.
+// Destroy version and tag handles from previous document.
YAMLVersion_ = null;
tagDirectives_.length = 0;
-//Process directives.
+// Process directives.
while(scanner_.checkToken(TokenID.Directive))
{
immutable token = scanner_.getToken();
@@ -363,7 +363,7 @@ final class Parser
foreach(ref pair; tagDirectives_)
{
-//handle
+// handle
const h = pair.handle;
enforce(h != handle, new Error("Duplicate tag handle: " ~ handle,
token.startMark));
@@ -385,7 +385,7 @@ final class Parser
found = true;
break;
}
-if(!found){tagDirectives_ ~= defaultPair;}
+if(!found) {tagDirectives_ ~= defaultPair; }
}
return value;
@@ -439,7 +439,7 @@ final class Parser
tagMark = token.startMark;
tagHandleEnd = token.valueDivider;
}
endMark = token.endMark;
target = token.value;
return true;
}
@@ -472,7 +472,7 @@ final class Parser
implicit = (token.style == ScalarStyle.Plain && tag is null) || tag == "!";
bool implicit_2 = (!implicit) && tag is null;
state_ = popState();
return scalarEvent(startMark, token.endMark, Anchor(anchor), Tag(tag),
tuple(implicit, implicit_2), token.value, token.style);
}
@@ -480,7 +480,7 @@ final class Parser
{
endMark = scanner_.peekToken().endMark;
state_ = &parseFlowSequenceEntry!(Yes.first);
return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
implicit, CollectionStyle.Flow);
}
@@ -488,7 +488,7 @@ final class Parser
{
endMark = scanner_.peekToken().endMark;
state_ = &parseFlowMappingKey!(Yes.first);
return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
implicit, CollectionStyle.Flow);
}
@@ -496,7 +496,7 @@ final class Parser
{
endMark = scanner_.peekToken().endMark;
state_ = &parseBlockSequenceEntry!(Yes.first);
return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
implicit, CollectionStyle.Block);
}
@@ -504,7 +504,7 @@ final class Parser
{
endMark = scanner_.peekToken().endMark;
state_ = &parseBlockMappingKey!(Yes.first);
return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
implicit, CollectionStyle.Block);
}
@@ -512,17 +512,17 @@ final class Parser
{
state_ = popState();
//PyYAML uses a tuple(implicit, false) for the second last arg here,
//but the second bool is never used after that - so we don't use it.
//Empty scalars are allowed even if a tag or an anchor is specified.
return scalarEvent(startMark, endMark, Anchor(anchor), Tag(tag),
tuple(implicit, false) , "");
}
immutable token = scanner_.peekToken();
throw new Error("While parsing a " ~ (block ? "block" : "flow") ~ " node",
startMark, "expected node content, but found: "
~ token.idString, token.startMark);
}
@@ -534,7 +534,7 @@ final class Parser
* starts.
* startMark = Position of the node the tag belongs to.
* tagMark = Position of the tag.
*/
string processTag(const string tag, const uint handleEnd,
const Mark startMark, const Mark tagMark)
const @trusted
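processTag resolves a tag handle against the %TAG directives currently in effect, plus the default shortcuts. A self-contained D sketch of that substitution, using the two handles every YAML document gets by default; the associative array is an illustrative stand-in for the Parser's TagDirective list, not its actual representation:

    import std.stdio : writeln;

    void main()
    {
        // Default tag handle shortcuts and replacements: "!" stays local,
        // "!!" expands to the standard yaml.org,2002 prefix.
        string[string] prefixes = ["!": "!", "!!": "tag:yaml.org,2002:"];

        // A tag token such as "!!str" carries a handle ("!!") and a suffix ("str");
        // handleEnd above marks where the handle stops. Resolving the tag means
        // replacing the handle with its prefix.
        const handle = "!!";
        const suffix = "str";
        writeln(prefixes[handle] ~ suffix); // prints: tag:yaml.org,2002:str
    }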
@@ -591,7 +591,7 @@ final class Parser
{
immutable token = scanner_.peekToken();
throw new Error("While parsing a block collection", marks_.back,
"expected block end, but found " ~ token.idString,
token.startMark);
}
@@ -610,9 +610,9 @@ final class Parser
{
immutable token = scanner_.getToken();
if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.Key,
TokenID.Value, TokenID.BlockEnd))
{
states_ ~= &parseIndentlessSequenceEntry;
return parseBlockNode();
}
@@ -656,7 +656,7 @@ final class Parser
{
immutable token = scanner_.peekToken();
throw new Error("While parsing a block mapping", marks_.back,
"expected block end, but found: " ~ token.idString,
token.startMark);
}
@@ -693,7 +693,7 @@ final class Parser
* flow_sequence_entry?
* FLOW-SEQUENCE-END
* flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
*
* Note that while production rules for both flow_sequence_entry and
* flow_mapping_entry are equal, their interpretations are different.
* For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
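In concrete terms, that production is what lets a flow sequence hold an inline single-pair mapping: in the flow sequence [one, two: three] the entry two: three is emitted as a mapping of its own nested inside the sequence, whereas the same text inside the flow mapping {two: three} is just an ordinary key/value entry.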
@@ -726,7 +726,7 @@ final class Parser
{
immutable token = scanner_.peekToken();
state_ = &parseFlowSequenceEntryMappingKey;
return mappingStartEvent(token.startMark, token.endMark,
Anchor(), Tag(), true, CollectionStyle.Flow);
}
else if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
@@ -747,7 +747,7 @@ final class Parser
{
immutable token = scanner_.getToken();
if(!scanner_.checkToken(TokenID.Value, TokenID.FlowEntry,
TokenID.FlowSequenceEnd))
{
states_ ~= nextState;
@@ -776,7 +776,7 @@ final class Parser
states_ ~= nextState;
return parseFlowNode();
}
state_ = nextState;
return processEmptyScalar(token.endMark);
}


@@ -932,7 +932,7 @@ final class Scanner
Token scanDirective() @trusted pure
{
Mark startMark = reader_.mark;
-//Skip the '%'.
+// Skip the '%'.
reader_.forward();