use camelCasing for enum members

Cameron Ross 2018-08-26 21:49:14 -03:00
parent 8c637e95ec
commit 74c555d22e
17 changed files with 370 additions and 358 deletions
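
Every change in this commit is the same mechanical rename: public enum members move from PascalCase to camelCase (EventID.StreamStart becomes EventID.streamStart, ScalarStyle.Invalid becomes ScalarStyle.invalid, and so on), in line with the D style guide's camelCase convention for enum members. A minimal sketch of the pattern in D, abridged from the EventID hunk below; note that `alias` is a D keyword, so that member gains a trailing underscore:

    // Before this commit (abridged):
    enum EventID : ubyte { Invalid = 0, StreamStart, Alias, Scalar }
    // After this commit (abridged):
    enum EventID : ubyte { invalid = 0, streamStart, alias_, scalar }

    // Call sites change the same way:
    // old: if(event.id == EventID.Alias) { ... }
    // new: if(event.id == EventID.alias_) { ... }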


@@ -97,17 +97,17 @@ final class Composer
     bool checkNode() @safe
     {
         // If next event is stream start, skip it
-        parser_.skipOver!"a.id == b"(EventID.StreamStart);
+        parser_.skipOver!"a.id == b"(EventID.streamStart);
         //True if there are more documents available.
-        return parser_.front.id != EventID.StreamEnd;
+        return parser_.front.id != EventID.streamEnd;
     }
     ///Get a YAML document as a node (the root of the document).
     Node getNode() @safe
     {
         //Get the root node of the next document.
-        assert(parser_.front.id != EventID.StreamEnd,
+        assert(parser_.front.id != EventID.streamEnd,
                "Trying to get a node from Composer when there is no node to " ~
                "get. use checkNode() to determine if there is a node.");
@@ -117,19 +117,19 @@ final class Composer
     ///Get single YAML document, throwing if there is more than one document.
     Node getSingleNode() @safe
     {
-        assert(parser_.front.id != EventID.StreamEnd,
+        assert(parser_.front.id != EventID.streamEnd,
                "Trying to get a node from Composer when there is no node to " ~
                "get. use checkNode() to determine if there is a node.");
         Node document = composeDocument();
         //Ensure that the stream contains no more documents.
-        enforce(parser_.front.id == EventID.StreamEnd,
+        enforce(parser_.front.id == EventID.streamEnd,
                 new ComposerException("Expected single document in the stream, " ~
                                       "but found another document.",
                                       parser_.front.startMark));
-        skipExpected(EventID.StreamEnd);
+        skipExpected(EventID.streamEnd);
         assert(parser_.empty, "Found event after stream end");
         return document;
@@ -162,12 +162,12 @@ final class Composer
     ///Compose a YAML document and return its root node.
     Node composeDocument() @safe
     {
-        skipExpected(EventID.DocumentStart);
+        skipExpected(EventID.documentStart);
         //Compose the root node.
         Node node = composeNode(0, 0);
-        skipExpected(EventID.DocumentEnd);
+        skipExpected(EventID.documentEnd);
         anchors_.destroy();
         return node;
@@ -179,7 +179,7 @@ final class Composer
     /// nodeAppenderLevel = Current level of the node appender stack.
     Node composeNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe
     {
-        if(parser_.front.id == EventID.Alias)
+        if(parser_.front.id == EventID.alias_)
         {
             const event = parser_.front;
             parser_.popFront();
@@ -216,13 +216,13 @@ final class Composer
         switch (parser_.front.id)
         {
-            case EventID.Scalar:
+            case EventID.scalar:
                 result = composeScalarNode();
                 break;
-            case EventID.SequenceStart:
+            case EventID.sequenceStart:
                 result = composeSequenceNode(pairAppenderLevel, nodeAppenderLevel);
                 break;
-            case EventID.MappingStart:
+            case EventID.mappingStart:
                 result = composeMappingNode(pairAppenderLevel, nodeAppenderLevel);
                 break;
             default: assert(false, "This code should never be reached");
@@ -240,7 +240,7 @@ final class Composer
     {
         const event = parser_.front;
         parser_.popFront();
-        const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value,
+        const tag = resolver_.resolve(NodeID.scalar, event.tag, event.value,
                                       event.implicit);
         Node node = constructor_.node(event.startMark, event.endMark, tag,
@@ -261,10 +261,10 @@ final class Composer
         const startEvent = parser_.front;
         parser_.popFront();
-        const tag = resolver_.resolve(NodeID.Sequence, startEvent.tag, null,
+        const tag = resolver_.resolve(NodeID.sequence, startEvent.tag, null,
                                       startEvent.implicit);
-        while(parser_.front.id != EventID.SequenceEnd)
+        while(parser_.front.id != EventID.sequenceEnd)
         {
             nodeAppender.put(composeNode(pairAppenderLevel, nodeAppenderLevel + 1));
         }
@@ -358,12 +358,12 @@ final class Composer
         ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
         const startEvent = parser_.front;
         parser_.popFront();
-        const tag = resolver_.resolve(NodeID.Mapping, startEvent.tag, null,
+        const tag = resolver_.resolve(NodeID.mapping, startEvent.tag, null,
                                       startEvent.implicit);
         auto pairAppender = &(pairAppenders_[pairAppenderLevel]);
         Tuple!(Node, Mark)[] toMerge;
-        while(parser_.front.id != EventID.MappingEnd)
+        while(parser_.front.id != EventID.mappingEnd)
         {
             auto pair = Node.Pair(composeNode(pairAppenderLevel + 1, nodeAppenderLevel),
                                   composeNode(pairAppenderLevel + 1, nodeAppenderLevel));


@@ -58,7 +58,7 @@ struct Dumper(Range)
     //Preferred text width.
     uint textWidth_ = 80;
     //Line break to use.
-    LineBreak lineBreak_ = LineBreak.Unix;
+    LineBreak lineBreak_ = LineBreak.unix;
     //YAML version string.
     string YAMLVersion_ = "1.1";
     //Tag directives to use.
@@ -329,7 +329,7 @@ struct Dumper(Range)
         dumper.explicitEnd = true;
         dumper.explicitStart = true;
         dumper.YAMLVersion = null;
-        dumper.lineBreak = LineBreak.Windows;
+        dumper.lineBreak = LineBreak.windows;
         dumper.dump(node);
         assert(stream.data == "--- 0\r\n...\r\n");
     }
@@ -339,7 +339,7 @@ struct Dumper(Range)
         dumper.explicitEnd = true;
         dumper.explicitStart = true;
         dumper.YAMLVersion = null;
-        dumper.lineBreak = LineBreak.Macintosh;
+        dumper.lineBreak = LineBreak.macintosh;
         dumper.dump(node);
         assert(stream.data == "--- 0\r...\r");
     }


@@ -110,13 +110,13 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     enum Context
     {
         /// Root node of a document.
-        Root,
+        root,
         /// Sequence.
-        Sequence,
+        sequence,
         /// Mapping.
-        MappingNoSimpleKey,
+        mappingNoSimpleKey,
         /// Mapping, in a simple key.
-        MappingSimpleKey
+        mappingSimpleKey,
     }
     /// Current context.
     Context context_;
@@ -157,7 +157,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Analysis result of the current scalar.
     ScalarAnalysis analysis_;
     ///Style of the current scalar.
-    ScalarStyle style_ = ScalarStyle.Invalid;
+    ScalarStyle style_ = ScalarStyle.invalid;
     public:
     @disable int opCmp(ref Emitter);
@@ -258,9 +258,9 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
         if(events_.length == 0){return true;}
         const event = events_.peek();
-        if(event.id == EventID.DocumentStart){return needEvents(1);}
-        if(event.id == EventID.SequenceStart){return needEvents(2);}
-        if(event.id == EventID.MappingStart) {return needEvents(3);}
+        if(event.id == EventID.documentStart){return needEvents(1);}
+        if(event.id == EventID.sequenceStart){return needEvents(2);}
+        if(event.id == EventID.mappingStart) {return needEvents(3);}
         return false;
     }
@@ -278,11 +278,11 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
         while(!events_.iterationOver())
         {
             const event = events_.next();
-            static starts = [EventID.DocumentStart, EventID.SequenceStart, EventID.MappingStart];
-            static ends = [EventID.DocumentEnd, EventID.SequenceEnd, EventID.MappingEnd];
+            static starts = [EventID.documentStart, EventID.sequenceStart, EventID.mappingStart];
+            static ends = [EventID.documentEnd, EventID.sequenceEnd, EventID.mappingEnd];
             if(starts.canFind(event.id)) {++level;}
             else if(ends.canFind(event.id)){--level;}
-            else if(event.id == EventID.StreamStart){level = -1;}
+            else if(event.id == EventID.streamStart){level = -1;}
             if(level < 0)
             {
@@ -324,8 +324,8 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Handle start of a file/stream.
     void expectStreamStart() @safe
     {
-        enforce(eventTypeIs(EventID.StreamStart),
-                new EmitterException("Expected StreamStart, but got " ~ event_.idString));
+        enforce(eventTypeIs(EventID.streamStart),
+                new EmitterException("Expected streamStart, but got " ~ event_.idString));
         writeStreamStart();
         nextExpected(&expectDocumentStart!(Yes.first));
@@ -342,11 +342,11 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Handle start of a document.
     void expectDocumentStart(Flag!"first" first)() @safe
     {
-        enforce(eventTypeIs(EventID.DocumentStart) || eventTypeIs(EventID.StreamEnd),
-                new EmitterException("Expected DocumentStart or StreamEnd, but got "
+        enforce(eventTypeIs(EventID.documentStart) || eventTypeIs(EventID.streamEnd),
+                new EmitterException("Expected documentStart or streamEnd, but got "
                                      ~ event_.idString));
-        if(event_.id == EventID.DocumentStart)
+        if(event_.id == EventID.documentStart)
         {
             const YAMLVersion = event_.value;
             auto tagDirectives = event_.tagDirectives;
@@ -394,7 +394,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
             }
             nextExpected(&expectRootNode);
         }
-        else if(event_.id == EventID.StreamEnd)
+        else if(event_.id == EventID.streamEnd)
         {
             if(openEnded_)
             {
@@ -409,7 +409,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Handle end of a document.
     void expectDocumentEnd() @safe
     {
-        enforce(eventTypeIs(EventID.DocumentEnd),
+        enforce(eventTypeIs(EventID.documentEnd),
                 new EmitterException("Expected DocumentEnd, but got " ~ event_.idString));
         writeIndent();
@@ -425,7 +425,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     void expectRootNode() @safe
     {
         pushState(&expectDocumentEnd);
-        expectNode(Context.Root);
+        expectNode(Context.root);
     }
     ///Handle a mapping node.
@@ -433,13 +433,13 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     //Params: simpleKey = Are we in a simple key?
     void expectMappingNode(const bool simpleKey = false) @safe
     {
-        expectNode(simpleKey ? Context.MappingSimpleKey : Context.MappingNoSimpleKey);
+        expectNode(simpleKey ? Context.mappingSimpleKey : Context.mappingNoSimpleKey);
     }
     ///Handle a sequence node.
     void expectSequenceNode() @safe
     {
-        expectNode(Context.Sequence);
+        expectNode(Context.sequence);
     }
     ///Handle a new node. Context specifies where in the document we are.
@@ -447,17 +447,17 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     {
         context_ = context;
-        const flowCollection = event_.collectionStyle == CollectionStyle.Flow;
+        const flowCollection = event_.collectionStyle == CollectionStyle.flow;
         switch(event_.id)
         {
-            case EventID.Alias: expectAlias(); break;
-            case EventID.Scalar:
+            case EventID.alias_: expectAlias(); break;
+            case EventID.scalar:
                 processAnchor("&");
                 processTag();
                 expectScalar();
                 break;
-            case EventID.SequenceStart:
+            case EventID.sequenceStart:
                 processAnchor("&");
                 processTag();
                 if(flowLevel_ > 0 || canonical_ || flowCollection || checkEmptySequence())
@@ -469,7 +469,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
                     expectBlockSequence();
                 }
                 break;
-            case EventID.MappingStart:
+            case EventID.mappingStart:
                 processAnchor("&");
                 processTag();
                 if(flowLevel_ > 0 || canonical_ || flowCollection || checkEmptyMapping())
@@ -482,8 +482,8 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
                 }
                 break;
             default:
-                throw new EmitterException("Expected Alias, Scalar, SequenceStart or " ~
-                                           "MappingStart, but got: " ~ event_.idString);
+                throw new EmitterException("Expected alias_, scalar, sequenceStart or " ~
+                                           "mappingStart, but got: " ~ event_.idString);
         }
     }
     ///Handle an alias.
@@ -517,7 +517,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Handle a flow sequence item.
     void expectFlowSequenceItem(Flag!"first" first)() @safe
     {
-        if(event_.id == EventID.SequenceEnd)
+        if(event_.id == EventID.sequenceEnd)
         {
             indent_ = popIndent();
             --flowLevel_;
@@ -550,7 +550,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Handle a key in a flow mapping.
     void expectFlowMappingKey(Flag!"first" first)() @safe
     {
-        if(event_.id == EventID.MappingEnd)
+        if(event_.id == EventID.mappingEnd)
         {
             indent_ = popIndent();
             --flowLevel_;
@@ -600,8 +600,8 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Handle a block sequence.
     void expectBlockSequence() @safe
     {
-        const indentless = (context_ == Context.MappingNoSimpleKey ||
-                            context_ == Context.MappingSimpleKey) && !indentation_;
+        const indentless = (context_ == Context.mappingNoSimpleKey ||
+                            context_ == Context.mappingSimpleKey) && !indentation_;
         increaseIndent(No.flow, indentless);
         nextExpected(&expectBlockSequenceItem!(Yes.first));
     }
@@ -609,7 +609,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Handle a block sequence item.
     void expectBlockSequenceItem(Flag!"first" first)() @safe
     {
-        static if(!first) if(event_.id == EventID.SequenceEnd)
+        static if(!first) if(event_.id == EventID.sequenceEnd)
         {
             indent_ = popIndent();
             nextExpected(popState());
@@ -634,7 +634,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Handle a key in a block mapping.
     void expectBlockMappingKey(Flag!"first" first)() @safe
     {
-        static if(!first) if(event_.id == EventID.MappingEnd)
+        static if(!first) if(event_.id == EventID.mappingEnd)
         {
             indent_ = popIndent();
             nextExpected(popState());
@@ -676,27 +676,27 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     ///Check if an empty sequence is next.
     bool checkEmptySequence() const @safe pure nothrow
     {
-        return event_.id == EventID.SequenceStart && events_.length > 0
-               && events_.peek().id == EventID.SequenceEnd;
+        return event_.id == EventID.sequenceStart && events_.length > 0
+               && events_.peek().id == EventID.sequenceEnd;
     }
     ///Check if an empty mapping is next.
     bool checkEmptyMapping() const @safe pure nothrow
     {
-        return event_.id == EventID.MappingStart && events_.length > 0
-               && events_.peek().id == EventID.MappingEnd;
+        return event_.id == EventID.mappingStart && events_.length > 0
+               && events_.peek().id == EventID.mappingEnd;
     }
     ///Check if an empty document is next.
     bool checkEmptyDocument() const @safe pure nothrow
     {
-        if(event_.id != EventID.DocumentStart || events_.length == 0)
+        if(event_.id != EventID.documentStart || events_.length == 0)
         {
             return false;
         }
         const event = events_.peek();
-        const emptyScalar = event.id == EventID.Scalar && (event.anchor is null) &&
+        const emptyScalar = event.id == EventID.scalar && (event.anchor is null) &&
                             (event.tag is null) && event.implicit && event.value == "";
         return emptyScalar;
     }
@@ -706,11 +706,11 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     {
         uint length;
         const id = event_.id;
-        const scalar = id == EventID.Scalar;
-        const collectionStart = id == EventID.MappingStart ||
-                                id == EventID.SequenceStart;
-        if((id == EventID.Alias || scalar || collectionStart)
+        const scalar = id == EventID.scalar;
+        const collectionStart = id == EventID.mappingStart ||
+                                id == EventID.sequenceStart;
+        if((id == EventID.alias_ || scalar || collectionStart)
            && (event_.anchor !is null))
         {
             if(preparedAnchor_ is null)
@@ -734,7 +734,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
         if(length >= 128){return false;}
-        return id == EventID.Alias ||
+        return id == EventID.alias_ ||
                (scalar && !analysis_.flags.empty && !analysis_.flags.multiline) ||
                checkEmptySequence() ||
                checkEmptyMapping();
@@ -744,30 +744,30 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     void processScalar() @safe
     {
         if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);}
-        if(style_ == ScalarStyle.Invalid)
+        if(style_ == ScalarStyle.invalid)
         {
             style_ = chooseScalarStyle();
         }
-        //if(analysis_.flags.multiline && (context_ != Context.MappingSimpleKey) &&
-        //   ([ScalarStyle.Invalid, ScalarStyle.Plain, ScalarStyle.SingleQuoted, ScalarStyle.DoubleQuoted)
+        //if(analysis_.flags.multiline && (context_ != Context.mappingSimpleKey) &&
+        //   ([ScalarStyle.invalid, ScalarStyle.plain, ScalarStyle.singleQuoted, ScalarStyle.doubleQuoted)
         //    .canFind(style_))
         //{
         //    writeIndent();
         //}
         auto writer = ScalarWriter!(Range, CharType)(this, analysis_.scalar,
-                                                     context_ != Context.MappingSimpleKey);
+                                                     context_ != Context.mappingSimpleKey);
         with(writer) final switch(style_)
         {
-            case ScalarStyle.Invalid: assert(false);
-            case ScalarStyle.DoubleQuoted: writeDoubleQuoted(); break;
-            case ScalarStyle.SingleQuoted: writeSingleQuoted(); break;
-            case ScalarStyle.Folded: writeFolded(); break;
-            case ScalarStyle.Literal: writeLiteral(); break;
-            case ScalarStyle.Plain: writePlain(); break;
+            case ScalarStyle.invalid: assert(false);
+            case ScalarStyle.doubleQuoted: writeDoubleQuoted(); break;
+            case ScalarStyle.singleQuoted: writeSingleQuoted(); break;
+            case ScalarStyle.folded: writeFolded(); break;
+            case ScalarStyle.literal: writeLiteral(); break;
+            case ScalarStyle.plain: writePlain(); break;
         }
         analysis_.flags.isNull = true;
-        style_ = ScalarStyle.Invalid;
+        style_ = ScalarStyle.invalid;
     }
 ///Process and write an anchor/alias.
@@ -795,11 +795,11 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
     {
         string tag = event_.tag;
-        if(event_.id == EventID.Scalar)
+        if(event_.id == EventID.scalar)
         {
-            if(style_ == ScalarStyle.Invalid){style_ = chooseScalarStyle();}
+            if(style_ == ScalarStyle.invalid){style_ = chooseScalarStyle();}
             if((!canonical_ || (tag is null)) &&
-               (style_ == ScalarStyle.Plain ? event_.implicit : !event_.implicit && (tag is null)))
+               (style_ == ScalarStyle.plain ? event_.implicit : !event_.implicit && (tag is null)))
             {
                 preparedTag_ = null;
                 return;
@@ -831,28 +831,28 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
         if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);}
         const style = event_.scalarStyle;
-        const invalidOrPlain = style == ScalarStyle.Invalid || style == ScalarStyle.Plain;
-        const block = style == ScalarStyle.Literal || style == ScalarStyle.Folded;
-        const singleQuoted = style == ScalarStyle.SingleQuoted;
-        const doubleQuoted = style == ScalarStyle.DoubleQuoted;
+        const invalidOrPlain = style == ScalarStyle.invalid || style == ScalarStyle.plain;
+        const block = style == ScalarStyle.literal || style == ScalarStyle.folded;
+        const singleQuoted = style == ScalarStyle.singleQuoted;
+        const doubleQuoted = style == ScalarStyle.doubleQuoted;
         const allowPlain = flowLevel_ > 0 ? analysis_.flags.allowFlowPlain
                                           : analysis_.flags.allowBlockPlain;
         //simple empty or multiline scalars can't be written in plain style
-        const simpleNonPlain = (context_ == Context.MappingSimpleKey) &&
+        const simpleNonPlain = (context_ == Context.mappingSimpleKey) &&
                                (analysis_.flags.empty || analysis_.flags.multiline);
         if(doubleQuoted || canonical_)
         {
-            return ScalarStyle.DoubleQuoted;
+            return ScalarStyle.doubleQuoted;
         }
         if(invalidOrPlain && event_.implicit && !simpleNonPlain && allowPlain)
         {
-            return ScalarStyle.Plain;
+            return ScalarStyle.plain;
         }
-        if(block && flowLevel_ == 0 && context_ != Context.MappingSimpleKey &&
+        if(block && flowLevel_ == 0 && context_ != Context.mappingSimpleKey &&
            analysis_.flags.allowBlock)
         {
             return style;
@@ -860,12 +860,12 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
         if((invalidOrPlain || singleQuoted) &&
            analysis_.flags.allowSingleQuoted &&
-           !(context_ == Context.MappingSimpleKey && analysis_.flags.multiline))
+           !(context_ == Context.mappingSimpleKey && analysis_.flags.multiline))
         {
-            return ScalarStyle.SingleQuoted;
+            return ScalarStyle.singleQuoted;
         }
-        return ScalarStyle.DoubleQuoted;
+        return ScalarStyle.doubleQuoted;
     }
     ///Prepare YAML version string for output.
@@ -1548,7 +1548,7 @@ struct ScalarWriter(Range, CharType)
     ///Write text as plain scalar.
     void writePlain() @safe
     {
-        if(emitter_.context_ == Emitter!(Range, CharType).Context.Root){emitter_.openEnded_ = true;}
+        if(emitter_.context_ == Emitter!(Range, CharType).Context.root){emitter_.openEnded_ = true;}
         if(text_ == ""){return;}
         if(!emitter_.whitespace_)
         {


@@ -23,17 +23,17 @@ package:
 ///Event types.
 enum EventID : ubyte
 {
-    Invalid = 0,   /// Invalid (uninitialized) event.
-    StreamStart,   /// Stream start
-    StreamEnd,     /// Stream end
-    DocumentStart, /// Document start
-    DocumentEnd,   /// Document end
-    Alias,         /// Alias
-    Scalar,        /// Scalar
-    SequenceStart, /// Sequence start
-    SequenceEnd,   /// Sequence end
-    MappingStart,  /// Mapping start
-    MappingEnd     /// Mapping end
+    invalid = 0,   /// Invalid (uninitialized) event.
+    streamStart,   /// Stream start
+    streamEnd,     /// Stream end
+    documentStart, /// Document start
+    documentEnd,   /// Document end
+    alias_,        /// Alias
+    scalar,        /// Scalar
+    sequenceStart, /// Sequence start
+    sequenceEnd,   /// Sequence end
+    mappingStart,  /// Mapping start
+    mappingEnd     /// Mapping end
 }
 /**
@@ -65,9 +65,9 @@ struct Event
         TagDirective[] _tagDirectives;
     }
     ///Event type.
-    EventID id = EventID.Invalid;
+    EventID id = EventID.invalid;
     ///Style of scalar event, if this is a scalar event.
-    ScalarStyle scalarStyle = ScalarStyle.Invalid;
+    ScalarStyle scalarStyle = ScalarStyle.invalid;
     union
     {
         ///Should the tag be implicitly resolved?
@@ -80,26 +80,26 @@ struct Event
         bool explicitDocument;
     }
     ///Collection style, if this is a SequenceStart or MappingStart.
-    CollectionStyle collectionStyle = CollectionStyle.Invalid;
+    CollectionStyle collectionStyle = CollectionStyle.invalid;
     ///Is this a null (uninitialized) event?
-    @property bool isNull() const pure @safe nothrow {return id == EventID.Invalid;}
+    @property bool isNull() const pure @safe nothrow {return id == EventID.invalid;}
     ///Get string representation of the token ID.
     @property string idString() const @safe {return to!string(id);}
     auto ref anchor() inout @trusted pure {
-        assert(id != EventID.DocumentStart, "DocumentStart events cannot have anchors.");
+        assert(id != EventID.documentStart, "DocumentStart events cannot have anchors.");
         return _anchor;
     }
     auto ref tag() inout @trusted pure {
-        assert(id != EventID.DocumentStart, "DocumentStart events cannot have tags.");
+        assert(id != EventID.documentStart, "DocumentStart events cannot have tags.");
         return _tag;
     }
     auto ref tagDirectives() inout @trusted pure {
-        assert(id == EventID.DocumentStart, "Only DocumentStart events have tag directives.");
+        assert(id == EventID.documentStart, "Only DocumentStart events have tag directives.");
         return _tagDirectives;
     }
@@ -138,8 +138,8 @@ Event collectionStartEvent(EventID id)
     (const Mark start, const Mark end, const string anchor, const string tag,
      const bool implicit, const CollectionStyle style) pure @safe nothrow
 {
-    static assert(id == EventID.SequenceStart || id == EventID.SequenceEnd ||
-                  id == EventID.MappingStart || id == EventID.MappingEnd);
+    static assert(id == EventID.sequenceStart || id == EventID.sequenceEnd ||
+                  id == EventID.mappingStart || id == EventID.mappingEnd);
     Event result;
     result.startMark = start;
     result.endMark = end;
@@ -163,19 +163,19 @@ Event streamStartEvent(const Mark start, const Mark end)
     Event result;
     result.startMark = start;
     result.endMark = end;
-    result.id = EventID.StreamStart;
+    result.id = EventID.streamStart;
     return result;
 }
 ///Aliases for simple events.
-alias streamEndEvent = event!(EventID.StreamEnd);
-alias aliasEvent = event!(EventID.Alias);
-alias sequenceEndEvent = event!(EventID.SequenceEnd);
-alias mappingEndEvent = event!(EventID.MappingEnd);
+alias streamEndEvent = event!(EventID.streamEnd);
+alias aliasEvent = event!(EventID.alias_);
+alias sequenceEndEvent = event!(EventID.sequenceEnd);
+alias mappingEndEvent = event!(EventID.mappingEnd);
 ///Aliases for collection start events.
-alias sequenceStartEvent = collectionStartEvent!(EventID.SequenceStart);
-alias mappingStartEvent = collectionStartEvent!(EventID.MappingStart);
+alias sequenceStartEvent = collectionStartEvent!(EventID.sequenceStart);
+alias mappingStartEvent = collectionStartEvent!(EventID.mappingStart);
 /**
  * Construct a document start event.
@@ -193,7 +193,7 @@ Event documentStartEvent(const Mark start, const Mark end, const bool explicit,
     result.value = YAMLVersion;
     result.startMark = start;
     result.endMark = end;
-    result.id = EventID.DocumentStart;
+    result.id = EventID.documentStart;
     result.explicitDocument = explicit;
     result.tagDirectives = tagDirectives;
     return result;
@@ -211,7 +211,7 @@ Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pu
     Event result;
     result.startMark = start;
     result.endMark = end;
-    result.id = EventID.DocumentEnd;
+    result.id = EventID.documentEnd;
     result.explicitDocument = explicit;
     return result;
 }
@@ -227,7 +227,7 @@ Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pu
 /// style = Scalar style.
 Event scalarEvent(const Mark start, const Mark end, const string anchor, const string tag,
                   const bool implicit, const string value,
-                  const ScalarStyle style = ScalarStyle.Invalid) @safe pure nothrow @nogc
+                  const ScalarStyle style = ScalarStyle.invalid) @safe pure nothrow @nogc
 {
     Event result;
     result.value = value;
@@ -237,7 +237,7 @@ Event scalarEvent(const Mark start, const Mark end, const string anchor, const s
     result.anchor = anchor;
     result.tag = tag;
-    result.id = EventID.Scalar;
+    result.id = EventID.scalar;
     result.scalarStyle = style;
     result.implicit = implicit;
     return result;


@@ -33,13 +33,13 @@ ScalarStyle scalarStyleHack(ref const(Node) node) @safe nothrow
     Node node = Loader.fromString(`"42"`).load(); // loaded from a file
     if(node.isScalar)
     {
-        assert(node.scalarStyleHack() == ScalarStyle.DoubleQuoted);
+        assert(node.scalarStyleHack() == ScalarStyle.doubleQuoted);
     }
 }
 @safe unittest
 {
     auto node = Node(5);
-    assert(node.scalarStyleHack() == ScalarStyle.Invalid);
+    assert(node.scalarStyleHack() == ScalarStyle.invalid);
 }
 /** Get the collection style a YAML node had in the file it was loaded from.
@@ -56,7 +56,7 @@ CollectionStyle collectionStyleHack(ref const(Node) node) @safe nothrow
 @safe unittest
 {
     auto node = Node([1, 2, 3, 4, 5]);
-    assert(node.collectionStyleHack() == CollectionStyle.Invalid);
+    assert(node.collectionStyleHack() == CollectionStyle.invalid);
 }
@@ -75,8 +75,8 @@ void scalarStyleHack(ref Node node, const ScalarStyle rhs) @safe nothrow
 @safe unittest
 {
     auto node = Node(5);
-    node.scalarStyleHack = ScalarStyle.DoubleQuoted;
-    assert(node.scalarStyleHack() == ScalarStyle.DoubleQuoted);
+    node.scalarStyleHack = ScalarStyle.doubleQuoted;
+    assert(node.scalarStyleHack() == ScalarStyle.doubleQuoted);
 }
 /** Set the collection style node should have when written to a file.
@@ -94,6 +94,6 @@ void collectionStyleHack(ref Node node, const CollectionStyle rhs) @safe nothrow
 @safe unittest
 {
     auto node = Node([1, 2, 3, 4, 5]);
-    node.collectionStyleHack = CollectionStyle.Block;
-    assert(node.collectionStyleHack() == CollectionStyle.Block);
+    node.collectionStyleHack = CollectionStyle.block;
+    assert(node.collectionStyleHack() == CollectionStyle.block);
 }


@@ -11,11 +11,11 @@ module dyaml.linebreak;
 enum LineBreak
 {
     ///Unix line break ("\n").
-    Unix,
+    unix,
     ///Windows line break ("\r\n").
-    Windows,
+    windows,
     ///Macintosh line break ("\r").
-    Macintosh
+    macintosh
 }
 package:
@@ -25,8 +25,8 @@ string lineBreak(in LineBreak b) pure @safe nothrow
 {
     final switch(b)
     {
-        case LineBreak.Unix: return "\n";
-        case LineBreak.Windows: return "\r\n";
-        case LineBreak.Macintosh: return "\r";
+        case LineBreak.unix: return "\n";
+        case LineBreak.windows: return "\r\n";
+        case LineBreak.macintosh: return "\r";
     }
 }
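
Because these enum members are part of the public API, downstream code that names them needs the same lowercase-first-letter update. A hypothetical caller, assuming the Dumper usage shown in the hunks above:

    auto stream = new Appender!string();
    auto dumper = dumper(stream);
    dumper.lineBreak = LineBreak.windows; // formerly LineBreak.Windows
    dumper.dump(node);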


@@ -43,9 +43,9 @@ class NodeException : YAMLException
 // Node kinds.
 package enum NodeID : ubyte
 {
-    Scalar,
-    Sequence,
-    Mapping
+    scalar,
+    sequence,
+    mapping
 }
 /// Null YAML type. Used in nodes with _null values.
@@ -181,9 +181,9 @@ struct Node
     // Tag of the node.
     string tag_;
     // Node scalar style. Used to remember style this node was loaded with.
-    ScalarStyle scalarStyle = ScalarStyle.Invalid;
+    ScalarStyle scalarStyle = ScalarStyle.invalid;
     // Node collection style. Used to remember style this node was loaded with.
-    CollectionStyle collectionStyle = CollectionStyle.Invalid;
+    CollectionStyle collectionStyle = CollectionStyle.invalid;
     static assert(Value.sizeof <= 24, "Unexpected YAML value size");
     static assert(Node.sizeof <= 56, "Unexpected YAML node size");
@@ -2010,7 +2010,7 @@ struct Node
         import dyaml.dumper;
         auto stream = new Appender!string();
         auto node = Node([1, 2, 3, 4, 5]);
-        node.setStyle(CollectionStyle.Block);
+        node.setStyle(CollectionStyle.block);
         auto dumper = dumper(stream);
         dumper.dump(node);
@@ -2021,15 +2021,15 @@ struct Node
         import dyaml.dumper;
         auto stream = new Appender!string();
         auto node = Node(4);
-        node.setStyle(ScalarStyle.Literal);
+        node.setStyle(ScalarStyle.literal);
         auto dumper = dumper(stream);
         dumper.dump(node);
     }
     @safe unittest
     {
-        assertThrown!NodeException(Node(4).setStyle(CollectionStyle.Block));
-        assertThrown!NodeException(Node([4]).setStyle(ScalarStyle.Literal));
+        assertThrown!NodeException(Node(4).setStyle(CollectionStyle.block));
+        assertThrown!NodeException(Node([4]).setStyle(ScalarStyle.literal));
     }
     @safe unittest
     {
@@ -2037,7 +2037,7 @@ struct Node
         {
             auto stream = new Appender!string();
             auto node = Node([1, 2, 3, 4, 5]);
-            node.setStyle(CollectionStyle.Block);
+            node.setStyle(CollectionStyle.block);
             auto dumper = dumper(stream);
             dumper.explicitEnd = false;
             dumper.explicitStart = false;
@@ -2050,7 +2050,7 @@ struct Node
         {
             auto stream = new Appender!string();
             auto node = Node([1, 2, 3, 4, 5]);
-            node.setStyle(CollectionStyle.Flow);
+            node.setStyle(CollectionStyle.flow);
             auto dumper = dumper(stream);
             dumper.explicitEnd = false;
             dumper.explicitStart = false;
@@ -2063,7 +2063,7 @@ struct Node
         {
             auto stream = new Appender!string();
             auto node = Node(1);
-            node.setStyle(ScalarStyle.SingleQuoted);
+            node.setStyle(ScalarStyle.singleQuoted);
             auto dumper = dumper(stream);
             dumper.explicitEnd = false;
             dumper.explicitStart = false;
@@ -2075,7 +2075,7 @@ struct Node
         {
             auto stream = new Appender!string();
             auto node = Node(1);
-            node.setStyle(ScalarStyle.DoubleQuoted);
+            node.setStyle(ScalarStyle.doubleQuoted);
             auto dumper = dumper(stream);
             dumper.explicitEnd = false;
             dumper.explicitStart = false;


@@ -169,7 +169,7 @@ final class Parser
     */
    void popFront() @safe
    {
-        currentEvent_.id = EventID.Invalid;
+        currentEvent_.id = EventID.invalid;
        ensureState();
    }
@@ -231,8 +231,8 @@ final class Parser
    Event parseImplicitDocumentStart() @safe
    {
        // Parse an implicit document.
-        if(!scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
-                                TokenID.StreamEnd))
+        if(!scanner_.checkToken(TokenID.directive, TokenID.documentStart,
+                                TokenID.streamEnd))
        {
            tagDirectives_ = defaultTagDirectives_;
            const token = scanner_.peekToken();
@@ -249,15 +249,15 @@ final class Parser
    Event parseDocumentStart() @trusted
    {
        //Parse any extra document end indicators.
-        while(scanner_.checkToken(TokenID.DocumentEnd)){scanner_.getToken();}
+        while(scanner_.checkToken(TokenID.documentEnd)){scanner_.getToken();}
        //Parse an explicit document.
-        if(!scanner_.checkToken(TokenID.StreamEnd))
+        if(!scanner_.checkToken(TokenID.streamEnd))
        {
            const startMark = scanner_.peekToken().startMark;
            auto tagDirectives = processDirectives();
-            enforce(scanner_.checkToken(TokenID.DocumentStart),
+            enforce(scanner_.checkToken(TokenID.documentStart),
                    new ParserException("Expected document start but found " ~
                                        scanner_.peekToken().idString,
                                        scanner_.peekToken().startMark));
@@ -282,7 +282,7 @@ final class Parser
    Event parseDocumentEnd() @safe
    {
        Mark startMark = scanner_.peekToken().startMark;
-        const bool explicit = scanner_.checkToken(TokenID.DocumentEnd);
+        const bool explicit = scanner_.checkToken(TokenID.documentEnd);
        Mark endMark = explicit ? scanner_.getToken().endMark : startMark;
        state_ = &parseDocumentStart;
@@ -293,8 +293,8 @@ final class Parser
    ///Parse document content.
    Event parseDocumentContent() @safe
    {
-        if(scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
-                               TokenID.DocumentEnd, TokenID.StreamEnd))
+        if(scanner_.checkToken(TokenID.directive, TokenID.documentStart,
+                               TokenID.documentEnd, TokenID.streamEnd))
        {
            state_ = popState();
            return processEmptyScalar(scanner_.peekToken().startMark);
@@ -310,11 +310,11 @@ final class Parser
        tagDirectives_.length = 0;
        // Process directives.
-        while(scanner_.checkToken(TokenID.Directive))
+        while(scanner_.checkToken(TokenID.directive))
        {
            const token = scanner_.getToken();
            string value = token.value.idup;
-            if(token.directive == DirectiveType.YAML)
+            if(token.directive == DirectiveType.yaml)
            {
                enforce(YAMLVersion_ is null,
                        new ParserException("Duplicate YAML directive", token.startMark));
@@ -324,7 +324,7 @@ final class Parser
                                            token.startMark));
                YAMLVersion_ = value;
            }
-            else if(token.directive == DirectiveType.TAG)
+            else if(token.directive == DirectiveType.tag)
            {
                auto handle = value[0 .. token.valueDivider];
@@ -382,7 +382,7 @@ final class Parser
        const Flag!"indentlessSequence" indentlessSequence = No.indentlessSequence)
        @trusted
    {
-        if(scanner_.checkToken(TokenID.Alias))
+        if(scanner_.checkToken(TokenID.alias_))
        {
            const token = scanner_.getToken();
            state_ = popState();
@@ -404,7 +404,7 @@ final class Parser
            invalidMarks = false;
            const token = scanner_.getToken();
            if(first){startMark = token.startMark;}
-            if(id == TokenID.Tag)
+            if(id == TokenID.tag)
            {
                tagMark = token.startMark;
                tagHandleEnd = token.valueDivider;
@@ -415,8 +415,8 @@ final class Parser
        }
        //Anchor and/or tag can be in any order.
-        if(get(TokenID.Anchor, Yes.first, anchor)){get(TokenID.Tag, No.first, tag);}
-        else if(get(TokenID.Tag, Yes.first, tag)) {get(TokenID.Anchor, No.first, anchor);}
+        if(get(TokenID.anchor, Yes.first, anchor)){get(TokenID.tag, No.first, tag);}
+        else if(get(TokenID.tag, Yes.first, tag)) {get(TokenID.anchor, No.first, anchor);}
        if(tag !is null){tag = processTag(tag, tagHandleEnd, startMark, tagMark);}
@@ -427,57 +427,57 @@ final class Parser
        bool implicit = (tag is null || tag == "!");
-        if(indentlessSequence && scanner_.checkToken(TokenID.BlockEntry))
+        if(indentlessSequence && scanner_.checkToken(TokenID.blockEntry))
        {
            state_ = &parseIndentlessSequenceEntry;
            return sequenceStartEvent
                (startMark, scanner_.peekToken().endMark, anchor,
-                 tag, implicit, CollectionStyle.Block);
+                 tag, implicit, CollectionStyle.block);
        }
-        if(scanner_.checkToken(TokenID.Scalar))
+        if(scanner_.checkToken(TokenID.scalar))
        {
            auto token = scanner_.getToken();
-            auto value = token.style == ScalarStyle.DoubleQuoted
+            auto value = token.style == ScalarStyle.doubleQuoted
                         ? handleDoubleQuotedScalarEscapes(token.value)
                         : cast(string)token.value;
-            implicit = (token.style == ScalarStyle.Plain && tag is null) || tag == "!";
+            implicit = (token.style == ScalarStyle.plain && tag is null) || tag == "!";
            state_ = popState();
            return scalarEvent(startMark, token.endMark, anchor, tag,
                               implicit, value, token.style);
        }
-        if(scanner_.checkToken(TokenID.FlowSequenceStart))
+        if(scanner_.checkToken(TokenID.flowSequenceStart))
        {
            endMark = scanner_.peekToken().endMark;
            state_ = &parseFlowSequenceEntry!(Yes.first);
            return sequenceStartEvent(startMark, endMark, anchor, tag,
-                                      implicit, CollectionStyle.Flow);
+                                      implicit, CollectionStyle.flow);
        }
-        if(scanner_.checkToken(TokenID.FlowMappingStart))
+        if(scanner_.checkToken(TokenID.flowMappingStart))
        {
            endMark = scanner_.peekToken().endMark;
            state_ = &parseFlowMappingKey!(Yes.first);
            return mappingStartEvent(startMark, endMark, anchor, tag,
-                                     implicit, CollectionStyle.Flow);
+                                     implicit, CollectionStyle.flow);
        }
-        if(block && scanner_.checkToken(TokenID.BlockSequenceStart))
+        if(block && scanner_.checkToken(TokenID.blockSequenceStart))
        {
            endMark = scanner_.peekToken().endMark;
            state_ = &parseBlockSequenceEntry!(Yes.first);
            return sequenceStartEvent(startMark, endMark, anchor, tag,
-                                      implicit, CollectionStyle.Block);
+                                      implicit, CollectionStyle.block);
        }
-        if(block && scanner_.checkToken(TokenID.BlockMappingStart))
+        if(block && scanner_.checkToken(TokenID.blockMappingStart))
        {
            endMark = scanner_.peekToken().endMark;
            state_ = &parseBlockMappingKey!(Yes.first);
            return mappingStartEvent(startMark, endMark, anchor, tag,
-                                     implicit, CollectionStyle.Block);
+                                     implicit, CollectionStyle.block);
        }
        if(anchor !is null || tag !is null)
@@ -622,10 +622,10 @@ final class Parser
    {
        static if(first){pushMark(scanner_.getToken().startMark);}
-        if(scanner_.checkToken(TokenID.BlockEntry))
+        if(scanner_.checkToken(TokenID.blockEntry))
        {
            const token = scanner_.getToken();
-            if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.BlockEnd))
+            if(!scanner_.checkToken(TokenID.blockEntry, TokenID.blockEnd))
            {
                pushState(&parseBlockSequenceEntry!(No.first));
                return parseBlockNode();
@@ -635,7 +635,7 @@ final class Parser
            return processEmptyScalar(token.endMark);
        }
-        if(!scanner_.checkToken(TokenID.BlockEnd))
+        if(!scanner_.checkToken(TokenID.blockEnd))
        {
            const token = scanner_.peekToken();
            throw new ParserException("While parsing a block collection", marks_.data.back,
@@ -654,12 +654,12 @@ final class Parser
    ///Parse an entry of an indentless sequence.
    Event parseIndentlessSequenceEntry() @safe
    {
-        if(scanner_.checkToken(TokenID.BlockEntry))
+        if(scanner_.checkToken(TokenID.blockEntry))
        {
            const token = scanner_.getToken();
-            if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.Key,
-                                    TokenID.Value, TokenID.BlockEnd))
+            if(!scanner_.checkToken(TokenID.blockEntry, TokenID.key,
+                                    TokenID.value, TokenID.blockEnd))
            {
                pushState(&parseIndentlessSequenceEntry);
                return parseBlockNode();
@@ -686,11 +686,11 @@ final class Parser
    {
        static if(first){pushMark(scanner_.getToken().startMark);}
-        if(scanner_.checkToken(TokenID.Key))
+        if(scanner_.checkToken(TokenID.key))
        {
            const token = scanner_.getToken();
-            if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
+            if(!scanner_.checkToken(TokenID.key, TokenID.value, TokenID.blockEnd))
            {
                pushState(&parseBlockMappingValue);
                return parseBlockNodeOrIndentlessSequence();
@@ -700,7 +700,7 @@ final class Parser
            return processEmptyScalar(token.endMark);
        }
-        if(!scanner_.checkToken(TokenID.BlockEnd))
+        if(!scanner_.checkToken(TokenID.blockEnd))
        {
            const token = scanner_.peekToken();
            throw new ParserException("While parsing a block mapping", marks_.data.back,
@@ -717,11 +717,11 @@ final class Parser
    ///Parse a value in a block mapping.
    Event parseBlockMappingValue() @safe
    {
-        if(scanner_.checkToken(TokenID.Value))
+        if(scanner_.checkToken(TokenID.value))
        {
            const token = scanner_.getToken();
-            if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
+            if(!scanner_.checkToken(TokenID.key, TokenID.value, TokenID.blockEnd))
            {
                pushState(&parseBlockMappingKey!(No.first));
                return parseBlockNodeOrIndentlessSequence();
@@ -753,11 +753,11 @@ final class Parser
    {
        static if(first){pushMark(scanner_.getToken().startMark);}
-        if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
+        if(!scanner_.checkToken(TokenID.flowSequenceEnd))
        {
            static if(!first)
            {
-                if(scanner_.checkToken(TokenID.FlowEntry))
+                if(scanner_.checkToken(TokenID.flowEntry))
                {
                    scanner_.getToken();
                }
@@ -770,14 +770,14 @@ final class Parser
            }
        }
-        if(scanner_.checkToken(TokenID.Key))
+        if(scanner_.checkToken(TokenID.key))
        {
            const token = scanner_.peekToken();
            state_ = &parseFlowSequenceEntryMappingKey;
            return mappingStartEvent(token.startMark, token.endMark,
-                                     null, null, true, CollectionStyle.Flow);
+                                     null, null, true, CollectionStyle.flow);
        }
-        else if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
+        else if(!scanner_.checkToken(TokenID.flowSequenceEnd))
        {
            pushState(&parseFlowSequenceEntry!(No.first));
            return parseFlowNode();
@@ -795,8 +795,8 @@ final class Parser
        {
            const token = scanner_.getToken();
-            if(!scanner_.checkToken(TokenID.Value, TokenID.FlowEntry,
-                                    TokenID.FlowSequenceEnd))
+            if(!scanner_.checkToken(TokenID.value, TokenID.flowEntry,
+                                    TokenID.flowSequenceEnd))
            {
                pushState(nextState);
                return parseFlowNode();
@@ -816,10 +816,10 @@ final class Parser
    Event parseFlowValue(TokenID checkId, in Event delegate() @safe nextState)
        @safe
    {
-        if(scanner_.checkToken(TokenID.Value))
+        if(scanner_.checkToken(TokenID.value))
        {
            const token = scanner_.getToken();
-            if(!scanner_.checkToken(TokenID.FlowEntry, checkId))
+            if(!scanner_.checkToken(TokenID.flowEntry, checkId))
            {
                pushState(nextState);
                return parseFlowNode();
@@ -836,7 +836,7 @@ final class Parser
    ///Parse a mapping value in an entry in a flow sequence.
    Event parseFlowSequenceEntryMappingValue() @safe
    {
-        return parseFlowValue(TokenID.FlowSequenceEnd,
+        return parseFlowValue(TokenID.flowSequenceEnd,
                              &parseFlowSequenceEntryMappingEnd);
    }
@@ -861,11 +861,11 @@ final class Parser
    {
        static if(first){pushMark(scanner_.getToken().startMark);}
-        if(!scanner_.checkToken(TokenID.FlowMappingEnd))
+        if(!scanner_.checkToken(TokenID.flowMappingEnd))
        {
            static if(!first)
            {
-                if(scanner_.checkToken(TokenID.FlowEntry))
+                if(scanner_.checkToken(TokenID.flowEntry))
                {
                    scanner_.getToken();
                }
@@ -878,12 +878,12 @@ final class Parser
            }
        }
-        if(scanner_.checkToken(TokenID.Key))
+        if(scanner_.checkToken(TokenID.key))
        {
            return parseFlowKey(&parseFlowMappingValue);
        }
-        if(!scanner_.checkToken(TokenID.FlowMappingEnd))
+        if(!scanner_.checkToken(TokenID.flowMappingEnd))
        {
            pushState(&parseFlowMappingEmptyValue);
            return parseFlowNode();
@@ -899,7 +899,7 @@ final class Parser
    ///Parse a value in a flow mapping.
    Event parseFlowMappingValue() @safe
    {
-        return parseFlowValue(TokenID.FlowMappingEnd, &parseFlowMappingKey!(No.first));
+        return parseFlowValue(TokenID.flowMappingEnd, &parseFlowMappingKey!(No.first));
    }
    ///Parse an empty value in a flow mapping.


@ -50,9 +50,9 @@ final class Representer
// Representer functions indexed by types. // Representer functions indexed by types.
Node function(ref Node, Representer) @safe[TypeInfo] representers_; Node function(ref Node, Representer) @safe[TypeInfo] representers_;
// Default style for scalar nodes. // Default style for scalar nodes.
ScalarStyle defaultScalarStyle_ = ScalarStyle.Invalid; ScalarStyle defaultScalarStyle_ = ScalarStyle.invalid;
// Default style for collection nodes. // Default style for collection nodes.
CollectionStyle defaultCollectionStyle_ = CollectionStyle.Invalid; CollectionStyle defaultCollectionStyle_ = CollectionStyle.invalid;
public: public:
@disable bool opEquals(ref Representer); @disable bool opEquals(ref Representer);
@ -81,13 +81,13 @@ final class Representer
addRepresenter!SysTime(&representSysTime); addRepresenter!SysTime(&representSysTime);
} }
///Set default _style for scalars. If style is $(D ScalarStyle.Invalid), the _style is chosen automatically. ///Set default _style for scalars. If style is $(D ScalarStyle.invalid), the _style is chosen automatically.
@property void defaultScalarStyle(ScalarStyle style) pure @safe nothrow @property void defaultScalarStyle(ScalarStyle style) pure @safe nothrow
{ {
defaultScalarStyle_ = style; defaultScalarStyle_ = style;
} }
///Set default _style for collections. If style is $(D CollectionStyle.Invalid), the _style is chosen automatically. ///Set default _style for collections. If style is $(D CollectionStyle.invalid), the _style is chosen automatically.
@property void defaultCollectionStyle(CollectionStyle style) pure @safe nothrow @property void defaultCollectionStyle(CollectionStyle style) pure @safe nothrow
{ {
defaultCollectionStyle_ = style; defaultCollectionStyle_ = style;
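A minimal usage sketch of these setters under the renamed members, assuming the Representer is later attached to a Dumper as in this library's documentation:

    // Prefer plain scalars and block collections for any node that
    // does not request a specific style.
    auto representer = new Representer;
    representer.defaultScalarStyle = ScalarStyle.plain;
    representer.defaultCollectionStyle = CollectionStyle.block;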
@ -231,9 +231,9 @@ final class Representer
* Returns: The represented node. * Returns: The represented node.
*/ */
Node representScalar(string tag, string scalar, Node representScalar(string tag, string scalar,
ScalarStyle style = ScalarStyle.Invalid) @safe ScalarStyle style = ScalarStyle.invalid) @safe
{ {
if(style == ScalarStyle.Invalid){style = defaultScalarStyle_;} if(style == ScalarStyle.invalid){style = defaultScalarStyle_;}
auto newNode = Node(scalar, tag); auto newNode = Node(scalar, tag);
newNode.scalarStyle = style; newNode.scalarStyle = style;
return newNode; return newNode;
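A hedged sketch of calling representScalar from a user-defined representer function; the function shape follows the representers_ table above, and the tag is the standard YAML string tag:

    // Represent a string node as an explicitly double-quoted scalar.
    Node representShout(ref Node node, Representer representer) @safe
    {
        return representer.representScalar("tag:yaml.org,2002:str",
                                           node.as!string,
                                           ScalarStyle.doubleQuoted);
    }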
@ -285,26 +285,26 @@ final class Representer
* Throws: $(D RepresenterException) if a child could not be represented. * Throws: $(D RepresenterException) if a child could not be represented.
*/ */
Node representSequence(string tag, Node[] sequence, Node representSequence(string tag, Node[] sequence,
CollectionStyle style = CollectionStyle.Invalid) @safe CollectionStyle style = CollectionStyle.invalid) @safe
{ {
Node[] value; Node[] value;
value.length = sequence.length; value.length = sequence.length;
auto bestStyle = CollectionStyle.Flow; auto bestStyle = CollectionStyle.flow;
foreach(idx, ref item; sequence) foreach(idx, ref item; sequence)
{ {
value[idx] = representData(item); value[idx] = representData(item);
const isScalar = value[idx].isScalar; const isScalar = value[idx].isScalar;
const s = value[idx].scalarStyle; const s = value[idx].scalarStyle;
if(!isScalar || (s != ScalarStyle.Invalid && s != ScalarStyle.Plain)) if(!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain))
{ {
bestStyle = CollectionStyle.Block; bestStyle = CollectionStyle.block;
} }
} }
if(style == CollectionStyle.Invalid) if(style == CollectionStyle.invalid)
{ {
style = defaultCollectionStyle_ != CollectionStyle.Invalid style = defaultCollectionStyle_ != CollectionStyle.invalid
? defaultCollectionStyle_ ? defaultCollectionStyle_
: bestStyle; : bestStyle;
} }
@ -336,7 +336,7 @@ final class Representer
auto nodes = [Node(value.x), Node(value.y), Node(value.z)]; auto nodes = [Node(value.x), Node(value.y), Node(value.z)];
//use flow style //use flow style
return representer.representSequence("!mystruct.tag", nodes, return representer.representSequence("!mystruct.tag", nodes,
CollectionStyle.Flow); CollectionStyle.flow);
} }
auto dumper = dumper(new Appender!string); auto dumper = dumper(new Appender!string);
@ -360,12 +360,12 @@ final class Representer
* Throws: $(D RepresenterException) if a child could not be represented. * Throws: $(D RepresenterException) if a child could not be represented.
*/ */
Node representMapping(string tag, Node.Pair[] pairs, Node representMapping(string tag, Node.Pair[] pairs,
CollectionStyle style = CollectionStyle.Invalid) @safe CollectionStyle style = CollectionStyle.invalid) @safe
{ {
Node.Pair[] value; Node.Pair[] value;
value.length = pairs.length; value.length = pairs.length;
auto bestStyle = CollectionStyle.Flow; auto bestStyle = CollectionStyle.flow;
foreach(idx, ref pair; pairs) foreach(idx, ref pair; pairs)
{ {
value[idx] = Node.Pair(representData(pair.key), representData(pair.value)); value[idx] = Node.Pair(representData(pair.key), representData(pair.value));
@ -374,20 +374,20 @@ final class Representer
const keyStyle = value[idx].key.scalarStyle; const keyStyle = value[idx].key.scalarStyle;
const valStyle = value[idx].value.scalarStyle; const valStyle = value[idx].value.scalarStyle;
if(!keyScalar || if(!keyScalar ||
(keyStyle != ScalarStyle.Invalid && keyStyle != ScalarStyle.Plain)) (keyStyle != ScalarStyle.invalid && keyStyle != ScalarStyle.plain))
{ {
bestStyle = CollectionStyle.Block; bestStyle = CollectionStyle.block;
} }
if(!valScalar || if(!valScalar ||
(valStyle != ScalarStyle.Invalid && valStyle != ScalarStyle.Plain)) (valStyle != ScalarStyle.invalid && valStyle != ScalarStyle.plain))
{ {
bestStyle = CollectionStyle.Block; bestStyle = CollectionStyle.block;
} }
} }
if(style == CollectionStyle.Invalid) if(style == CollectionStyle.invalid)
{ {
style = defaultCollectionStyle_ != CollectionStyle.Invalid style = defaultCollectionStyle_ != CollectionStyle.invalid
? defaultCollectionStyle_ ? defaultCollectionStyle_
: bestStyle; : bestStyle;
} }
@ -445,11 +445,11 @@ final class Representer
if(data.tag_ !is null){result.tag_ = data.tag_;} if(data.tag_ !is null){result.tag_ = data.tag_;}
//Remember style if this was loaded before. //Remember style if this was loaded before.
if(data.scalarStyle != ScalarStyle.Invalid) if(data.scalarStyle != ScalarStyle.invalid)
{ {
result.scalarStyle = data.scalarStyle; result.scalarStyle = data.scalarStyle;
} }
if(data.collectionStyle != CollectionStyle.Invalid) if(data.collectionStyle != CollectionStyle.invalid)
{ {
result.collectionStyle = data.collectionStyle; result.collectionStyle = data.collectionStyle;
} }
@ -487,7 +487,7 @@ Node representBytes(ref Node node, Representer representer) @safe
if(value is null){return representNull(node, representer);} if(value is null){return representNull(node, representer);}
return representer.representScalar("tag:yaml.org,2002:binary", return representer.representScalar("tag:yaml.org,2002:binary",
Base64.encode(value).idup, Base64.encode(value).idup,
ScalarStyle.Literal); ScalarStyle.literal);
} }
///Represent a bool _node as a bool scalar. ///Represent a bool _node as a bool scalar.

View file

@ -138,7 +138,7 @@ final class Resolver
{ {
if((tag !is null) && tag != "!"){return tag;} if((tag !is null) && tag != "!"){return tag;}
if(kind == NodeID.Scalar) if(kind == NodeID.scalar)
{ {
if(!implicit){return defaultScalarTag_;} if(!implicit){return defaultScalarTag_;}
@ -156,8 +156,8 @@ final class Resolver
} }
return defaultScalarTag_; return defaultScalarTag_;
} }
else if(kind == NodeID.Sequence){return defaultSequenceTag_;} else if(kind == NodeID.sequence){return defaultSequenceTag_;}
else if(kind == NodeID.Mapping) {return defaultMappingTag_;} else if(kind == NodeID.mapping) {return defaultMappingTag_;}
assert(false, "This line of code should never be reached"); assert(false, "This line of code should never be reached");
} }
@safe unittest @safe unittest
@ -169,7 +169,7 @@ final class Resolver
const string expected = tag; const string expected = tag;
foreach(value; values) foreach(value; values)
{ {
const string resolved = resolver.resolve(NodeID.Scalar, null, value, true); const string resolved = resolver.resolve(NodeID.scalar, null, value, true);
if(expected != resolved) if(expected != resolved)
{ {
return false; return false;

View file

@ -113,11 +113,11 @@ final class Scanner
enum Chomping enum Chomping
{ {
/// Strip all trailing line breaks. '-' indicator. /// Strip all trailing line breaks. '-' indicator.
Strip, strip,
/// Line break of the last line is preserved, others discarded. Default. /// Line break of the last line is preserved, others discarded. Default.
Clip, clip,
/// All trailing line breaks are preserved. '+' indicator. /// All trailing line breaks are preserved. '+' indicator.
Keep keep
} }
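What each mode does to the trailing line breaks of a block scalar, with a hedged check assuming Loader.fromString and Node.as as used in this library's tests:

    // "s: |-\n  text\n\n" -> strip: "text"     (all trailing breaks removed)
    // "s: |\n  text\n\n"  -> clip:  "text\n"   (last line break kept; the default)
    // "s: |+\n  text\n\n" -> keep:  "text\n\n" (all trailing breaks kept)
    assert(Loader.fromString("s: |+\n  text\n\n".dup)
                 .load()["s"].as!string == "text\n\n");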
/// Reader used to read from a file/stream. /// Reader used to read from a file/stream.
@ -495,7 +495,7 @@ final class Scanner
/// Add DOCUMENT-START or DOCUMENT-END token. /// Add DOCUMENT-START or DOCUMENT-END token.
void fetchDocumentIndicator(TokenID id)() void fetchDocumentIndicator(TokenID id)()
if(id == TokenID.DocumentStart || id == TokenID.DocumentEnd) if(id == TokenID.documentStart || id == TokenID.documentEnd)
{ {
// Set indentation to -1. // Set indentation to -1.
unwindIndent(-1); unwindIndent(-1);
@ -509,8 +509,8 @@ final class Scanner
} }
/// Aliases to add DOCUMENT-START or DOCUMENT-END token. /// Aliases to add DOCUMENT-START or DOCUMENT-END token.
alias fetchDocumentStart = fetchDocumentIndicator!(TokenID.DocumentStart); alias fetchDocumentStart = fetchDocumentIndicator!(TokenID.documentStart);
alias fetchDocumentEnd = fetchDocumentIndicator!(TokenID.DocumentEnd); alias fetchDocumentEnd = fetchDocumentIndicator!(TokenID.documentEnd);
/// Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token. /// Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
void fetchFlowCollectionStart(TokenID id)() @safe void fetchFlowCollectionStart(TokenID id)() @safe
@ -527,8 +527,8 @@ final class Scanner
} }
/// Aliases to add FLOW-SEQUENCE-START or FLOW-MAPPING-START token. /// Aliases to add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
alias fetchFlowSequenceStart = fetchFlowCollectionStart!(TokenID.FlowSequenceStart); alias fetchFlowSequenceStart = fetchFlowCollectionStart!(TokenID.flowSequenceStart);
alias fetchFlowMappingStart = fetchFlowCollectionStart!(TokenID.FlowMappingStart); alias fetchFlowMappingStart = fetchFlowCollectionStart!(TokenID.flowMappingStart);
/// Add FLOW-SEQUENCE-END or FLOW-MAPPING-END token. /// Add FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
void fetchFlowCollectionEnd(TokenID id)() void fetchFlowCollectionEnd(TokenID id)()
@ -545,8 +545,8 @@ final class Scanner
} }
/// Aliases to add FLOW-SEQUENCE-END or FLOW-MAPPING-END token. /// Aliases to add FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
alias fetchFlowSequenceEnd = fetchFlowCollectionEnd!(TokenID.FlowSequenceEnd); alias fetchFlowSequenceEnd = fetchFlowCollectionEnd!(TokenID.flowSequenceEnd);
alias fetchFlowMappingEnd = fetchFlowCollectionEnd!(TokenID.FlowMappingEnd); alias fetchFlowMappingEnd = fetchFlowCollectionEnd!(TokenID.flowMappingEnd);
/// Add FLOW-ENTRY token. /// Add FLOW-ENTRY token.
void fetchFlowEntry() @safe void fetchFlowEntry() @safe
@ -580,7 +580,7 @@ final class Scanner
/// Add BLOCK-ENTRY token. Might add BLOCK-SEQUENCE-START in the process. /// Add BLOCK-ENTRY token. Might add BLOCK-SEQUENCE-START in the process.
void fetchBlockEntry() @safe void fetchBlockEntry() @safe
{ {
if(flowLevel_ == 0) { blockChecks!("Sequence", TokenID.BlockSequenceStart)(); } if(flowLevel_ == 0) { blockChecks!("Sequence", TokenID.blockSequenceStart)(); }
// It's an error for the block entry to occur in the flow context, // It's an error for the block entry to occur in the flow context,
// but we let the parser detect this. // but we let the parser detect this.
@ -598,7 +598,7 @@ final class Scanner
/// Add KEY token. Might add BLOCK-MAPPING-START in the process. /// Add KEY token. Might add BLOCK-MAPPING-START in the process.
void fetchKey() @safe void fetchKey() @safe
{ {
if(flowLevel_ == 0) { blockChecks!("Mapping", TokenID.BlockMappingStart)(); } if(flowLevel_ == 0) { blockChecks!("Mapping", TokenID.blockMappingStart)(); }
// Reset possible simple key on the current level. // Reset possible simple key on the current level.
removePossibleSimpleKey(); removePossibleSimpleKey();
@ -665,7 +665,7 @@ final class Scanner
/// Add ALIAS or ANCHOR token. /// Add ALIAS or ANCHOR token.
void fetchAnchor_(TokenID id)() @safe void fetchAnchor_(TokenID id)() @safe
if(id == TokenID.Alias || id == TokenID.Anchor) if(id == TokenID.alias_ || id == TokenID.anchor)
{ {
// ALIAS/ANCHOR could be a simple key. // ALIAS/ANCHOR could be a simple key.
savePossibleSimpleKey(); savePossibleSimpleKey();
@ -678,8 +678,8 @@ final class Scanner
} }
/// Aliases to add ALIAS or ANCHOR token. /// Aliases to add ALIAS or ANCHOR token.
alias fetchAlias = fetchAnchor_!(TokenID.Alias); alias fetchAlias = fetchAnchor_!(TokenID.alias_);
alias fetchAnchor = fetchAnchor_!(TokenID.Anchor); alias fetchAnchor = fetchAnchor_!(TokenID.anchor);
/// Add TAG token. /// Add TAG token.
void fetchTag() @safe void fetchTag() @safe
@ -695,7 +695,7 @@ final class Scanner
/// Add block SCALAR token. /// Add block SCALAR token.
void fetchBlockScalar(ScalarStyle style)() @safe void fetchBlockScalar(ScalarStyle style)() @safe
if(style == ScalarStyle.Literal || style == ScalarStyle.Folded) if(style == ScalarStyle.literal || style == ScalarStyle.folded)
{ {
// Reset possible simple key on the current level. // Reset possible simple key on the current level.
removePossibleSimpleKey(); removePossibleSimpleKey();
@ -708,8 +708,8 @@ final class Scanner
} }
/// Aliases to add literal or folded block scalar. /// Aliases to add literal or folded block scalar.
alias fetchLiteral = fetchBlockScalar!(ScalarStyle.Literal); alias fetchLiteral = fetchBlockScalar!(ScalarStyle.literal);
alias fetchFolded = fetchBlockScalar!(ScalarStyle.Folded); alias fetchFolded = fetchBlockScalar!(ScalarStyle.folded);
/// Add quoted flow SCALAR token. /// Add quoted flow SCALAR token.
void fetchFlowScalar(ScalarStyle quotes)() void fetchFlowScalar(ScalarStyle quotes)()
@ -726,8 +726,8 @@ final class Scanner
} }
/// Aliases to add single or double quoted flow scalar. /// Aliases to add single or double quoted flow scalar.
alias fetchSingle = fetchFlowScalar!(ScalarStyle.SingleQuoted); alias fetchSingle = fetchFlowScalar!(ScalarStyle.singleQuoted);
alias fetchDouble = fetchFlowScalar!(ScalarStyle.DoubleQuoted); alias fetchDouble = fetchFlowScalar!(ScalarStyle.doubleQuoted);
/// Add plain SCALAR token. /// Add plain SCALAR token.
void fetchPlain() @safe void fetchPlain() @safe
@ -932,11 +932,11 @@ final class Scanner
Mark endMark = reader_.mark; Mark endMark = reader_.mark;
DirectiveType directive; DirectiveType directive;
if(name == "YAML") { directive = DirectiveType.YAML; } if(name == "YAML") { directive = DirectiveType.yaml; }
else if(name == "TAG") { directive = DirectiveType.TAG; } else if(name == "TAG") { directive = DirectiveType.tag; }
else else
{ {
directive = DirectiveType.Reserved; directive = DirectiveType.reserved;
scanToNextBreak(); scanToNextBreak();
} }
@ -1119,11 +1119,11 @@ final class Scanner
return Token.init; return Token.init;
} }
if(id == TokenID.Alias) if(id == TokenID.alias_)
{ {
return aliasToken(startMark, reader_.mark, value); return aliasToken(startMark, reader_.mark, value);
} }
if(id == TokenID.Anchor) if(id == TokenID.anchor)
{ {
return anchorToken(startMark, reader_.mark, value); return anchorToken(startMark, reader_.mark, value);
} }
@ -1279,7 +1279,7 @@ final class Scanner
// Unfortunately, folding rules are ambiguous. // Unfortunately, folding rules are ambiguous.
// This is the folding according to the specification: // This is the folding according to the specification:
if(style == ScalarStyle.Folded && lineBreak == '\n' && if(style == ScalarStyle.folded && lineBreak == '\n' &&
leadingNonSpace && !" \t"d.canFind(reader_.peekByte())) leadingNonSpace && !" \t"d.canFind(reader_.peekByte()))
{ {
// No breaks were scanned; no need to insert the space in the // No breaks were scanned; no need to insert the space in the
@ -1299,7 +1299,7 @@ final class Scanner
////this is Clark Evans's interpretation (also in the spec ////this is Clark Evans's interpretation (also in the spec
////examples): ////examples):
// //
//if(style == ScalarStyle.Folded && lineBreak == '\n') //if(style == ScalarStyle.folded && lineBreak == '\n')
//{ //{
// if(startLen == endLen) // if(startLen == endLen)
// { // {
@ -1327,14 +1327,14 @@ final class Scanner
// If chomping is Keep, we keep (commit) the last scanned line breaks // If chomping is Keep, we keep (commit) the last scanned line breaks
// (which are at the end of the scalar). Otherwise we remove them (end the // (which are at the end of the scalar). Otherwise we remove them (end the
// transaction). // transaction).
if(chomping == Chomping.Keep) { breaksTransaction.commit(); } if(chomping == Chomping.keep) { breaksTransaction.commit(); }
else { breaksTransaction.end(); } else { breaksTransaction.end(); }
if(chomping != Chomping.Strip && lineBreak != int.max) if(chomping != Chomping.strip && lineBreak != int.max)
{ {
// If chomping is Keep, we keep the line break but the first line break // If chomping is Keep, we keep the line break but the first line break
// that isn't stripped (since chomping isn't Strip in this branch) must // that isn't stripped (since chomping isn't Strip in this branch) must
// be inserted _before_ the other line breaks. // be inserted _before_ the other line breaks.
if(chomping == Chomping.Keep) if(chomping == Chomping.keep)
{ {
reader_.sliceBuilder.insert(lineBreak, startLen); reader_.sliceBuilder.insert(lineBreak, startLen);
} }
@ -1356,7 +1356,7 @@ final class Scanner
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
Tuple!(Chomping, int) scanBlockScalarIndicators(const Mark startMark) @safe Tuple!(Chomping, int) scanBlockScalarIndicators(const Mark startMark) @safe
{ {
auto chomping = Chomping.Clip; auto chomping = Chomping.clip;
int increment = int.min; int increment = int.min;
dchar c = reader_.peek(); dchar c = reader_.peek();
@ -1393,7 +1393,7 @@ final class Scanner
bool getChomping(ref dchar c, ref Chomping chomping) @safe bool getChomping(ref dchar c, ref Chomping chomping) @safe
{ {
if(!"+-"d.canFind(c)) { return false; } if(!"+-"d.canFind(c)) { return false; }
chomping = c == '+' ? Chomping.Keep : Chomping.Strip; chomping = c == '+' ? Chomping.keep : Chomping.strip;
reader_.forward(); reader_.forward();
c = reader_.peek(); c = reader_.peek();
return true; return true;
@ -1525,7 +1525,7 @@ final class Scanner
void scanFlowScalarNonSpacesToSlice(const ScalarStyle quotes, const Mark startMark) void scanFlowScalarNonSpacesToSlice(const ScalarStyle quotes, const Mark startMark)
@safe @safe
{ {
for(;;) with(ScalarStyle) for(;;)
{ {
dchar c = reader_.peek(); dchar c = reader_.peek();
@ -1556,18 +1556,18 @@ final class Scanner
reader_.sliceBuilder.write(reader_.get(numCodePoints)); reader_.sliceBuilder.write(reader_.get(numCodePoints));
c = reader_.peek(); c = reader_.peek();
if(quotes == SingleQuoted && c == '\'' && reader_.peek(1) == '\'') if(quotes == ScalarStyle.singleQuoted && c == '\'' && reader_.peek(1) == '\'')
{ {
reader_.forward(2); reader_.forward(2);
reader_.sliceBuilder.write('\''); reader_.sliceBuilder.write('\'');
} }
else if((quotes == DoubleQuoted && c == '\'') || else if((quotes == ScalarStyle.doubleQuoted && c == '\'') ||
(quotes == SingleQuoted && "\"\\"d.canFind(c))) (quotes == ScalarStyle.singleQuoted && "\"\\"d.canFind(c)))
{ {
reader_.forward(); reader_.forward();
reader_.sliceBuilder.write(c); reader_.sliceBuilder.write(c);
} }
else if(quotes == DoubleQuoted && c == '\\') else if(quotes == ScalarStyle.doubleQuoted && c == '\\')
{ {
reader_.forward(); reader_.forward();
c = reader_.peek(); c = reader_.peek();
@ -1803,7 +1803,7 @@ final class Scanner
spacesTransaction.end(); spacesTransaction.end();
char[] slice = reader_.sliceBuilder.finish(); char[] slice = reader_.sliceBuilder.finish();
return scalarToken(startMark, endMark, slice, ScalarStyle.Plain); return scalarToken(startMark, endMark, slice, ScalarStyle.plain);
} }
/// Scan spaces in a plain scalar. /// Scan spaces in a plain scalar.

View file

@ -179,7 +179,7 @@ struct Serializer(Range, CharType)
{ {
assert(node.isType!string, "Scalar node type must be string before serialized"); assert(node.isType!string, "Scalar node type must be string before serialized");
auto value = node.as!string; auto value = node.as!string;
const detectedTag = resolver_.resolve(NodeID.Scalar, null, value, true); const detectedTag = resolver_.resolve(NodeID.scalar, null, value, true);
const bool isDetected = node.tag_ == detectedTag; const bool isDetected = node.tag_ == detectedTag;
emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_, emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_,

View file

@ -11,18 +11,27 @@ module dyaml.style;
///Scalar styles. ///Scalar styles.
enum ScalarStyle : ubyte enum ScalarStyle : ubyte
{ {
Invalid = 0, /// Invalid (uninitialized) style /// Invalid (uninitialized) style
Literal, /// `|` (Literal block style) invalid = 0,
Folded, /// `>` (Folded block style) /// `|` (Literal block style)
Plain, /// Plain scalar literal,
SingleQuoted, /// Single quoted scalar /// `>` (Folded block style)
DoubleQuoted /// Double quoted scalar folded,
/// Plain scalar
plain,
/// Single quoted scalar
singleQuoted,
/// Double quoted scalar
doubleQuoted
} }
///Collection styles. ///Collection styles.
enum CollectionStyle : ubyte enum CollectionStyle : ubyte
{ {
Invalid = 0, /// Invalid (uninitialized) style /// Invalid (uninitialized) style
Block, /// Block style. invalid = 0,
Flow /// Flow style. /// Block style.
block,
/// Flow style.
flow
} }
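For reference, the concrete YAML each style selects for the same data (illustrative only):

    // ScalarStyle.plain:        key: text
    // ScalarStyle.singleQuoted: key: 'text'
    // ScalarStyle.doubleQuoted: key: "text"
    // ScalarStyle.literal:      key: |   (indented lines follow, line breaks kept)
    // ScalarStyle.folded:       key: >   (indented lines follow, line breaks folded)
    // CollectionStyle.block:    entries as "- a" on separate lines
    // CollectionStyle.flow:     [a, b]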

View file

@ -98,9 +98,9 @@ private:
///Unittest status. ///Unittest status.
enum TestStatus enum TestStatus
{ {
Success, //Unittest passed. success, //Unittest passed.
Failure, //Unittest failed. failure, //Unittest failed.
Error //There's an error in the unittest. error //There's an error in the unittest.
} }
///Unittest result. ///Unittest result.
@ -166,7 +166,7 @@ Result execute(D)(const string testName, D testFunction,
writeln(testName ~ "(" ~ filenames.join(", ") ~ ")..."); writeln(testName ~ "(" ~ filenames.join(", ") ~ ")...");
} }
auto kind = TestStatus.Success; auto kind = TestStatus.success;
string info = ""; string info = "";
try try
{ {
@ -180,7 +180,7 @@ Result execute(D)(const string testName, D testFunction,
catch(Throwable e) catch(Throwable e)
{ {
info = to!string(typeid(e)) ~ "\n" ~ to!string(e); info = to!string(typeid(e)) ~ "\n" ~ to!string(e);
kind = (typeid(e) is typeid(AssertError)) ? TestStatus.Failure : TestStatus.Error; kind = (typeid(e) is typeid(AssertError)) ? TestStatus.failure : TestStatus.error;
write((verbose ? to!string(e) : to!string(kind)) ~ " "); write((verbose ? to!string(e) : to!string(kind)) ~ " ");
} }
@ -213,10 +213,10 @@ void display(Result[] results) @safe
to!string(result.kind)); to!string(result.kind));
} }
if(result.kind == TestStatus.Success){continue;} if(result.kind == TestStatus.success){continue;}
if(result.kind == TestStatus.Failure){++failures;} if(result.kind == TestStatus.failure){++failures;}
else if(result.kind == TestStatus.Error){++errors;} else if(result.kind == TestStatus.error){++errors;}
writeln(result.info); writeln(result.info);
writeln("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"); writeln("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
} }

View file

@ -35,20 +35,20 @@ bool compareEvents(T, U)(T events1, U events2)
//Different event types. //Different event types.
if(e1.id != e2.id){return false;} if(e1.id != e2.id){return false;}
//Different anchor (if applicable). //Different anchor (if applicable).
if([EventID.SequenceStart, if([EventID.sequenceStart,
EventID.MappingStart, EventID.mappingStart,
EventID.Alias, EventID.alias_,
EventID.Scalar].canFind(e1.id) EventID.scalar].canFind(e1.id)
&& e1.anchor != e2.anchor) && e1.anchor != e2.anchor)
{ {
return false; return false;
} }
//Different collection tag (if applicable). //Different collection tag (if applicable).
if([EventID.SequenceStart, EventID.MappingStart].canFind(e1.id) && e1.tag != e2.tag) if([EventID.sequenceStart, EventID.mappingStart].canFind(e1.id) && e1.tag != e2.tag)
{ {
return false; return false;
} }
if(e1.id == EventID.Scalar) if(e1.id == EventID.scalar)
{ {
//Different scalar tag (if applicable). //Different scalar tag (if applicable).
if(!(e1.implicit || e2.implicit) if(!(e1.implicit || e2.implicit)
@ -140,27 +140,27 @@ void testEmitterStyles(string dataFilename, string canonicalFilename) @safe
//must exist due to Anchor and Tag reference counts //must exist due to Anchor and Tag reference counts
auto loader = Loader.fromFile(canonicalFilename); auto loader = Loader.fromFile(canonicalFilename);
auto events = loader.parse(); auto events = loader.parse();
foreach(flowStyle; [CollectionStyle.Block, CollectionStyle.Flow]) foreach(flowStyle; [CollectionStyle.block, CollectionStyle.flow])
{ {
foreach(style; [ScalarStyle.Literal, ScalarStyle.Folded, foreach(style; [ScalarStyle.literal, ScalarStyle.folded,
ScalarStyle.DoubleQuoted, ScalarStyle.SingleQuoted, ScalarStyle.doubleQuoted, ScalarStyle.singleQuoted,
ScalarStyle.Plain]) ScalarStyle.plain])
{ {
Event[] styledEvents; Event[] styledEvents;
foreach(event; events) foreach(event; events)
{ {
if(event.id == EventID.Scalar) if(event.id == EventID.scalar)
{ {
event = scalarEvent(Mark(), Mark(), event.anchor, event.tag, event = scalarEvent(Mark(), Mark(), event.anchor, event.tag,
event.implicit, event.implicit,
event.value, style); event.value, style);
} }
else if(event.id == EventID.SequenceStart) else if(event.id == EventID.sequenceStart)
{ {
event = sequenceStartEvent(Mark(), Mark(), event.anchor, event = sequenceStartEvent(Mark(), Mark(), event.anchor,
event.tag, event.implicit, flowStyle); event.tag, event.implicit, flowStyle);
} }
else if(event.id == EventID.MappingStart) else if(event.id == EventID.mappingStart)
{ {
event = mappingStartEvent(Mark(), Mark(), event.anchor, event = mappingStartEvent(Mark(), Mark(), event.anchor,
event.tag, event.implicit, flowStyle); event.tag, event.implicit, flowStyle);

View file

@ -26,24 +26,26 @@ import dyaml.token;
void testTokens(string dataFilename, string tokensFilename) @safe void testTokens(string dataFilename, string tokensFilename) @safe
{ {
//Representations of YAML tokens in the tokens file. //Representations of YAML tokens in the tokens file.
auto replace = [TokenID.Directive : "%" , auto replace = [
TokenID.DocumentStart : "---" , TokenID.directive: "%",
TokenID.DocumentEnd : "..." , TokenID.documentStart: "---",
TokenID.Alias : "*" , TokenID.documentEnd: "...",
TokenID.Anchor : "&" , TokenID.alias_: "*",
TokenID.Tag : "!" , TokenID.anchor: "&",
TokenID.Scalar : "_" , TokenID.tag: "!",
TokenID.BlockSequenceStart : "[[" , TokenID.scalar: "_",
TokenID.BlockMappingStart : "{{" , TokenID.blockSequenceStart: "[[",
TokenID.BlockEnd : "]}" , TokenID.blockMappingStart: "{{",
TokenID.FlowSequenceStart : "[" , TokenID.blockEnd: "]}",
TokenID.FlowSequenceEnd : "]" , TokenID.flowSequenceStart: "[",
TokenID.FlowMappingStart : "{" , TokenID.flowSequenceEnd: "]",
TokenID.FlowMappingEnd : "}" , TokenID.flowMappingStart: "{",
TokenID.BlockEntry : "," , TokenID.flowMappingEnd: "}",
TokenID.FlowEntry : "," , TokenID.blockEntry: ",",
TokenID.Key : "?" , TokenID.flowEntry: ",",
TokenID.Value : ":" ]; TokenID.key: "?",
TokenID.value: ":"
];
string[] tokens1; string[] tokens1;
string[] tokens2 = readText(tokensFilename).split(); string[] tokens2 = readText(tokensFilename).split();
@ -55,7 +57,7 @@ void testTokens(string dataFilename, string tokensFilename) @safe
auto loader = Loader.fromFile(dataFilename); auto loader = Loader.fromFile(dataFilename);
foreach(token; loader.scan()) foreach(token; loader.scan())
{ {
if(token.id != TokenID.StreamStart && token.id != TokenID.StreamEnd) if(token.id != TokenID.streamStart && token.id != TokenID.streamEnd)
{ {
tokens1 ~= replace[token.id]; tokens1 ~= replace[token.id];
} }

View file

@ -22,38 +22,39 @@ package:
/// Token types. /// Token types.
enum TokenID : ubyte enum TokenID : ubyte
{ {
Invalid = 0, /// Invalid (uninitialized) token // Invalid (uninitialized) token
Directive, /// DIRECTIVE invalid = 0,
DocumentStart, /// DOCUMENT-START directive,
DocumentEnd, /// DOCUMENT-END documentStart,
StreamStart, /// STREAM-START documentEnd,
StreamEnd, /// STREAM-END streamStart,
BlockSequenceStart, /// BLOCK-SEQUENCE-START streamEnd,
BlockMappingStart, /// BLOCK-MAPPING-START blockSequenceStart,
BlockEnd, /// BLOCK-END blockMappingStart,
FlowSequenceStart, /// FLOW-SEQUENCE-START blockEnd,
FlowMappingStart, /// FLOW-MAPPING-START flowSequenceStart,
FlowSequenceEnd, /// FLOW-SEQUENCE-END flowMappingStart,
FlowMappingEnd, /// FLOW-MAPPING-END flowSequenceEnd,
Key, /// KEY flowMappingEnd,
Value, /// VALUE key,
BlockEntry, /// BLOCK-ENTRY value,
FlowEntry, /// FLOW-ENTRY blockEntry,
Alias, /// ALIAS flowEntry,
Anchor, /// ANCHOR alias_,
Tag, /// TAG anchor,
Scalar /// SCALAR tag,
scalar
} }
/// Specifies the type of a tag directive token. /// Specifies the type of a tag directive token.
enum DirectiveType : ubyte enum DirectiveType : ubyte
{ {
// YAML version directive. // YAML version directive.
YAML, yaml,
// Tag directive. // Tag directive.
TAG, tag,
// Any other directive is "reserved" for future YAML versions. // Any other directive is "reserved" for future YAML versions.
Reserved reserved
} }
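A hedged mapping from directive lines to members, following the scanner logic in this changeset (%FOO is a hypothetical reserved directive):

    // "%YAML 1.1"                      -> DirectiveType.yaml
    // "%TAG !e! tag:example.com,2018:" -> DirectiveType.tag
    // "%FOO anything-else"             -> DirectiveType.reserved (scanned to line break)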
/// Token produced by scanner. /// Token produced by scanner.
@ -107,7 +108,7 @@ static assert(Token.sizeof <= 32, "Token has unexpected size");
Token directiveToken(const Mark start, const Mark end, char[] value, Token directiveToken(const Mark start, const Mark end, char[] value,
DirectiveType directive, const uint nameEnd) @safe pure nothrow @nogc DirectiveType directive, const uint nameEnd) @safe pure nothrow @nogc
{ {
return Token(value, start, end, TokenID.Directive, ScalarStyle.init, Encoding.init, return Token(value, start, end, TokenID.directive, ScalarStyle.init, Encoding.init,
directive, nameEnd); directive, nameEnd);
} }
@ -128,18 +129,18 @@ Token simpleToken(TokenID id)(const Mark start, const Mark end)
/// encoding = Encoding of the stream. /// encoding = Encoding of the stream.
Token streamStartToken(const Mark start, const Mark end, const Encoding encoding) @safe pure nothrow @nogc Token streamStartToken(const Mark start, const Mark end, const Encoding encoding) @safe pure nothrow @nogc
{ {
return Token(null, start, end, TokenID.StreamStart, ScalarStyle.Invalid, encoding); return Token(null, start, end, TokenID.streamStart, ScalarStyle.invalid, encoding);
} }
/// Aliases for construction of simple token types. /// Aliases for construction of simple token types.
alias streamEndToken = simpleToken!(TokenID.StreamEnd); alias streamEndToken = simpleToken!(TokenID.streamEnd);
alias blockSequenceStartToken = simpleToken!(TokenID.BlockSequenceStart); alias blockSequenceStartToken = simpleToken!(TokenID.blockSequenceStart);
alias blockMappingStartToken = simpleToken!(TokenID.BlockMappingStart); alias blockMappingStartToken = simpleToken!(TokenID.blockMappingStart);
alias blockEndToken = simpleToken!(TokenID.BlockEnd); alias blockEndToken = simpleToken!(TokenID.blockEnd);
alias keyToken = simpleToken!(TokenID.Key); alias keyToken = simpleToken!(TokenID.key);
alias valueToken = simpleToken!(TokenID.Value); alias valueToken = simpleToken!(TokenID.value);
alias blockEntryToken = simpleToken!(TokenID.BlockEntry); alias blockEntryToken = simpleToken!(TokenID.blockEntry);
alias flowEntryToken = simpleToken!(TokenID.FlowEntry); alias flowEntryToken = simpleToken!(TokenID.flowEntry);
/// Construct a simple token with a value and the specified type. /// Construct a simple token with a value and the specified type.
/// ///
@ -152,14 +153,14 @@ alias flowEntryToken = simpleToken!(TokenID.FlowEntry);
Token simpleValueToken(TokenID id)(const Mark start, const Mark end, char[] value, Token simpleValueToken(TokenID id)(const Mark start, const Mark end, char[] value,
const uint valueDivider = uint.max) const uint valueDivider = uint.max)
{ {
return Token(value, start, end, id, ScalarStyle.Invalid, Encoding.init, return Token(value, start, end, id, ScalarStyle.invalid, Encoding.init,
DirectiveType.init, valueDivider); DirectiveType.init, valueDivider);
} }
/// Aliases for construction of tag, alias and anchor tokens. /// Aliases for construction of tag, alias and anchor tokens.
alias tagToken = simpleValueToken!(TokenID.Tag); alias tagToken = simpleValueToken!(TokenID.tag);
alias aliasToken = simpleValueToken!(TokenID.Alias); alias aliasToken = simpleValueToken!(TokenID.alias_);
alias anchorToken = simpleValueToken!(TokenID.Anchor); alias anchorToken = simpleValueToken!(TokenID.anchor);
/// Construct a scalar token. /// Construct a scalar token.
/// ///
@ -169,5 +170,5 @@ alias anchorToken = simpleValueToken!(TokenID.Anchor);
/// style = Style of the token. /// style = Style of the token.
Token scalarToken(const Mark start, const Mark end, char[] value, const ScalarStyle style) @safe pure nothrow @nogc Token scalarToken(const Mark start, const Mark end, char[] value, const ScalarStyle style) @safe pure nothrow @nogc
{ {
return Token(value, start, end, TokenID.Scalar, style); return Token(value, start, end, TokenID.scalar, style);
} }