Merge pull request #198 from Herringway/enum-casing

use camelCasing for enum members
merged-on-behalf-of: BBasile <BBasile@users.noreply.github.com>
Commit 0a4057472a by The Dlang Bot, 2018-08-30 12:25:56 +02:00 (committed by GitHub)
17 changed files with 370 additions and 358 deletions
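
The rename is mechanical: every enum member moves from PascalCase to camelCase, following the D style guide's naming convention for enum members. A minimal sketch of the pattern, using two enums taken from the hunks below (`alias_` gains a trailing underscore because `alias` is a D keyword and cannot be used as an identifier):

// Before: PascalCase members.
enum LineBreak { Unix, Windows, Macintosh }

// After: camelCase members; reserved words get a trailing underscore.
enum LineBreak { unix, windows, macintosh }
enum EventID : ubyte
{
    invalid = 0, streamStart, streamEnd, documentStart, documentEnd,
    alias_, scalar, sequenceStart, sequenceEnd, mappingStart, mappingEnd
}

Since these enums are part of the public API, the rename is a breaking change for any code that spells out the old member names.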

@@ -97,17 +97,17 @@ final class Composer
bool checkNode() @safe
{
// If next event is stream start, skip it
-parser_.skipOver!"a.id == b"(EventID.StreamStart);
+parser_.skipOver!"a.id == b"(EventID.streamStart);
//True if there are more documents available.
-return parser_.front.id != EventID.StreamEnd;
+return parser_.front.id != EventID.streamEnd;
}
///Get a YAML document as a node (the root of the document).
Node getNode() @safe
{
//Get the root node of the next document.
-assert(parser_.front.id != EventID.StreamEnd,
+assert(parser_.front.id != EventID.streamEnd,
"Trying to get a node from Composer when there is no node to " ~
"get. use checkNode() to determine if there is a node.");
@@ -117,19 +117,19 @@ final class Composer
///Get single YAML document, throwing if there is more than one document.
Node getSingleNode() @safe
{
-assert(parser_.front.id != EventID.StreamEnd,
+assert(parser_.front.id != EventID.streamEnd,
"Trying to get a node from Composer when there is no node to " ~
"get. use checkNode() to determine if there is a node.");
Node document = composeDocument();
//Ensure that the stream contains no more documents.
-enforce(parser_.front.id == EventID.StreamEnd,
+enforce(parser_.front.id == EventID.streamEnd,
new ComposerException("Expected single document in the stream, " ~
"but found another document.",
parser_.front.startMark));
-skipExpected(EventID.StreamEnd);
+skipExpected(EventID.streamEnd);
assert(parser_.empty, "Found event after stream end");
return document;
@@ -162,12 +162,12 @@ final class Composer
///Compose a YAML document and return its root node.
Node composeDocument() @safe
{
-skipExpected(EventID.DocumentStart);
+skipExpected(EventID.documentStart);
//Compose the root node.
Node node = composeNode(0, 0);
-skipExpected(EventID.DocumentEnd);
+skipExpected(EventID.documentEnd);
anchors_.destroy();
return node;
@@ -179,7 +179,7 @@ final class Composer
/// nodeAppenderLevel = Current level of the node appender stack.
Node composeNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe
{
-if(parser_.front.id == EventID.Alias)
+if(parser_.front.id == EventID.alias_)
{
const event = parser_.front;
parser_.popFront();
@@ -216,13 +216,13 @@ final class Composer
switch (parser_.front.id)
{
-case EventID.Scalar:
+case EventID.scalar:
result = composeScalarNode();
break;
-case EventID.SequenceStart:
+case EventID.sequenceStart:
result = composeSequenceNode(pairAppenderLevel, nodeAppenderLevel);
break;
-case EventID.MappingStart:
+case EventID.mappingStart:
result = composeMappingNode(pairAppenderLevel, nodeAppenderLevel);
break;
default: assert(false, "This code should never be reached");
@@ -240,7 +240,7 @@ final class Composer
{
const event = parser_.front;
parser_.popFront();
-const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value,
+const tag = resolver_.resolve(NodeID.scalar, event.tag, event.value,
event.implicit);
Node node = constructor_.node(event.startMark, event.endMark, tag,
@@ -261,10 +261,10 @@ final class Composer
const startEvent = parser_.front;
parser_.popFront();
-const tag = resolver_.resolve(NodeID.Sequence, startEvent.tag, null,
+const tag = resolver_.resolve(NodeID.sequence, startEvent.tag, null,
startEvent.implicit);
-while(parser_.front.id != EventID.SequenceEnd)
+while(parser_.front.id != EventID.sequenceEnd)
{
nodeAppender.put(composeNode(pairAppenderLevel, nodeAppenderLevel + 1));
}
@@ -358,12 +358,12 @@ final class Composer
ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
const startEvent = parser_.front;
parser_.popFront();
-const tag = resolver_.resolve(NodeID.Mapping, startEvent.tag, null,
+const tag = resolver_.resolve(NodeID.mapping, startEvent.tag, null,
startEvent.implicit);
auto pairAppender = &(pairAppenders_[pairAppenderLevel]);
Tuple!(Node, Mark)[] toMerge;
-while(parser_.front.id != EventID.MappingEnd)
+while(parser_.front.id != EventID.mappingEnd)
{
auto pair = Node.Pair(composeNode(pairAppenderLevel + 1, nodeAppenderLevel),
composeNode(pairAppenderLevel + 1, nodeAppenderLevel));

@@ -58,7 +58,7 @@ struct Dumper(Range)
//Preferred text width.
uint textWidth_ = 80;
//Line break to use.
-LineBreak lineBreak_ = LineBreak.Unix;
+LineBreak lineBreak_ = LineBreak.unix;
//YAML version string.
string YAMLVersion_ = "1.1";
//Tag directives to use.
@@ -329,7 +329,7 @@ struct Dumper(Range)
dumper.explicitEnd = true;
dumper.explicitStart = true;
dumper.YAMLVersion = null;
-dumper.lineBreak = LineBreak.Windows;
+dumper.lineBreak = LineBreak.windows;
dumper.dump(node);
assert(stream.data == "--- 0\r\n...\r\n");
}
@@ -339,7 +339,7 @@ struct Dumper(Range)
dumper.explicitEnd = true;
dumper.explicitStart = true;
dumper.YAMLVersion = null;
-dumper.lineBreak = LineBreak.Macintosh;
+dumper.lineBreak = LineBreak.macintosh;
dumper.dump(node);
assert(stream.data == "--- 0\r...\r");
}

@@ -110,13 +110,13 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
enum Context
{
/// Root node of a document.
-Root,
+root,
/// Sequence.
-Sequence,
+sequence,
/// Mapping.
-MappingNoSimpleKey,
+mappingNoSimpleKey,
/// Mapping, in a simple key.
-MappingSimpleKey
+mappingSimpleKey,
}
/// Current context.
Context context_;
@@ -157,7 +157,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Analysis result of the current scalar.
ScalarAnalysis analysis_;
///Style of the current scalar.
-ScalarStyle style_ = ScalarStyle.Invalid;
+ScalarStyle style_ = ScalarStyle.invalid;
public:
@disable int opCmp(ref Emitter);
@@ -258,9 +258,9 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
if(events_.length == 0){return true;}
const event = events_.peek();
-if(event.id == EventID.DocumentStart){return needEvents(1);}
-if(event.id == EventID.SequenceStart){return needEvents(2);}
-if(event.id == EventID.MappingStart) {return needEvents(3);}
+if(event.id == EventID.documentStart){return needEvents(1);}
+if(event.id == EventID.sequenceStart){return needEvents(2);}
+if(event.id == EventID.mappingStart) {return needEvents(3);}
return false;
}
@@ -278,11 +278,11 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
while(!events_.iterationOver())
{
const event = events_.next();
-static starts = [EventID.DocumentStart, EventID.SequenceStart, EventID.MappingStart];
-static ends = [EventID.DocumentEnd, EventID.SequenceEnd, EventID.MappingEnd];
+static starts = [EventID.documentStart, EventID.sequenceStart, EventID.mappingStart];
+static ends = [EventID.documentEnd, EventID.sequenceEnd, EventID.mappingEnd];
if(starts.canFind(event.id)) {++level;}
else if(ends.canFind(event.id)){--level;}
-else if(event.id == EventID.StreamStart){level = -1;}
+else if(event.id == EventID.streamStart){level = -1;}
if(level < 0)
{
@@ -324,8 +324,8 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Handle start of a file/stream.
void expectStreamStart() @safe
{
-enforce(eventTypeIs(EventID.StreamStart),
-new EmitterException("Expected StreamStart, but got " ~ event_.idString));
+enforce(eventTypeIs(EventID.streamStart),
+new EmitterException("Expected streamStart, but got " ~ event_.idString));
writeStreamStart();
nextExpected(&expectDocumentStart!(Yes.first));
@@ -342,11 +342,11 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Handle start of a document.
void expectDocumentStart(Flag!"first" first)() @safe
{
-enforce(eventTypeIs(EventID.DocumentStart) || eventTypeIs(EventID.StreamEnd),
-new EmitterException("Expected DocumentStart or StreamEnd, but got "
+enforce(eventTypeIs(EventID.documentStart) || eventTypeIs(EventID.streamEnd),
+new EmitterException("Expected documentStart or streamEnd, but got "
~ event_.idString));
-if(event_.id == EventID.DocumentStart)
+if(event_.id == EventID.documentStart)
{
const YAMLVersion = event_.value;
auto tagDirectives = event_.tagDirectives;
@@ -394,7 +394,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
}
nextExpected(&expectRootNode);
}
-else if(event_.id == EventID.StreamEnd)
+else if(event_.id == EventID.streamEnd)
{
if(openEnded_)
{
@@ -409,7 +409,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Handle end of a document.
void expectDocumentEnd() @safe
{
-enforce(eventTypeIs(EventID.DocumentEnd),
+enforce(eventTypeIs(EventID.documentEnd),
new EmitterException("Expected DocumentEnd, but got " ~ event_.idString));
writeIndent();
@@ -425,7 +425,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
void expectRootNode() @safe
{
pushState(&expectDocumentEnd);
-expectNode(Context.Root);
+expectNode(Context.root);
}
///Handle a mapping node.
@@ -433,13 +433,13 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
//Params: simpleKey = Are we in a simple key?
void expectMappingNode(const bool simpleKey = false) @safe
{
-expectNode(simpleKey ? Context.MappingSimpleKey : Context.MappingNoSimpleKey);
+expectNode(simpleKey ? Context.mappingSimpleKey : Context.mappingNoSimpleKey);
}
///Handle a sequence node.
void expectSequenceNode() @safe
{
-expectNode(Context.Sequence);
+expectNode(Context.sequence);
}
///Handle a new node. Context specifies where in the document we are.
@@ -447,17 +447,17 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
{
context_ = context;
-const flowCollection = event_.collectionStyle == CollectionStyle.Flow;
+const flowCollection = event_.collectionStyle == CollectionStyle.flow;
switch(event_.id)
{
-case EventID.Alias: expectAlias(); break;
-case EventID.Scalar:
+case EventID.alias_: expectAlias(); break;
+case EventID.scalar:
processAnchor("&");
processTag();
expectScalar();
break;
-case EventID.SequenceStart:
+case EventID.sequenceStart:
processAnchor("&");
processTag();
if(flowLevel_ > 0 || canonical_ || flowCollection || checkEmptySequence())
@@ -469,7 +469,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
expectBlockSequence();
}
break;
-case EventID.MappingStart:
+case EventID.mappingStart:
processAnchor("&");
processTag();
if(flowLevel_ > 0 || canonical_ || flowCollection || checkEmptyMapping())
@@ -482,8 +482,8 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
}
break;
default:
-throw new EmitterException("Expected Alias, Scalar, SequenceStart or " ~
-"MappingStart, but got: " ~ event_.idString);
+throw new EmitterException("Expected alias_, scalar, sequenceStart or " ~
+"mappingStart, but got: " ~ event_.idString);
}
}
///Handle an alias.
@@ -517,7 +517,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Handle a flow sequence item.
void expectFlowSequenceItem(Flag!"first" first)() @safe
{
-if(event_.id == EventID.SequenceEnd)
+if(event_.id == EventID.sequenceEnd)
{
indent_ = popIndent();
--flowLevel_;
@@ -550,7 +550,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Handle a key in a flow mapping.
void expectFlowMappingKey(Flag!"first" first)() @safe
{
-if(event_.id == EventID.MappingEnd)
+if(event_.id == EventID.mappingEnd)
{
indent_ = popIndent();
--flowLevel_;
@@ -600,8 +600,8 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Handle a block sequence.
void expectBlockSequence() @safe
{
-const indentless = (context_ == Context.MappingNoSimpleKey ||
-context_ == Context.MappingSimpleKey) && !indentation_;
+const indentless = (context_ == Context.mappingNoSimpleKey ||
+context_ == Context.mappingSimpleKey) && !indentation_;
increaseIndent(No.flow, indentless);
nextExpected(&expectBlockSequenceItem!(Yes.first));
}
@@ -609,7 +609,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Handle a block sequence item.
void expectBlockSequenceItem(Flag!"first" first)() @safe
{
-static if(!first) if(event_.id == EventID.SequenceEnd)
+static if(!first) if(event_.id == EventID.sequenceEnd)
{
indent_ = popIndent();
nextExpected(popState());
@@ -634,7 +634,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Handle a key in a block mapping.
void expectBlockMappingKey(Flag!"first" first)() @safe
{
-static if(!first) if(event_.id == EventID.MappingEnd)
+static if(!first) if(event_.id == EventID.mappingEnd)
{
indent_ = popIndent();
nextExpected(popState());
@@ -676,27 +676,27 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
///Check if an empty sequence is next.
bool checkEmptySequence() const @safe pure nothrow
{
-return event_.id == EventID.SequenceStart && events_.length > 0
-&& events_.peek().id == EventID.SequenceEnd;
+return event_.id == EventID.sequenceStart && events_.length > 0
+&& events_.peek().id == EventID.sequenceEnd;
}
///Check if an empty mapping is next.
bool checkEmptyMapping() const @safe pure nothrow
{
-return event_.id == EventID.MappingStart && events_.length > 0
-&& events_.peek().id == EventID.MappingEnd;
+return event_.id == EventID.mappingStart && events_.length > 0
+&& events_.peek().id == EventID.mappingEnd;
}
///Check if an empty document is next.
bool checkEmptyDocument() const @safe pure nothrow
{
-if(event_.id != EventID.DocumentStart || events_.length == 0)
+if(event_.id != EventID.documentStart || events_.length == 0)
{
return false;
}
const event = events_.peek();
-const emptyScalar = event.id == EventID.Scalar && (event.anchor is null) &&
+const emptyScalar = event.id == EventID.scalar && (event.anchor is null) &&
(event.tag is null) && event.implicit && event.value == "";
return emptyScalar;
}
@@ -706,11 +706,11 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
{
uint length;
const id = event_.id;
-const scalar = id == EventID.Scalar;
-const collectionStart = id == EventID.MappingStart ||
-id == EventID.SequenceStart;
+const scalar = id == EventID.scalar;
+const collectionStart = id == EventID.mappingStart ||
+id == EventID.sequenceStart;
-if((id == EventID.Alias || scalar || collectionStart)
+if((id == EventID.alias_ || scalar || collectionStart)
&& (event_.anchor !is null))
{
if(preparedAnchor_ is null)
@@ -734,7 +734,7 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
if(length >= 128){return false;}
-return id == EventID.Alias ||
+return id == EventID.alias_ ||
(scalar && !analysis_.flags.empty && !analysis_.flags.multiline) ||
checkEmptySequence() ||
checkEmptyMapping();
@@ -744,30 +744,30 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
void processScalar() @safe
{
if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);}
-if(style_ == ScalarStyle.Invalid)
+if(style_ == ScalarStyle.invalid)
{
style_ = chooseScalarStyle();
}
-//if(analysis_.flags.multiline && (context_ != Context.MappingSimpleKey) &&
-// ([ScalarStyle.Invalid, ScalarStyle.Plain, ScalarStyle.SingleQuoted, ScalarStyle.DoubleQuoted)
+//if(analysis_.flags.multiline && (context_ != Context.mappingSimpleKey) &&
+// ([ScalarStyle.invalid, ScalarStyle.plain, ScalarStyle.singleQuoted, ScalarStyle.doubleQuoted)
// .canFind(style_))
//{
// writeIndent();
//}
auto writer = ScalarWriter!(Range, CharType)(this, analysis_.scalar,
-context_ != Context.MappingSimpleKey);
+context_ != Context.mappingSimpleKey);
with(writer) final switch(style_)
{
-case ScalarStyle.Invalid: assert(false);
-case ScalarStyle.DoubleQuoted: writeDoubleQuoted(); break;
-case ScalarStyle.SingleQuoted: writeSingleQuoted(); break;
-case ScalarStyle.Folded: writeFolded(); break;
-case ScalarStyle.Literal: writeLiteral(); break;
-case ScalarStyle.Plain: writePlain(); break;
+case ScalarStyle.invalid: assert(false);
+case ScalarStyle.doubleQuoted: writeDoubleQuoted(); break;
+case ScalarStyle.singleQuoted: writeSingleQuoted(); break;
+case ScalarStyle.folded: writeFolded(); break;
+case ScalarStyle.literal: writeLiteral(); break;
+case ScalarStyle.plain: writePlain(); break;
}
analysis_.flags.isNull = true;
-style_ = ScalarStyle.Invalid;
+style_ = ScalarStyle.invalid;
}
///Process and write an anchor/alias.
@@ -795,11 +795,11 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
{
string tag = event_.tag;
-if(event_.id == EventID.Scalar)
+if(event_.id == EventID.scalar)
{
-if(style_ == ScalarStyle.Invalid){style_ = chooseScalarStyle();}
+if(style_ == ScalarStyle.invalid){style_ = chooseScalarStyle();}
if((!canonical_ || (tag is null)) &&
-(style_ == ScalarStyle.Plain ? event_.implicit : !event_.implicit && (tag is null)))
+(style_ == ScalarStyle.plain ? event_.implicit : !event_.implicit && (tag is null)))
{
preparedTag_ = null;
return;
@@ -831,28 +831,28 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);}
const style = event_.scalarStyle;
-const invalidOrPlain = style == ScalarStyle.Invalid || style == ScalarStyle.Plain;
-const block = style == ScalarStyle.Literal || style == ScalarStyle.Folded;
-const singleQuoted = style == ScalarStyle.SingleQuoted;
-const doubleQuoted = style == ScalarStyle.DoubleQuoted;
+const invalidOrPlain = style == ScalarStyle.invalid || style == ScalarStyle.plain;
+const block = style == ScalarStyle.literal || style == ScalarStyle.folded;
+const singleQuoted = style == ScalarStyle.singleQuoted;
+const doubleQuoted = style == ScalarStyle.doubleQuoted;
const allowPlain = flowLevel_ > 0 ? analysis_.flags.allowFlowPlain
: analysis_.flags.allowBlockPlain;
//simple empty or multiline scalars can't be written in plain style
-const simpleNonPlain = (context_ == Context.MappingSimpleKey) &&
+const simpleNonPlain = (context_ == Context.mappingSimpleKey) &&
(analysis_.flags.empty || analysis_.flags.multiline);
if(doubleQuoted || canonical_)
{
-return ScalarStyle.DoubleQuoted;
+return ScalarStyle.doubleQuoted;
}
if(invalidOrPlain && event_.implicit && !simpleNonPlain && allowPlain)
{
-return ScalarStyle.Plain;
+return ScalarStyle.plain;
}
-if(block && flowLevel_ == 0 && context_ != Context.MappingSimpleKey &&
+if(block && flowLevel_ == 0 && context_ != Context.mappingSimpleKey &&
analysis_.flags.allowBlock)
{
return style;
@@ -860,12 +860,12 @@ struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType))
if((invalidOrPlain || singleQuoted) &&
analysis_.flags.allowSingleQuoted &&
-!(context_ == Context.MappingSimpleKey && analysis_.flags.multiline))
+!(context_ == Context.mappingSimpleKey && analysis_.flags.multiline))
{
-return ScalarStyle.SingleQuoted;
+return ScalarStyle.singleQuoted;
}
-return ScalarStyle.DoubleQuoted;
+return ScalarStyle.doubleQuoted;
}
///Prepare YAML version string for output.
@@ -1548,7 +1548,7 @@ struct ScalarWriter(Range, CharType)
///Write text as plain scalar.
void writePlain() @safe
{
-if(emitter_.context_ == Emitter!(Range, CharType).Context.Root){emitter_.openEnded_ = true;}
+if(emitter_.context_ == Emitter!(Range, CharType).Context.root){emitter_.openEnded_ = true;}
if(text_ == ""){return;}
if(!emitter_.whitespace_)
{

@@ -23,17 +23,17 @@ package:
///Event types.
enum EventID : ubyte
{
-Invalid = 0, /// Invalid (uninitialized) event.
-StreamStart, /// Stream start
-StreamEnd, /// Stream end
-DocumentStart, /// Document start
-DocumentEnd, /// Document end
-Alias, /// Alias
-Scalar, /// Scalar
-SequenceStart, /// Sequence start
-SequenceEnd, /// Sequence end
-MappingStart, /// Mapping start
-MappingEnd /// Mapping end
+invalid = 0, /// Invalid (uninitialized) event.
+streamStart, /// Stream start
+streamEnd, /// Stream end
+documentStart, /// Document start
+documentEnd, /// Document end
+alias_, /// Alias
+scalar, /// Scalar
+sequenceStart, /// Sequence start
+sequenceEnd, /// Sequence end
+mappingStart, /// Mapping start
+mappingEnd /// Mapping end
}
/**
@@ -65,9 +65,9 @@ struct Event
TagDirective[] _tagDirectives;
}
///Event type.
-EventID id = EventID.Invalid;
+EventID id = EventID.invalid;
///Style of scalar event, if this is a scalar event.
-ScalarStyle scalarStyle = ScalarStyle.Invalid;
+ScalarStyle scalarStyle = ScalarStyle.invalid;
union
{
///Should the tag be implicitly resolved?
@@ -80,26 +80,26 @@ struct Event
bool explicitDocument;
}
///Collection style, if this is a SequenceStart or MappingStart.
-CollectionStyle collectionStyle = CollectionStyle.Invalid;
+CollectionStyle collectionStyle = CollectionStyle.invalid;
///Is this a null (uninitialized) event?
-@property bool isNull() const pure @safe nothrow {return id == EventID.Invalid;}
+@property bool isNull() const pure @safe nothrow {return id == EventID.invalid;}
///Get string representation of the token ID.
@property string idString() const @safe {return to!string(id);}
auto ref anchor() inout @trusted pure {
-assert(id != EventID.DocumentStart, "DocumentStart events cannot have anchors.");
+assert(id != EventID.documentStart, "DocumentStart events cannot have anchors.");
return _anchor;
}
auto ref tag() inout @trusted pure {
-assert(id != EventID.DocumentStart, "DocumentStart events cannot have tags.");
+assert(id != EventID.documentStart, "DocumentStart events cannot have tags.");
return _tag;
}
auto ref tagDirectives() inout @trusted pure {
-assert(id == EventID.DocumentStart, "Only DocumentStart events have tag directives.");
+assert(id == EventID.documentStart, "Only DocumentStart events have tag directives.");
return _tagDirectives;
}
@@ -138,8 +138,8 @@ Event collectionStartEvent(EventID id)
(const Mark start, const Mark end, const string anchor, const string tag,
const bool implicit, const CollectionStyle style) pure @safe nothrow
{
-static assert(id == EventID.SequenceStart || id == EventID.SequenceEnd ||
-id == EventID.MappingStart || id == EventID.MappingEnd);
+static assert(id == EventID.sequenceStart || id == EventID.sequenceEnd ||
+id == EventID.mappingStart || id == EventID.mappingEnd);
Event result;
result.startMark = start;
result.endMark = end;
@@ -163,19 +163,19 @@ Event streamStartEvent(const Mark start, const Mark end)
Event result;
result.startMark = start;
result.endMark = end;
-result.id = EventID.StreamStart;
+result.id = EventID.streamStart;
return result;
}
///Aliases for simple events.
-alias streamEndEvent = event!(EventID.StreamEnd);
-alias aliasEvent = event!(EventID.Alias);
-alias sequenceEndEvent = event!(EventID.SequenceEnd);
-alias mappingEndEvent = event!(EventID.MappingEnd);
+alias streamEndEvent = event!(EventID.streamEnd);
+alias aliasEvent = event!(EventID.alias_);
+alias sequenceEndEvent = event!(EventID.sequenceEnd);
+alias mappingEndEvent = event!(EventID.mappingEnd);
///Aliases for collection start events.
-alias sequenceStartEvent = collectionStartEvent!(EventID.SequenceStart);
-alias mappingStartEvent = collectionStartEvent!(EventID.MappingStart);
+alias sequenceStartEvent = collectionStartEvent!(EventID.sequenceStart);
+alias mappingStartEvent = collectionStartEvent!(EventID.mappingStart);
/**
* Construct a document start event.
@@ -193,7 +193,7 @@ Event documentStartEvent(const Mark start, const Mark end, const bool explicit,
result.value = YAMLVersion;
result.startMark = start;
result.endMark = end;
-result.id = EventID.DocumentStart;
+result.id = EventID.documentStart;
result.explicitDocument = explicit;
result.tagDirectives = tagDirectives;
return result;
@@ -211,7 +211,7 @@ Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pu
Event result;
result.startMark = start;
result.endMark = end;
-result.id = EventID.DocumentEnd;
+result.id = EventID.documentEnd;
result.explicitDocument = explicit;
return result;
}
@@ -227,7 +227,7 @@ Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pu
/// style = Scalar style.
Event scalarEvent(const Mark start, const Mark end, const string anchor, const string tag,
const bool implicit, const string value,
-const ScalarStyle style = ScalarStyle.Invalid) @safe pure nothrow @nogc
+const ScalarStyle style = ScalarStyle.invalid) @safe pure nothrow @nogc
{
Event result;
result.value = value;
@@ -237,7 +237,7 @@ Event scalarEvent(const Mark start, const Mark end, const string anchor, const s
result.anchor = anchor;
result.tag = tag;
-result.id = EventID.Scalar;
+result.id = EventID.scalar;
result.scalarStyle = style;
result.implicit = implicit;
return result;

@@ -33,13 +33,13 @@ ScalarStyle scalarStyleHack(ref const(Node) node) @safe nothrow
Node node = Loader.fromString(`"42"`).load(); // loaded from a file
if(node.isScalar)
{
-assert(node.scalarStyleHack() == ScalarStyle.DoubleQuoted);
+assert(node.scalarStyleHack() == ScalarStyle.doubleQuoted);
}
}
@safe unittest
{
auto node = Node(5);
-assert(node.scalarStyleHack() == ScalarStyle.Invalid);
+assert(node.scalarStyleHack() == ScalarStyle.invalid);
}
/** Get the collection style a YAML node had in the file it was loaded from.
@@ -56,7 +56,7 @@ CollectionStyle collectionStyleHack(ref const(Node) node) @safe nothrow
@safe unittest
{
auto node = Node([1, 2, 3, 4, 5]);
-assert(node.collectionStyleHack() == CollectionStyle.Invalid);
+assert(node.collectionStyleHack() == CollectionStyle.invalid);
}
@@ -75,8 +75,8 @@ void scalarStyleHack(ref Node node, const ScalarStyle rhs) @safe nothrow
@safe unittest
{
auto node = Node(5);
-node.scalarStyleHack = ScalarStyle.DoubleQuoted;
-assert(node.scalarStyleHack() == ScalarStyle.DoubleQuoted);
+node.scalarStyleHack = ScalarStyle.doubleQuoted;
+assert(node.scalarStyleHack() == ScalarStyle.doubleQuoted);
}
/** Set the collection style node should have when written to a file.
@@ -94,6 +94,6 @@ void collectionStyleHack(ref Node node, const CollectionStyle rhs) @safe nothrow
@safe unittest
{
auto node = Node([1, 2, 3, 4, 5]);
-node.collectionStyleHack = CollectionStyle.Block;
-assert(node.collectionStyleHack() == CollectionStyle.Block);
+node.collectionStyleHack = CollectionStyle.block;
+assert(node.collectionStyleHack() == CollectionStyle.block);
}

@@ -11,11 +11,11 @@ module dyaml.linebreak;
enum LineBreak
{
///Unix line break ("\n").
-Unix,
+unix,
///Windows line break ("\r\n").
-Windows,
+windows,
///Macintosh line break ("\r").
-Macintosh
+macintosh
}
package:
@@ -25,8 +25,8 @@ string lineBreak(in LineBreak b) pure @safe nothrow
{
final switch(b)
{
-case LineBreak.Unix: return "\n";
-case LineBreak.Windows: return "\r\n";
-case LineBreak.Macintosh: return "\r";
+case LineBreak.unix: return "\n";
+case LineBreak.windows: return "\r\n";
+case LineBreak.macintosh: return "\r";
}
}
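
For callers, only the member spelling changes; a small usage sketch (assuming `import dyaml;` and `std.array : Appender`, mirroring the Dumper unittests earlier in this diff):

auto dumper = dumper(new Appender!string);
dumper.lineBreak = LineBreak.windows; // was LineBreak.Windows; emits "\r\n" breaks
dumper.dump(Node(0));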

@@ -43,9 +43,9 @@ class NodeException : YAMLException
// Node kinds.
package enum NodeID : ubyte
{
-Scalar,
-Sequence,
-Mapping
+scalar,
+sequence,
+mapping
}
/// Null YAML type. Used in nodes with _null values.
@@ -181,9 +181,9 @@ struct Node
// Tag of the node.
string tag_;
// Node scalar style. Used to remember style this node was loaded with.
-ScalarStyle scalarStyle = ScalarStyle.Invalid;
+ScalarStyle scalarStyle = ScalarStyle.invalid;
// Node collection style. Used to remember style this node was loaded with.
-CollectionStyle collectionStyle = CollectionStyle.Invalid;
+CollectionStyle collectionStyle = CollectionStyle.invalid;
static assert(Value.sizeof <= 24, "Unexpected YAML value size");
static assert(Node.sizeof <= 56, "Unexpected YAML node size");
@@ -2010,7 +2010,7 @@ struct Node
import dyaml.dumper;
auto stream = new Appender!string();
auto node = Node([1, 2, 3, 4, 5]);
-node.setStyle(CollectionStyle.Block);
+node.setStyle(CollectionStyle.block);
auto dumper = dumper(stream);
dumper.dump(node);
@@ -2021,15 +2021,15 @@ struct Node
import dyaml.dumper;
auto stream = new Appender!string();
auto node = Node(4);
-node.setStyle(ScalarStyle.Literal);
+node.setStyle(ScalarStyle.literal);
auto dumper = dumper(stream);
dumper.dump(node);
}
@safe unittest
{
-assertThrown!NodeException(Node(4).setStyle(CollectionStyle.Block));
-assertThrown!NodeException(Node([4]).setStyle(ScalarStyle.Literal));
+assertThrown!NodeException(Node(4).setStyle(CollectionStyle.block));
+assertThrown!NodeException(Node([4]).setStyle(ScalarStyle.literal));
}
@safe unittest
{
@@ -2037,7 +2037,7 @@ struct Node
{
auto stream = new Appender!string();
auto node = Node([1, 2, 3, 4, 5]);
-node.setStyle(CollectionStyle.Block);
+node.setStyle(CollectionStyle.block);
auto dumper = dumper(stream);
dumper.explicitEnd = false;
dumper.explicitStart = false;
@@ -2050,7 +2050,7 @@ struct Node
{
auto stream = new Appender!string();
auto node = Node([1, 2, 3, 4, 5]);
-node.setStyle(CollectionStyle.Flow);
+node.setStyle(CollectionStyle.flow);
auto dumper = dumper(stream);
dumper.explicitEnd = false;
dumper.explicitStart = false;
@@ -2063,7 +2063,7 @@ struct Node
{
auto stream = new Appender!string();
auto node = Node(1);
-node.setStyle(ScalarStyle.SingleQuoted);
+node.setStyle(ScalarStyle.singleQuoted);
auto dumper = dumper(stream);
dumper.explicitEnd = false;
dumper.explicitStart = false;
@@ -2075,7 +2075,7 @@ struct Node
{
auto stream = new Appender!string();
auto node = Node(1);
-node.setStyle(ScalarStyle.DoubleQuoted);
+node.setStyle(ScalarStyle.doubleQuoted);
auto dumper = dumper(stream);
dumper.explicitEnd = false;
dumper.explicitStart = false;

@@ -169,7 +169,7 @@ final class Parser
*/
void popFront() @safe
{
-currentEvent_.id = EventID.Invalid;
+currentEvent_.id = EventID.invalid;
ensureState();
}
@@ -231,8 +231,8 @@ final class Parser
Event parseImplicitDocumentStart() @safe
{
// Parse an implicit document.
-if(!scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
-TokenID.StreamEnd))
+if(!scanner_.checkToken(TokenID.directive, TokenID.documentStart,
+TokenID.streamEnd))
{
tagDirectives_ = defaultTagDirectives_;
const token = scanner_.peekToken();
@@ -249,15 +249,15 @@ final class Parser
Event parseDocumentStart() @trusted
{
//Parse any extra document end indicators.
-while(scanner_.checkToken(TokenID.DocumentEnd)){scanner_.getToken();}
+while(scanner_.checkToken(TokenID.documentEnd)){scanner_.getToken();}
//Parse an explicit document.
-if(!scanner_.checkToken(TokenID.StreamEnd))
+if(!scanner_.checkToken(TokenID.streamEnd))
{
const startMark = scanner_.peekToken().startMark;
auto tagDirectives = processDirectives();
-enforce(scanner_.checkToken(TokenID.DocumentStart),
+enforce(scanner_.checkToken(TokenID.documentStart),
new ParserException("Expected document start but found " ~
scanner_.peekToken().idString,
scanner_.peekToken().startMark));
@@ -282,7 +282,7 @@ final class Parser
Event parseDocumentEnd() @safe
{
Mark startMark = scanner_.peekToken().startMark;
-const bool explicit = scanner_.checkToken(TokenID.DocumentEnd);
+const bool explicit = scanner_.checkToken(TokenID.documentEnd);
Mark endMark = explicit ? scanner_.getToken().endMark : startMark;
state_ = &parseDocumentStart;
@@ -293,8 +293,8 @@ final class Parser
///Parse document content.
Event parseDocumentContent() @safe
{
-if(scanner_.checkToken(TokenID.Directive, TokenID.DocumentStart,
-TokenID.DocumentEnd, TokenID.StreamEnd))
+if(scanner_.checkToken(TokenID.directive, TokenID.documentStart,
+TokenID.documentEnd, TokenID.streamEnd))
{
state_ = popState();
return processEmptyScalar(scanner_.peekToken().startMark);
@@ -310,11 +310,11 @@ final class Parser
tagDirectives_.length = 0;
// Process directives.
-while(scanner_.checkToken(TokenID.Directive))
+while(scanner_.checkToken(TokenID.directive))
{
const token = scanner_.getToken();
string value = token.value.idup;
-if(token.directive == DirectiveType.YAML)
+if(token.directive == DirectiveType.yaml)
{
enforce(YAMLVersion_ is null,
new ParserException("Duplicate YAML directive", token.startMark));
@@ -324,7 +324,7 @@ final class Parser
token.startMark));
YAMLVersion_ = value;
}
-else if(token.directive == DirectiveType.TAG)
+else if(token.directive == DirectiveType.tag)
{
auto handle = value[0 .. token.valueDivider];
@@ -382,7 +382,7 @@ final class Parser
const Flag!"indentlessSequence" indentlessSequence = No.indentlessSequence)
@trusted
{
-if(scanner_.checkToken(TokenID.Alias))
+if(scanner_.checkToken(TokenID.alias_))
{
const token = scanner_.getToken();
state_ = popState();
@@ -404,7 +404,7 @@ final class Parser
invalidMarks = false;
const token = scanner_.getToken();
if(first){startMark = token.startMark;}
-if(id == TokenID.Tag)
+if(id == TokenID.tag)
{
tagMark = token.startMark;
tagHandleEnd = token.valueDivider;
@@ -415,8 +415,8 @@ final class Parser
}
//Anchor and/or tag can be in any order.
-if(get(TokenID.Anchor, Yes.first, anchor)){get(TokenID.Tag, No.first, tag);}
-else if(get(TokenID.Tag, Yes.first, tag)) {get(TokenID.Anchor, No.first, anchor);}
+if(get(TokenID.anchor, Yes.first, anchor)){get(TokenID.tag, No.first, tag);}
+else if(get(TokenID.tag, Yes.first, tag)) {get(TokenID.anchor, No.first, anchor);}
if(tag !is null){tag = processTag(tag, tagHandleEnd, startMark, tagMark);}
@@ -427,57 +427,57 @@ final class Parser
bool implicit = (tag is null || tag == "!");
-if(indentlessSequence && scanner_.checkToken(TokenID.BlockEntry))
+if(indentlessSequence && scanner_.checkToken(TokenID.blockEntry))
{
state_ = &parseIndentlessSequenceEntry;
return sequenceStartEvent
(startMark, scanner_.peekToken().endMark, anchor,
-tag, implicit, CollectionStyle.Block);
+tag, implicit, CollectionStyle.block);
}
-if(scanner_.checkToken(TokenID.Scalar))
+if(scanner_.checkToken(TokenID.scalar))
{
auto token = scanner_.getToken();
-auto value = token.style == ScalarStyle.DoubleQuoted
+auto value = token.style == ScalarStyle.doubleQuoted
? handleDoubleQuotedScalarEscapes(token.value)
: cast(string)token.value;
-implicit = (token.style == ScalarStyle.Plain && tag is null) || tag == "!";
+implicit = (token.style == ScalarStyle.plain && tag is null) || tag == "!";
state_ = popState();
return scalarEvent(startMark, token.endMark, anchor, tag,
implicit, value, token.style);
}
-if(scanner_.checkToken(TokenID.FlowSequenceStart))
+if(scanner_.checkToken(TokenID.flowSequenceStart))
{
endMark = scanner_.peekToken().endMark;
state_ = &parseFlowSequenceEntry!(Yes.first);
return sequenceStartEvent(startMark, endMark, anchor, tag,
-implicit, CollectionStyle.Flow);
+implicit, CollectionStyle.flow);
}
-if(scanner_.checkToken(TokenID.FlowMappingStart))
+if(scanner_.checkToken(TokenID.flowMappingStart))
{
endMark = scanner_.peekToken().endMark;
state_ = &parseFlowMappingKey!(Yes.first);
return mappingStartEvent(startMark, endMark, anchor, tag,
-implicit, CollectionStyle.Flow);
+implicit, CollectionStyle.flow);
}
-if(block && scanner_.checkToken(TokenID.BlockSequenceStart))
+if(block && scanner_.checkToken(TokenID.blockSequenceStart))
{
endMark = scanner_.peekToken().endMark;
state_ = &parseBlockSequenceEntry!(Yes.first);
return sequenceStartEvent(startMark, endMark, anchor, tag,
-implicit, CollectionStyle.Block);
+implicit, CollectionStyle.block);
}
-if(block && scanner_.checkToken(TokenID.BlockMappingStart))
+if(block && scanner_.checkToken(TokenID.blockMappingStart))
{
endMark = scanner_.peekToken().endMark;
state_ = &parseBlockMappingKey!(Yes.first);
return mappingStartEvent(startMark, endMark, anchor, tag,
-implicit, CollectionStyle.Block);
+implicit, CollectionStyle.block);
}
if(anchor !is null || tag !is null)
@@ -622,10 +622,10 @@ final class Parser
{
static if(first){pushMark(scanner_.getToken().startMark);}
-if(scanner_.checkToken(TokenID.BlockEntry))
+if(scanner_.checkToken(TokenID.blockEntry))
{
const token = scanner_.getToken();
-if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.BlockEnd))
+if(!scanner_.checkToken(TokenID.blockEntry, TokenID.blockEnd))
{
pushState(&parseBlockSequenceEntry!(No.first));
return parseBlockNode();
@@ -635,7 +635,7 @@ final class Parser
return processEmptyScalar(token.endMark);
}
-if(!scanner_.checkToken(TokenID.BlockEnd))
+if(!scanner_.checkToken(TokenID.blockEnd))
{
const token = scanner_.peekToken();
throw new ParserException("While parsing a block collection", marks_.data.back,
@@ -654,12 +654,12 @@ final class Parser
///Parse an entry of an indentless sequence.
Event parseIndentlessSequenceEntry() @safe
{
-if(scanner_.checkToken(TokenID.BlockEntry))
+if(scanner_.checkToken(TokenID.blockEntry))
{
const token = scanner_.getToken();
-if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.Key,
-TokenID.Value, TokenID.BlockEnd))
+if(!scanner_.checkToken(TokenID.blockEntry, TokenID.key,
+TokenID.value, TokenID.blockEnd))
{
pushState(&parseIndentlessSequenceEntry);
return parseBlockNode();
@@ -686,11 +686,11 @@ final class Parser
{
static if(first){pushMark(scanner_.getToken().startMark);}
-if(scanner_.checkToken(TokenID.Key))
+if(scanner_.checkToken(TokenID.key))
{
const token = scanner_.getToken();
-if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
+if(!scanner_.checkToken(TokenID.key, TokenID.value, TokenID.blockEnd))
{
pushState(&parseBlockMappingValue);
return parseBlockNodeOrIndentlessSequence();
@@ -700,7 +700,7 @@ final class Parser
return processEmptyScalar(token.endMark);
}
-if(!scanner_.checkToken(TokenID.BlockEnd))
+if(!scanner_.checkToken(TokenID.blockEnd))
{
const token = scanner_.peekToken();
throw new ParserException("While parsing a block mapping", marks_.data.back,
@@ -717,11 +717,11 @@ final class Parser
///Parse a value in a block mapping.
Event parseBlockMappingValue() @safe
{
-if(scanner_.checkToken(TokenID.Value))
+if(scanner_.checkToken(TokenID.value))
{
const token = scanner_.getToken();
-if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
+if(!scanner_.checkToken(TokenID.key, TokenID.value, TokenID.blockEnd))
{
pushState(&parseBlockMappingKey!(No.first));
return parseBlockNodeOrIndentlessSequence();
@@ -753,11 +753,11 @@ final class Parser
{
static if(first){pushMark(scanner_.getToken().startMark);}
-if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
+if(!scanner_.checkToken(TokenID.flowSequenceEnd))
{
static if(!first)
{
-if(scanner_.checkToken(TokenID.FlowEntry))
+if(scanner_.checkToken(TokenID.flowEntry))
{
scanner_.getToken();
}
@@ -770,14 +770,14 @@ final class Parser
}
}
-if(scanner_.checkToken(TokenID.Key))
+if(scanner_.checkToken(TokenID.key))
{
const token = scanner_.peekToken();
state_ = &parseFlowSequenceEntryMappingKey;
return mappingStartEvent(token.startMark, token.endMark,
-null, null, true, CollectionStyle.Flow);
+null, null, true, CollectionStyle.flow);
}
-else if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
+else if(!scanner_.checkToken(TokenID.flowSequenceEnd))
{
pushState(&parseFlowSequenceEntry!(No.first));
return parseFlowNode();
@@ -795,8 +795,8 @@ final class Parser
{
const token = scanner_.getToken();
-if(!scanner_.checkToken(TokenID.Value, TokenID.FlowEntry,
-TokenID.FlowSequenceEnd))
+if(!scanner_.checkToken(TokenID.value, TokenID.flowEntry,
+TokenID.flowSequenceEnd))
{
pushState(nextState);
return parseFlowNode();
@@ -816,10 +816,10 @@ final class Parser
Event parseFlowValue(TokenID checkId, in Event delegate() @safe nextState)
@safe
{
-if(scanner_.checkToken(TokenID.Value))
+if(scanner_.checkToken(TokenID.value))
{
const token = scanner_.getToken();
-if(!scanner_.checkToken(TokenID.FlowEntry, checkId))
+if(!scanner_.checkToken(TokenID.flowEntry, checkId))
{
pushState(nextState);
return parseFlowNode();
@@ -836,7 +836,7 @@ final class Parser
///Parse a mapping value in an entry in a flow sequence.
Event parseFlowSequenceEntryMappingValue() @safe
{
-return parseFlowValue(TokenID.FlowSequenceEnd,
+return parseFlowValue(TokenID.flowSequenceEnd,
&parseFlowSequenceEntryMappingEnd);
}
@@ -861,11 +861,11 @@ final class Parser
{
static if(first){pushMark(scanner_.getToken().startMark);}
-if(!scanner_.checkToken(TokenID.FlowMappingEnd))
+if(!scanner_.checkToken(TokenID.flowMappingEnd))
{
static if(!first)
{
-if(scanner_.checkToken(TokenID.FlowEntry))
+if(scanner_.checkToken(TokenID.flowEntry))
{
scanner_.getToken();
}
@@ -878,12 +878,12 @@ final class Parser
}
}
-if(scanner_.checkToken(TokenID.Key))
+if(scanner_.checkToken(TokenID.key))
{
return parseFlowKey(&parseFlowMappingValue);
}
-if(!scanner_.checkToken(TokenID.FlowMappingEnd))
+if(!scanner_.checkToken(TokenID.flowMappingEnd))
{
pushState(&parseFlowMappingEmptyValue);
return parseFlowNode();
@@ -899,7 +899,7 @@ final class Parser
///Parse a value in a flow mapping.
Event parseFlowMappingValue() @safe
{
-return parseFlowValue(TokenID.FlowMappingEnd, &parseFlowMappingKey!(No.first));
+return parseFlowValue(TokenID.flowMappingEnd, &parseFlowMappingKey!(No.first));
}
///Parse an empty value in a flow mapping.

@@ -50,9 +50,9 @@ final class Representer
// Representer functions indexed by types.
Node function(ref Node, Representer) @safe[TypeInfo] representers_;
// Default style for scalar nodes.
-ScalarStyle defaultScalarStyle_ = ScalarStyle.Invalid;
+ScalarStyle defaultScalarStyle_ = ScalarStyle.invalid;
// Default style for collection nodes.
-CollectionStyle defaultCollectionStyle_ = CollectionStyle.Invalid;
+CollectionStyle defaultCollectionStyle_ = CollectionStyle.invalid;
public:
@disable bool opEquals(ref Representer);
@@ -81,13 +81,13 @@ final class Representer
addRepresenter!SysTime(&representSysTime);
}
-///Set default _style for scalars. If style is $(D ScalarStyle.Invalid), the _style is chosen automatically.
+///Set default _style for scalars. If style is $(D ScalarStyle.invalid), the _style is chosen automatically.
@property void defaultScalarStyle(ScalarStyle style) pure @safe nothrow
{
defaultScalarStyle_ = style;
}
-///Set default _style for collections. If style is $(D CollectionStyle.Invalid), the _style is chosen automatically.
+///Set default _style for collections. If style is $(D CollectionStyle.invalid), the _style is chosen automatically.
@property void defaultCollectionStyle(CollectionStyle style) pure @safe nothrow
{
defaultCollectionStyle_ = style;
@@ -231,9 +231,9 @@ final class Representer
* Returns: The represented node.
*/
Node representScalar(string tag, string scalar,
-ScalarStyle style = ScalarStyle.Invalid) @safe
+ScalarStyle style = ScalarStyle.invalid) @safe
{
-if(style == ScalarStyle.Invalid){style = defaultScalarStyle_;}
+if(style == ScalarStyle.invalid){style = defaultScalarStyle_;}
auto newNode = Node(scalar, tag);
newNode.scalarStyle = style;
return newNode;
@@ -285,26 +285,26 @@ final class Representer
* Throws: $(D RepresenterException) if a child could not be represented.
*/
Node representSequence(string tag, Node[] sequence,
-CollectionStyle style = CollectionStyle.Invalid) @safe
+CollectionStyle style = CollectionStyle.invalid) @safe
{
Node[] value;
value.length = sequence.length;
-auto bestStyle = CollectionStyle.Flow;
+auto bestStyle = CollectionStyle.flow;
foreach(idx, ref item; sequence)
{
value[idx] = representData(item);
const isScalar = value[idx].isScalar;
const s = value[idx].scalarStyle;
-if(!isScalar || (s != ScalarStyle.Invalid && s != ScalarStyle.Plain))
+if(!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain))
{
-bestStyle = CollectionStyle.Block;
+bestStyle = CollectionStyle.block;
}
}
-if(style == CollectionStyle.Invalid)
+if(style == CollectionStyle.invalid)
{
-style = defaultCollectionStyle_ != CollectionStyle.Invalid
+style = defaultCollectionStyle_ != CollectionStyle.invalid
? defaultCollectionStyle_
: bestStyle;
}
@@ -336,7 +336,7 @@ final class Representer
auto nodes = [Node(value.x), Node(value.y), Node(value.z)];
//use flow style
return representer.representSequence("!mystruct.tag", nodes,
-CollectionStyle.Flow);
+CollectionStyle.flow);
}
auto dumper = dumper(new Appender!string);
@@ -360,12 +360,12 @@ final class Representer
* Throws: $(D RepresenterException) if a child could not be represented.
*/
Node representMapping(string tag, Node.Pair[] pairs,
-CollectionStyle style = CollectionStyle.Invalid) @safe
+CollectionStyle style = CollectionStyle.invalid) @safe
{
Node.Pair[] value;
value.length = pairs.length;
-auto bestStyle = CollectionStyle.Flow;
+auto bestStyle = CollectionStyle.flow;
foreach(idx, ref pair; pairs)
{
value[idx] = Node.Pair(representData(pair.key), representData(pair.value));
@@ -374,20 +374,20 @@ final class Representer
const keyStyle = value[idx].key.scalarStyle;
const valStyle = value[idx].value.scalarStyle;
if(!keyScalar ||
-(keyStyle != ScalarStyle.Invalid && keyStyle != ScalarStyle.Plain))
+(keyStyle != ScalarStyle.invalid && keyStyle != ScalarStyle.plain))
{
-bestStyle = CollectionStyle.Block;
+bestStyle = CollectionStyle.block;
}
if(!valScalar ||
-(valStyle != ScalarStyle.Invalid && valStyle != ScalarStyle.Plain))
+(valStyle != ScalarStyle.invalid && valStyle != ScalarStyle.plain))
{
-bestStyle = CollectionStyle.Block;
+bestStyle = CollectionStyle.block;
}
}
-if(style == CollectionStyle.Invalid)
+if(style == CollectionStyle.invalid)
{
-style = defaultCollectionStyle_ != CollectionStyle.Invalid
+style = defaultCollectionStyle_ != CollectionStyle.invalid
? defaultCollectionStyle_
: bestStyle;
}
@@ -445,11 +445,11 @@ final class Representer
if(data.tag_ !is null){result.tag_ = data.tag_;}
//Remember style if this was loaded before.
-if(data.scalarStyle != ScalarStyle.Invalid)
+if(data.scalarStyle != ScalarStyle.invalid)
{
result.scalarStyle = data.scalarStyle;
}
-if(data.collectionStyle != CollectionStyle.Invalid)
+if(data.collectionStyle != CollectionStyle.invalid)
{
result.collectionStyle = data.collectionStyle;
}
@@ -487,7 +487,7 @@ Node representBytes(ref Node node, Representer representer) @safe
if(value is null){return representNull(node, representer);}
return representer.representScalar("tag:yaml.org,2002:binary",
Base64.encode(value).idup,
-ScalarStyle.Literal);
+ScalarStyle.literal);
}
///Represent a bool _node as a bool scalar.

@@ -138,7 +138,7 @@ final class Resolver
{
if((tag !is null) && tag != "!"){return tag;}
-if(kind == NodeID.Scalar)
+if(kind == NodeID.scalar)
{
if(!implicit){return defaultScalarTag_;}
@@ -156,8 +156,8 @@ final class Resolver
}
return defaultScalarTag_;
}
-else if(kind == NodeID.Sequence){return defaultSequenceTag_;}
-else if(kind == NodeID.Mapping) {return defaultMappingTag_;}
+else if(kind == NodeID.sequence){return defaultSequenceTag_;}
+else if(kind == NodeID.mapping) {return defaultMappingTag_;}
assert(false, "This line of code should never be reached");
}
@safe unittest
@@ -169,7 +169,7 @@ final class Resolver
const string expected = tag;
foreach(value; values)
{
-const string resolved = resolver.resolve(NodeID.Scalar, null, value, true);
+const string resolved = resolver.resolve(NodeID.scalar, null, value, true);
if(expected != resolved)
{
return false;

@@ -113,11 +113,11 @@ final class Scanner
enum Chomping
{
/// Strip all trailing line breaks. '-' indicator.
-Strip,
+strip,
/// Line break of the last line is preserved, others discarded. Default.
-Clip,
+clip,
/// All trailing line breaks are preserved. '+' indicator.
-Keep
+keep
}
/// Reader used to read from a file/stream.
@@ -495,7 +495,7 @@ final class Scanner
/// Add DOCUMENT-START or DOCUMENT-END token.
void fetchDocumentIndicator(TokenID id)()
-if(id == TokenID.DocumentStart || id == TokenID.DocumentEnd)
+if(id == TokenID.documentStart || id == TokenID.documentEnd)
{
// Set indentation to -1 .
unwindIndent(-1);
@@ -509,8 +509,8 @@ final class Scanner
}
/// Aliases to add DOCUMENT-START or DOCUMENT-END token.
-alias fetchDocumentStart = fetchDocumentIndicator!(TokenID.DocumentStart);
-alias fetchDocumentEnd = fetchDocumentIndicator!(TokenID.DocumentEnd);
+alias fetchDocumentStart = fetchDocumentIndicator!(TokenID.documentStart);
+alias fetchDocumentEnd = fetchDocumentIndicator!(TokenID.documentEnd);
/// Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
void fetchFlowCollectionStart(TokenID id)() @safe
@@ -527,8 +527,8 @@ final class Scanner
}
/// Aliases to add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
-alias fetchFlowSequenceStart = fetchFlowCollectionStart!(TokenID.FlowSequenceStart);
-alias fetchFlowMappingStart = fetchFlowCollectionStart!(TokenID.FlowMappingStart);
+alias fetchFlowSequenceStart = fetchFlowCollectionStart!(TokenID.flowSequenceStart);
+alias fetchFlowMappingStart = fetchFlowCollectionStart!(TokenID.flowMappingStart);
/// Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
void fetchFlowCollectionEnd(TokenID id)()
@@ -545,8 +545,8 @@ final class Scanner
}
/// Aliases to add FLOW-SEQUENCE-START or FLOW-MAPPING-START token/
-alias fetchFlowSequenceEnd = fetchFlowCollectionEnd!(TokenID.FlowSequenceEnd);
-alias fetchFlowMappingEnd = fetchFlowCollectionEnd!(TokenID.FlowMappingEnd);
+alias fetchFlowSequenceEnd = fetchFlowCollectionEnd!(TokenID.flowSequenceEnd);
+alias fetchFlowMappingEnd = fetchFlowCollectionEnd!(TokenID.flowMappingEnd);
/// Add FLOW-ENTRY token;
void fetchFlowEntry() @safe
@@ -580,7 +580,7 @@ final class Scanner
/// Add BLOCK-ENTRY token. Might add BLOCK-SEQUENCE-START in the process.
void fetchBlockEntry() @safe
{
-if(flowLevel_ == 0) { blockChecks!("Sequence", TokenID.BlockSequenceStart)(); }
+if(flowLevel_ == 0) { blockChecks!("Sequence", TokenID.blockSequenceStart)(); }
// It's an error for the block entry to occur in the flow context,
// but we let the parser detect this.
@@ -598,7 +598,7 @@ final class Scanner
/// Add KEY token. Might add BLOCK-MAPPING-START in the process.
void fetchKey() @safe
{
-if(flowLevel_ == 0) { blockChecks!("Mapping", TokenID.BlockMappingStart)(); }
+if(flowLevel_ == 0) { blockChecks!("Mapping", TokenID.blockMappingStart)(); }
// Reset possible simple key on the current level.
removePossibleSimpleKey();
@@ -665,7 +665,7 @@ final class Scanner
/// Add ALIAS or ANCHOR token.
void fetchAnchor_(TokenID id)() @safe
-if(id == TokenID.Alias || id == TokenID.Anchor)
+if(id == TokenID.alias_ || id == TokenID.anchor)
{
// ALIAS/ANCHOR could be a simple key.
savePossibleSimpleKey();
@@ -678,8 +678,8 @@ final class Scanner
}
/// Aliases to add ALIAS or ANCHOR token.
-alias fetchAlias = fetchAnchor_!(TokenID.Alias);
-alias fetchAnchor = fetchAnchor_!(TokenID.Anchor);
+alias fetchAlias = fetchAnchor_!(TokenID.alias_);
+alias fetchAnchor = fetchAnchor_!(TokenID.anchor);
/// Add TAG token.
void fetchTag() @safe
@@ -695,7 +695,7 @@ final class Scanner
/// Add block SCALAR token.
void fetchBlockScalar(ScalarStyle style)() @safe
-if(style == ScalarStyle.Literal || style == ScalarStyle.Folded)
+if(style == ScalarStyle.literal || style == ScalarStyle.folded)
{
// Reset possible simple key on the current level.
removePossibleSimpleKey();
@@ -708,8 +708,8 @@ final class Scanner
}
/// Aliases to add literal or folded block scalar.
-alias fetchLiteral = fetchBlockScalar!(ScalarStyle.Literal);
-alias fetchFolded = fetchBlockScalar!(ScalarStyle.Folded);
+alias fetchLiteral = fetchBlockScalar!(ScalarStyle.literal);
+alias fetchFolded = fetchBlockScalar!(ScalarStyle.folded);
/// Add quoted flow SCALAR token.
void fetchFlowScalar(ScalarStyle quotes)()
@@ -726,8 +726,8 @@ final class Scanner
}
/// Aliases to add single or double quoted block scalar.
-alias fetchSingle = fetchFlowScalar!(ScalarStyle.SingleQuoted);
-alias fetchDouble = fetchFlowScalar!(ScalarStyle.DoubleQuoted);
+alias fetchSingle = fetchFlowScalar!(ScalarStyle.singleQuoted);
+alias fetchDouble = fetchFlowScalar!(ScalarStyle.doubleQuoted);
/// Add plain SCALAR token.
void fetchPlain() @safe
@@ -932,11 +932,11 @@ final class Scanner
Mark endMark = reader_.mark;
DirectiveType directive;
-if(name == "YAML") { directive = DirectiveType.YAML; }
-else if(name == "TAG") { directive = DirectiveType.TAG; }
+if(name == "YAML") { directive = DirectiveType.yaml; }
+else if(name == "TAG") { directive = DirectiveType.tag; }
else
{
-directive = DirectiveType.Reserved;
+directive = DirectiveType.reserved;
scanToNextBreak();
}
@@ -1119,11 +1119,11 @@ final class Scanner
return Token.init;
}
-if(id == TokenID.Alias)
+if(id == TokenID.alias_)
{
return aliasToken(startMark, reader_.mark, value);
}
-if(id == TokenID.Anchor)
+if(id == TokenID.anchor)
{
return anchorToken(startMark, reader_.mark, value);
}
@@ -1279,7 +1279,7 @@ final class Scanner
// Unfortunately, folding rules are ambiguous.
// This is the folding according to the specification:
-if(style == ScalarStyle.Folded && lineBreak == '\n' &&
+if(style == ScalarStyle.folded && lineBreak == '\n' &&
leadingNonSpace && !" \t"d.canFind(reader_.peekByte()))
{
// No breaks were scanned; no need to insert the space in the
@@ -1299,7 +1299,7 @@ final class Scanner
////this is Clark Evans's interpretation (also in the spec
////examples):
//
-//if(style == ScalarStyle.Folded && lineBreak == '\n')
+//if(style == ScalarStyle.folded && lineBreak == '\n')
//{
// if(startLen == endLen)
// {
@@ -1327,14 +1327,14 @@ final class Scanner
// If chompint is Keep, we keep (commit) the last scanned line breaks
// (which are at the end of the scalar). Otherwise re remove them (end the
// transaction).
-if(chomping == Chomping.Keep) { breaksTransaction.commit(); }
+if(chomping == Chomping.keep) { breaksTransaction.commit(); }
else { breaksTransaction.end(); }
-if(chomping != Chomping.Strip && lineBreak != int.max)
+if(chomping != Chomping.strip && lineBreak != int.max)
{
// If chomping is Keep, we keep the line break but the first line break
// that isn't stripped (since chomping isn't Strip in this branch) must
// be inserted _before_ the other line breaks.
-if(chomping == Chomping.Keep)
+if(chomping == Chomping.keep)
{
reader_.sliceBuilder.insert(lineBreak, startLen);
}
@@ -1356,7 +1356,7 @@ final class Scanner
/// In case of an error, error_ is set. Use throwIfError() to handle this.
Tuple!(Chomping, int) scanBlockScalarIndicators(const Mark startMark) @safe
{
-auto chomping = Chomping.Clip;
+auto chomping = Chomping.clip;
int increment = int.min;
dchar c = reader_.peek();
@@ -1393,7 +1393,7 @@ final class Scanner
bool getChomping(ref dchar c, ref Chomping chomping) @safe
{
if(!"+-"d.canFind(c)) { return false; }
-chomping = c == '+' ? Chomping.Keep : Chomping.Strip;
+chomping = c == '+' ? Chomping.keep : Chomping.strip;
reader_.forward();
c = reader_.peek();
return true;
@@ -1525,7 +1525,7 @@ final class Scanner
void scanFlowScalarNonSpacesToSlice(const ScalarStyle quotes, const Mark startMark)
@safe
{
-for(;;) with(ScalarStyle)
+for(;;)
{
dchar c = reader_.peek();
@@ -1556,18 +1556,18 @@ final class Scanner
reader_.sliceBuilder.write(reader_.get(numCodePoints));
c = reader_.peek();
-if(quotes == SingleQuoted && c == '\'' && reader_.peek(1) == '\'')
+if(quotes == ScalarStyle.singleQuoted && c == '\'' && reader_.peek(1) == '\'')
{
reader_.forward(2);
reader_.sliceBuilder.write('\'');
}
-else if((quotes == DoubleQuoted && c == '\'') ||
-(quotes == SingleQuoted && "\"\\"d.canFind(c)))
+else if((quotes == ScalarStyle.doubleQuoted && c == '\'') ||
+(quotes == ScalarStyle.singleQuoted && "\"\\"d.canFind(c)))
{
reader_.forward();
reader_.sliceBuilder.write(c);
}
-else if(quotes == DoubleQuoted && c == '\\')
+else if(quotes == ScalarStyle.doubleQuoted && c == '\\')
{
reader_.forward();
c = reader_.peek();
@@ -1803,7 +1803,7 @@ final class Scanner
spacesTransaction.end();
char[] slice = reader_.sliceBuilder.finish();
-return scalarToken(startMark, endMark, slice, ScalarStyle.Plain);
+return scalarToken(startMark, endMark, slice, ScalarStyle.plain);
}
/// Scan spaces in a plain scalar.

@@ -179,7 +179,7 @@ struct Serializer(Range, CharType)
{
assert(node.isType!string, "Scalar node type must be string before serialized");
auto value = node.as!string;
-const detectedTag = resolver_.resolve(NodeID.Scalar, null, value, true);
+const detectedTag = resolver_.resolve(NodeID.scalar, null, value, true);
const bool isDetected = node.tag_ == detectedTag;
emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_,

@@ -11,18 +11,27 @@ module dyaml.style;
///Scalar styles.
enum ScalarStyle : ubyte
{
-Invalid = 0, /// Invalid (uninitialized) style
-Literal, /// `|` (Literal block style)
-Folded, /// `>` (Folded block style)
-Plain, /// Plain scalar
-SingleQuoted, /// Single quoted scalar
-DoubleQuoted /// Double quoted scalar
+/// Invalid (uninitialized) style
+invalid = 0,
+/// `|` (Literal block style)
+literal,
+/// `>` (Folded block style)
+folded,
+/// Plain scalar
+plain,
+/// Single quoted scalar
+singleQuoted,
+/// Double quoted scalar
+doubleQuoted
}
///Collection styles.
enum CollectionStyle : ubyte
{
-Invalid = 0, /// Invalid (uninitialized) style
-Block, /// Block style.
-Flow /// Flow style.
+/// Invalid (uninitialized) style
+invalid = 0,
+/// Block style.
+block,
+/// Flow style.
+flow
}
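
Setting styles on nodes works as before, just with the new spellings; a small sketch based on the Node unittests earlier in this diff:

auto node = Node([1, 2, 3, 4, 5]);
node.setStyle(CollectionStyle.block);   // was CollectionStyle.Block

auto scalar = Node(4);
scalar.setStyle(ScalarStyle.literal);   // was ScalarStyle.Literal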

@ -98,9 +98,9 @@ private:
///Unittest status.
enum TestStatus
{
Success, //Unittest passed.
Failure, //Unittest failed.
Error //There's an error in the unittest.
success, //Unittest passed.
failure, //Unittest failed.
error //There's an error in the unittest.
}
///Unittest result.
@ -166,7 +166,7 @@ Result execute(D)(const string testName, D testFunction,
writeln(testName ~ "(" ~ filenames.join(", ") ~ ")...");
}
auto kind = TestStatus.Success;
auto kind = TestStatus.success;
string info = "";
try
{
@ -180,7 +180,7 @@ Result execute(D)(const string testName, D testFunction,
catch(Throwable e)
{
info = to!string(typeid(e)) ~ "\n" ~ to!string(e);
kind = (typeid(e) is typeid(AssertError)) ? TestStatus.Failure : TestStatus.Error;
kind = (typeid(e) is typeid(AssertError)) ? TestStatus.failure : TestStatus.error;
write((verbose ? to!string(e) : to!string(kind)) ~ " ");
}
@ -213,10 +213,10 @@ void display(Result[] results) @safe
to!string(result.kind));
}
if(result.kind == TestStatus.success){continue;}
if(result.kind == TestStatus.failure){++failures;}
else if(result.kind == TestStatus.error){++errors;}
writeln(result.info);
writeln("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
}
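Since the enum covers every case explicitly, a `final switch` would let the compiler verify exhaustiveness here. A hedged sketch of the tallying above, with `tally` as a hypothetical helper (not in this commit):

void tally(const Result[] results, out size_t failures, out size_t errors) @safe
{
    foreach(result; results)
    {
        final switch(result.kind)
        {
            case TestStatus.success: break;          // passed; nothing to count
            case TestStatus.failure: ++failures; break;
            case TestStatus.error:   ++errors;   break;
        }
    }
}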

View file

@ -35,20 +35,20 @@ bool compareEvents(T, U)(T events1, U events2)
//Different event types.
if(e1.id != e2.id){return false;}
//Different anchor (if applicable).
if([EventID.sequenceStart,
EventID.mappingStart,
EventID.alias_,
EventID.scalar].canFind(e1.id)
&& e1.anchor != e2.anchor)
{
return false;
}
//Different collection tag (if applicable).
if([EventID.sequenceStart, EventID.mappingStart].canFind(e1.id) && e1.tag != e2.tag)
{
return false;
}
if(e1.id == EventID.scalar)
{
//Different scalar tag (if applicable).
if(!(e1.implicit || e2.implicit)
@ -140,27 +140,27 @@ void testEmitterStyles(string dataFilename, string canonicalFilename) @safe
// Must exist due to Anchor and Tag reference counts.
auto loader = Loader.fromFile(canonicalFilename);
auto events = loader.parse();
foreach(flowStyle; [CollectionStyle.block, CollectionStyle.flow])
{
foreach(style; [ScalarStyle.literal, ScalarStyle.folded,
ScalarStyle.doubleQuoted, ScalarStyle.singleQuoted,
ScalarStyle.plain])
{
Event[] styledEvents;
foreach(event; events)
{
if(event.id == EventID.scalar)
{
event = scalarEvent(Mark(), Mark(), event.anchor, event.tag,
event.implicit,
event.value, style);
}
else if(event.id == EventID.sequenceStart)
{
event = sequenceStartEvent(Mark(), Mark(), event.anchor,
event.tag, event.implicit, flowStyle);
}
else if(event.id == EventID.mappingStart)
{
event = mappingStartEvent(Mark(), Mark(), event.anchor,
event.tag, event.implicit, flowStyle);
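The loops above hand-enumerate a 2x5 matrix of collection and scalar styles. With the members renamed, `std.traits.EnumMembers` could generate the same matrix while skipping the `invalid` member; a sketch under that assumption, not part of this commit:

import std.traits : EnumMembers;

foreach(flowStyle; EnumMembers!CollectionStyle)
{
    if(flowStyle == CollectionStyle.invalid) { continue; }
    foreach(style; EnumMembers!ScalarStyle)
    {
        if(style == ScalarStyle.invalid) { continue; }
        // restyle and re-emit the parsed events as above
    }
}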

View file

@ -26,24 +26,26 @@ import dyaml.token;
void testTokens(string dataFilename, string tokensFilename) @safe
{
// Representations of YAML tokens in the tokens file.
auto replace = [
TokenID.directive: "%",
TokenID.documentStart: "---",
TokenID.documentEnd: "...",
TokenID.alias_: "*",
TokenID.anchor: "&",
TokenID.tag: "!",
TokenID.scalar: "_",
TokenID.blockSequenceStart: "[[",
TokenID.blockMappingStart: "{{",
TokenID.blockEnd: "]}",
TokenID.flowSequenceStart: "[",
TokenID.flowSequenceEnd: "]",
TokenID.flowMappingStart: "{",
TokenID.flowMappingEnd: "}",
TokenID.blockEntry: ",",
TokenID.flowEntry: ",",
TokenID.key: "?",
TokenID.value: ":"
];
string[] tokens1;
string[] tokens2 = readText(tokensFilename).split();
@ -55,7 +57,7 @@ void testTokens(string dataFilename, string tokensFilename) @safe
auto loader = Loader.fromFile(dataFilename);
foreach(token; loader.scan())
{
if(token.id != TokenID.streamStart && token.id != TokenID.streamEnd)
{
tokens1 ~= replace[token.id];
}
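For intuition, a hedged trace of the loop above (from a reading of the scanner, not verified against this commit; `Loader.fromString` is assumed to be available as in the rest of the library):

auto loader = Loader.fromString("key: value");
string[] symbols;
foreach(token; loader.scan())
{
    if(token.id != TokenID.streamStart && token.id != TokenID.streamEnd)
    {
        symbols ~= replace[token.id];
    }
}
// Expected, roughly: ["{{", "?", "_", ":", "_", "]}"]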

View file

@ -22,38 +22,39 @@ package:
/// Token types.
enum TokenID : ubyte
{
/// Invalid (uninitialized) token
invalid = 0,
directive,
documentStart,
documentEnd,
streamStart,
streamEnd,
blockSequenceStart,
blockMappingStart,
blockEnd,
flowSequenceStart,
flowMappingStart,
flowSequenceEnd,
flowMappingEnd,
key,
value,
blockEntry,
flowEntry,
alias_,
anchor,
tag,
scalar
}
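One member needs care: `alias` is a D keyword, so its camelCased form takes a trailing underscore (`alias_`), matching `EventID.alias_` elsewhere in this commit. A compile-time sanity sketch (illustrative only):

static assert(TokenID.init == TokenID.invalid);       // default value is still the invalid token
static assert(is(typeof(TokenID.alias_) == TokenID)); // trailing underscore avoids the keyword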
/// Specifies the type of a tag directive token.
enum DirectiveType : ubyte
{
// YAML version directive.
yaml,
// Tag directive.
tag,
// Any other directive is "reserved" for future YAML versions.
reserved
}
/// Token produced by scanner.
@ -107,7 +108,7 @@ static assert(Token.sizeof <= 32, "Token has unexpected size");
Token directiveToken(const Mark start, const Mark end, char[] value,
DirectiveType directive, const uint nameEnd) @safe pure nothrow @nogc
{
return Token(value, start, end, TokenID.directive, ScalarStyle.init, Encoding.init,
directive, nameEnd);
}
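A hedged usage sketch (the marks and the `nameEnd` value are illustrative; `Mark()` default construction follows its use elsewhere in this diff):

char[] value = "YAML 1.1".dup;
auto tok = directiveToken(Mark(), Mark(), value, DirectiveType.yaml, 4);
assert(tok.id == TokenID.directive);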
@ -128,18 +129,18 @@ Token simpleToken(TokenID id)(const Mark start, const Mark end)
/// encoding = Encoding of the stream.
Token streamStartToken(const Mark start, const Mark end, const Encoding encoding) @safe pure nothrow @nogc
{
return Token(null, start, end, TokenID.streamStart, ScalarStyle.invalid, encoding);
}
/// Aliases for construction of simple token types.
alias streamEndToken = simpleToken!(TokenID.streamEnd);
alias blockSequenceStartToken = simpleToken!(TokenID.blockSequenceStart);
alias blockMappingStartToken = simpleToken!(TokenID.blockMappingStart);
alias blockEndToken = simpleToken!(TokenID.blockEnd);
alias keyToken = simpleToken!(TokenID.key);
alias valueToken = simpleToken!(TokenID.value);
alias blockEntryToken = simpleToken!(TokenID.blockEntry);
alias flowEntryToken = simpleToken!(TokenID.flowEntry);
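Each alias instantiates `simpleToken` with one of the renamed IDs, so call sites change only in casing. A hedged usage sketch:

auto end = streamEndToken(Mark(), Mark());
assert(end.id == TokenID.streamEnd);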
/// Construct a simple token with a value, of the specified type.
///
@ -152,14 +153,14 @@ alias flowEntryToken = simpleToken!(TokenID.FlowEntry);
Token simpleValueToken(TokenID id)(const Mark start, const Mark end, char[] value,
const uint valueDivider = uint.max)
{
return Token(value, start, end, id, ScalarStyle.invalid, Encoding.init,
DirectiveType.init, valueDivider);
}
/// Aliases for construction of tag, alias, and anchor tokens.
alias tagToken = simpleValueToken!(TokenID.tag);
alias aliasToken = simpleValueToken!(TokenID.alias_);
alias anchorToken = simpleValueToken!(TokenID.anchor);
/// Construct a scalar token.
///
@ -169,5 +170,5 @@ alias anchorToken = simpleValueToken!(TokenID.Anchor);
/// style = Style of the token.
Token scalarToken(const Mark start, const Mark end, char[] value, const ScalarStyle style) @safe pure nothrow @nogc
{
return Token(value, start, end, TokenID.scalar, style);
}
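A hedged usage sketch tying the renamed enums together (the value is illustrative):

auto tok = scalarToken(Mark(), Mark(), "hello".dup, ScalarStyle.plain);
assert(tok.id == TokenID.scalar);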