Use std.typecons.Flag where possible, and change more in parameters to const.
parent 19d88d156a
commit edf3e2a799
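The diff below repeatedly swaps bare bool parameters for std.typecons.Flag. As a quick orientation, here is a minimal, self-contained sketch (not part of this commit) of how the Flag/Yes/No idiom works; the function name save and the flag name overwrite are made up for illustration:

import std.stdio;
import std.typecons;

// Flag!"overwrite" is a two-valued enum based on bool; Yes.overwrite and
// No.overwrite are its self-documenting values.
void save(Flag!"overwrite" overwrite = No.overwrite)
{
    // A Flag value converts to bool, so ordinary boolean tests keep working.
    if(overwrite) writeln("overwriting");
    else          writeln("keeping the existing file");
}

void main()
{
    save();              // keeping the existing file
    save(Yes.overwrite); // overwriting -- reads far clearer than save(true)
}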
@@ -22,6 +22,7 @@ import std.exception;
 import std.stdio;
 import std.regex;
 import std.string;
+import std.typecons;
 import std.utf;

 import dyaml.node;

@@ -88,7 +89,8 @@ final class Constructor
 *
 * Params: defaultConstructors = Use constructors for default YAML tags?
 */
-this(in bool defaultConstructors = true) @safe nothrow
+this(const Flag!"useDefaultConstructors" defaultConstructors = Yes.useDefaultConstructors)
+@safe nothrow
 {
 if(!defaultConstructors){return;}

@@ -364,6 +366,7 @@ final class Constructor
 "Constructor function for tag " ~ tag.get ~ " is already "
 "specified. Can't specify another one.");

 return (ref Node n)
 {
 static if(Node.Value.allowed!T){return Node.Value(ctor(n));}
@@ -136,9 +136,9 @@ struct Dumper
 ///Tag directives to use.
 TagDirective[] tags_ = null;
 ///Always write document start?
-bool explicitStart_ = false;
+Flag!"explicitStart" explicitStart_ = No.explicitStart;
 ///Always write document end?
-bool explicitEnd_ = false;
+Flag!"explicitEnd" explicitEnd_ = No.explicitEnd;

 ///Name of the output file or stream, used in error messages.
 string name_ = "<unknown>";

@@ -238,13 +238,13 @@ struct Dumper
 ///Always explicitly write document start?
 @property void explicitStart(bool explicit) pure @safe nothrow
 {
-explicitStart_ = explicit;
+explicitStart_ = explicit ? Yes.explicitStart : No.explicitStart;
 }

 ///Always explicitly write document end?
 @property void explicitEnd(bool explicit) pure @safe nothrow
 {
-explicitEnd_ = explicit;
+explicitEnd_ = explicit ? Yes.explicitEnd : No.explicitEnd;
 }

 ///Specify YAML version string. "1.1" by default.
@@ -332,7 +332,7 @@ struct Emitter

 encoding_ = event_.encoding;
 writeStreamStart();
-state_ = &expectDocumentStart!true;
+state_ = &expectDocumentStart!(Yes.first);
 }

 ///Expect nothing, throwing if we still have something.

@@ -344,7 +344,7 @@ struct Emitter
 //Document handlers.

 ///Handle start of a document.
-void expectDocumentStart(bool first)() @trusted
+void expectDocumentStart(Flag!"first" first)() @trusted
 {
 enforce(eventTypeIs(EventID.DocumentStart) || eventTypeIs(EventID.StreamEnd),
 new Error("Expected DocumentStart or StreamEnd, but got "

@@ -423,7 +423,7 @@ struct Emitter
 writeIndent();
 }
 stream_.flush();
-state_ = &expectDocumentStart!false;
+state_ = &expectDocumentStart!(No.first);
 }

 ///Handle the root node of a document.

@@ -516,11 +516,11 @@ struct Emitter
 writeIndicator("[", Yes.needWhitespace, Yes.whitespace);
 ++flowLevel_;
 increaseIndent(Yes.flow);
-state_ = &expectFlowSequenceItem!true;
+state_ = &expectFlowSequenceItem!(Yes.first);
 }

 ///Handle a flow sequence item.
-void expectFlowSequenceItem(bool first)() @trusted
+void expectFlowSequenceItem(Flag!"first" first)() @trusted
 {
 if(event_.id == EventID.SequenceEnd)
 {

@@ -537,7 +537,7 @@ struct Emitter
 }
 static if(!first){writeIndicator(",", No.needWhitespace);}
 if(canonical_ || column_ > bestWidth_){writeIndent();}
-states_ ~= &expectFlowSequenceItem!false;
+states_ ~= &expectFlowSequenceItem!(No.first);
 expectSequenceNode();
 }

@@ -549,11 +549,11 @@ struct Emitter
 writeIndicator("{", Yes.needWhitespace, Yes.whitespace);
 ++flowLevel_;
 increaseIndent(Yes.flow);
-state_ = &expectFlowMappingKey!true;
+state_ = &expectFlowMappingKey!(Yes.first);
 }

 ///Handle a key in a flow mapping.
-void expectFlowMappingKey(bool first)() @trusted
+void expectFlowMappingKey(Flag!"first" first)() @trusted
 {
 if(event_.id == EventID.MappingEnd)
 {

@@ -587,7 +587,7 @@ struct Emitter
 void expectFlowMappingSimpleValue() @trusted
 {
 writeIndicator(":", No.needWhitespace);
-states_ ~= &expectFlowMappingKey!false;
+states_ ~= &expectFlowMappingKey!(No.first);
 expectMappingNode();
 }

@@ -596,7 +596,7 @@ struct Emitter
 {
 if(canonical_ || column_ > bestWidth_){writeIndent();}
 writeIndicator(":", Yes.needWhitespace);
-states_ ~= &expectFlowMappingKey!false;
+states_ ~= &expectFlowMappingKey!(No.first);
 expectMappingNode();
 }

@@ -608,11 +608,11 @@ struct Emitter
 const indentless = (context_ == Context.MappingNoSimpleKey ||
 context_ == Context.MappingSimpleKey) && !indentation_;
 increaseIndent(No.flow, indentless);
-state_ = &expectBlockSequenceItem!true;
+state_ = &expectBlockSequenceItem!(Yes.first);
 }

 ///Handle a block sequence item.
-void expectBlockSequenceItem(bool first)() @trusted
+void expectBlockSequenceItem(Flag!"first" first)() @trusted
 {
 static if(!first) if(event_.id == EventID.SequenceEnd)
 {

@@ -623,7 +623,7 @@ struct Emitter

 writeIndent();
 writeIndicator("-", Yes.needWhitespace, No.whitespace, Yes.indentation);
-states_ ~= &expectBlockSequenceItem!false;
+states_ ~= &expectBlockSequenceItem!(No.first);
 expectSequenceNode();
 }

@@ -633,11 +633,11 @@ struct Emitter
 void expectBlockMapping() @safe
 {
 increaseIndent(No.flow);
-state_ = &expectBlockMappingKey!true;
+state_ = &expectBlockMappingKey!(Yes.first);
 }

 ///Handle a key in a block mapping.
-void expectBlockMappingKey(bool first)() @trusted
+void expectBlockMappingKey(Flag!"first" first)() @trusted
 {
 static if(!first) if(event_.id == EventID.MappingEnd)
 {

@@ -663,7 +663,7 @@ struct Emitter
 void expectBlockMappingSimpleValue() @trusted
 {
 writeIndicator(":", No.needWhitespace);
-states_ ~= &expectBlockMappingKey!false;
+states_ ~= &expectBlockMappingKey!(No.first);
 expectMappingNode();
 }

@@ -672,7 +672,7 @@ struct Emitter
 {
 writeIndent();
 writeIndicator(":", Yes.needWhitespace, No.whitespace, Yes.indentation);
-states_ ~= &expectBlockMappingKey!false;
+states_ ~= &expectBlockMappingKey!(No.first);
 expectMappingNode();
 }
@@ -95,6 +95,8 @@ struct Event

 ///Get string representation of the token ID.
 @property string idString() const @system {return to!string(id);}
+
+static assert(Event.sizeof <= 48, "Event struct larger than expected");
 }

 /**

@@ -178,7 +180,7 @@ alias collectionStartEvent!(EventID.MappingStart) mappingStartEvent;
 * YAMLVersion = YAML version string of the document.
 * tagDirectives = Tag directives of the document.
 */
-Event documentStartEvent(const Mark start, const Mark end, bool explicit, string YAMLVersion,
+Event documentStartEvent(const Mark start, const Mark end, const bool explicit, string YAMLVersion,
 TagDirective[] tagDirectives) pure @trusted nothrow
 {
 Event result;

@@ -198,7 +200,7 @@ Event documentStartEvent(const Mark start, const Mark end, bool explicit, string
 * end = End position of the event in the file/stream.
 * explicit = Is this an explicit document end?
 */
-Event documentEndEvent(const Mark start, const Mark end, bool explicit) pure @trusted nothrow
+Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pure @trusted nothrow
 {
 Event result;
 result.startMark = start;
dyaml/node.d
@@ -155,7 +155,7 @@ struct Node
 ///Equality test with another Pair.
 bool opEquals(const ref Pair rhs) const @safe
 {
-return cmp!true(rhs) == 0;
+return cmp!(Yes.useTag)(rhs) == 0;
 }

 private:

@@ -165,7 +165,7 @@ struct Node
 * useTag determines whether or not we consider node tags
 * in the comparison.
 */
-int cmp(bool useTag)(ref const(Pair) rhs) const @safe
+int cmp(Flag!"useTag" useTag)(ref const(Pair) rhs) const @safe
 {
 const keyCmp = key.cmp!useTag(rhs.key);
 return keyCmp != 0 ? keyCmp

@@ -493,7 +493,7 @@ struct Node
 */
 bool opEquals(T)(const auto ref T rhs) const @safe
 {
-return equals!true(rhs);
+return equals!(Yes.useTag)(rhs);
 }
 unittest
 {

@@ -605,7 +605,7 @@ struct Node
 else if(isFloat()){return to!T(value_.get!(const real));}
 }
 else static if(isIntegral!T) if(isInt())
-{
+{
 const temp = value_.get!(const long);
 enforce(temp >= T.min && temp <= T.max,
 new Error("Integer value of type " ~ typeid(T).toString ~

@@ -676,7 +676,7 @@ struct Node
 else if(isFloat()){return to!T(value_.get!(const real));}
 }
 else static if(isIntegral!T) if(isInt())
-{
+{
 const temp = value_.get!(const long);
 enforce(temp >= T.min && temp <= T.max,
 new Error("Integer value of type " ~ typeid(T).toString ~

@@ -1056,7 +1056,7 @@ struct Node
 V tempValue = pair.value.as!V;
 result = dg(tempKey, tempValue);
 }

 if(result){break;}
 }
 return result;

@@ -1134,7 +1134,7 @@ struct Node

 auto nodes = get!(Node[])();
 static if(is(Unqual!T == Node)){nodes ~= value;}
-else {nodes ~= Node(value);}
+else {nodes ~= Node(value);}
 value_ = Value(nodes);
 }
 unittest

@@ -1278,7 +1278,7 @@ struct Node
 ///Compare with another _node.
 int opCmp(ref const Node node) const @safe
 {
-return cmp!true(node);
+return cmp!(Yes.useTag)(node);
 }

 //Compute hash of the node.

@@ -1341,7 +1341,7 @@ struct Node
 *
 * useTag determines whether or not to consider tags in node-node comparisons.
 */
-bool equals(bool useTag, T)(ref T rhs) const @safe
+bool equals(Flag!"useTag" useTag, T)(ref T rhs) const @safe
 {
 static if(is(Unqual!T == Node))
 {

@@ -1373,7 +1373,7 @@ struct Node
 *
 * useTag determines whether or not to consider tags in the comparison.
 */
-int cmp(bool useTag)(const ref Node rhs) const @trusted
+int cmp(Flag!"useTag" useTag)(const ref Node rhs) const @trusted
 {
 //Compare tags - if equal or both null, we need to compare further.
 static if(useTag)

@@ -1591,7 +1591,7 @@ struct Node

 if(isMapping)
 {
-return findPair!(T, !key)(rhs) >= 0;
+return findPair!(T, key)(rhs) >= 0;
 }

 throw new Error("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node",

@@ -1637,20 +1637,20 @@ struct Node
 }
 else if(isMapping())
 {
-const index = findPair!(T, !key)(rhs);
+const index = findPair!(T, key)(rhs);
 if(index >= 0){removeElem!Pair(this, index);}
 }
 }

-//Get index of pair with key (or value, if value is true) matching index.
-sizediff_t findPair(T, bool value = false)(const ref T index) const @safe
+//Get index of pair with key (or value, if key is false) matching index.
+sizediff_t findPair(T, Flag!"key" key = Yes.key)(const ref T index) const @safe
 {
 const pairs = value_.get!(const Pair[])();
 const(Node)* node;
 foreach(idx, ref const(Pair) pair; pairs)
 {
-static if(value){node = &pair.value;}
-else{node = &pair.key;}
+static if(key){node = &pair.key;}
+else {node = &pair.value;}

 bool typeMatch = (isFloatingPoint!T && (node.isInt || node.isFloat)) ||
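Like the emitter above, node.d passes Flag values as template parameters (cmp!(Yes.useTag), expectDocumentStart!(Yes.first)), which works because a Flag is a compile-time bool-based enum and can drive static if. A small stand-alone sketch of that mechanism; the names step and first are illustrative, not from D:YAML:

import std.stdio;
import std.typecons;

// A Flag template parameter is a compile-time constant, so it can drive
// static if exactly like the bool template parameters it replaces in this diff.
void step(Flag!"first" first)()
{
    static if(first) writeln("first entry: remember the start mark");
    else             writeln("later entry: just emit a separator");
}

void main()
{
    step!(Yes.first)(); // instantiates the "first" branch
    step!(No.first)();  // instantiates the other branch
}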
@@ -411,7 +411,8 @@ final class Parser
 */

 ///Parse a node.
-Event parseNode(in bool block, in bool indentlessSequence = false) @safe
+Event parseNode(const Flag!"block" block,
+const Flag!"indentlessSequence" indentlessSequence = No.indentlessSequence) @safe
 {
 if(scanner_.checkToken(TokenID.Alias))
 {

@@ -426,12 +427,12 @@ final class Parser
 bool invalidMarks = true;

 //Get anchor/tag if detected. Return false otherwise.
-bool get(TokenID id, bool start, ref string target)
+bool get(const TokenID id, const Flag!"first" first, ref string target)
 {
 if(!scanner_.checkToken(id)){return false;}
 invalidMarks = false;
 immutable token = scanner_.getToken();
-if(start){startMark = token.startMark;}
+if(first){startMark = token.startMark;}
 if(id == TokenID.Tag){tagMark = token.startMark;}
 endMark = token.endMark;
 target = token.value;

@@ -439,8 +440,8 @@ final class Parser
 }

 //Anchor and/or tag can be in any order.
-if(get(TokenID.Anchor, true, anchor)){get(TokenID.Tag, false, tag);}
-else if(get(TokenID.Tag, true, tag)) {get(TokenID.Anchor, false, anchor);}
+if(get(TokenID.Anchor, Yes.first, anchor)){get(TokenID.Tag, No.first, tag);}
+else if(get(TokenID.Tag, Yes.first, tag)) {get(TokenID.Anchor, No.first, anchor);}

 if(tag !is null){tag = processTag(tag, startMark, tagMark);}

@@ -454,9 +455,9 @@ final class Parser
 if(indentlessSequence && scanner_.checkToken(TokenID.BlockEntry))
 {
 state_ = &parseIndentlessSequenceEntry;
-return sequenceStartEvent(startMark, scanner_.peekToken().endMark,
-Anchor(anchor), Tag(tag), implicit,
-CollectionStyle.Block);
+return sequenceStartEvent
+(startMark, scanner_.peekToken().endMark, Anchor(anchor),
+Tag(tag), implicit, CollectionStyle.Block);
 }

 if(scanner_.checkToken(TokenID.Scalar))

@@ -473,7 +474,7 @@ final class Parser
 if(scanner_.checkToken(TokenID.FlowSequenceStart))
 {
 endMark = scanner_.peekToken().endMark;
-state_ = &parseFlowSequenceEntry!true;
+state_ = &parseFlowSequenceEntry!(Yes.first);
 return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
 implicit, CollectionStyle.Flow);
 }

@@ -481,7 +482,7 @@ final class Parser
 if(scanner_.checkToken(TokenID.FlowMappingStart))
 {
 endMark = scanner_.peekToken().endMark;
-state_ = &parseFlowMappingKey!true;
+state_ = &parseFlowMappingKey!(Yes.first);
 return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
 implicit, CollectionStyle.Flow);
 }

@@ -489,7 +490,7 @@ final class Parser
 if(block && scanner_.checkToken(TokenID.BlockSequenceStart))
 {
 endMark = scanner_.peekToken().endMark;
-state_ = &parseBlockSequenceEntry!true;
+state_ = &parseBlockSequenceEntry!(Yes.first);
 return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
 implicit, CollectionStyle.Block);
 }

@@ -497,7 +498,7 @@ final class Parser
 if(block && scanner_.checkToken(TokenID.BlockMappingStart))
 {
 endMark = scanner_.peekToken().endMark;
-state_ = &parseBlockMappingKey!true;
+state_ = &parseBlockMappingKey!(Yes.first);
 return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
 implicit, CollectionStyle.Block);
 }

@@ -557,14 +558,14 @@ final class Parser
 }

 ///Wrappers to parse nodes.
-Event parseBlockNode() @safe {return parseNode(true);}
-Event parseFlowNode() @safe {return parseNode(false);}
-Event parseBlockNodeOrIndentlessSequence() @safe {return parseNode(true, true);}
+Event parseBlockNode() @safe {return parseNode(Yes.block);}
+Event parseFlowNode() @safe {return parseNode(No.block);}
+Event parseBlockNodeOrIndentlessSequence() @safe {return parseNode(Yes.block, Yes.indentlessSequence);}

 ///block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END

 ///Parse an entry of a block sequence. If first is true, this is the first entry.
-Event parseBlockSequenceEntry(bool first)() @trusted
+Event parseBlockSequenceEntry(Flag!"first" first)() @trusted
 {
 static if(first){marks_ ~= scanner_.getToken().startMark;}

@@ -573,11 +574,11 @@ final class Parser
 immutable token = scanner_.getToken();
 if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.BlockEnd))
 {
-states_~= &parseBlockSequenceEntry!false;
+states_~= &parseBlockSequenceEntry!(No.first);
 return parseBlockNode();
 }

-state_ = &parseBlockSequenceEntry!false;
+state_ = &parseBlockSequenceEntry!(No.first);
 return processEmptyScalar(token.endMark);
 }

@@ -628,7 +629,7 @@ final class Parser
 */

 ///Parse a key in a block mapping. If first is true, this is the first key.
-Event parseBlockMappingKey(bool first)() @trusted
+Event parseBlockMappingKey(Flag!"first" first)() @trusted
 {
 static if(first){marks_ ~= scanner_.getToken().startMark;}

@@ -669,15 +670,15 @@ final class Parser

 if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
 {
-states_ ~= &parseBlockMappingKey!false;
+states_ ~= &parseBlockMappingKey!(No.first);
 return parseBlockNodeOrIndentlessSequence();
 }

-state_ = &parseBlockMappingKey!false;
+state_ = &parseBlockMappingKey!(No.first);
 return processEmptyScalar(token.endMark);
 }

-state_= &parseBlockMappingKey!false;
+state_= &parseBlockMappingKey!(No.first);
 return processEmptyScalar(scanner_.peekToken().startMark);
 }

@@ -695,7 +696,7 @@ final class Parser
 */

 ///Parse an entry in a flow sequence. If first is true, this is the first entry.
-Event parseFlowSequenceEntry(bool first)() @trusted
+Event parseFlowSequenceEntry(Flag!"first" first)() @trusted
 {
 static if(first){marks_ ~= scanner_.getToken().startMark;}

@@ -725,7 +726,7 @@ final class Parser
 }
 else if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
 {
-states_ ~= &parseFlowSequenceEntry!false;
+states_ ~= &parseFlowSequenceEntry!(No.first);
 return parseFlowNode();
 }
 }

@@ -789,7 +790,7 @@ final class Parser
 ///Parse end of a mapping in a flow sequence entry.
 Event parseFlowSequenceEntryMappingEnd() @safe
 {
-state_ = &parseFlowSequenceEntry!false;
+state_ = &parseFlowSequenceEntry!(No.first);
 immutable token = scanner_.peekToken();
 return mappingEndEvent(token.startMark, token.startMark);
 }

@@ -803,7 +804,7 @@ final class Parser
 */

 ///Parse a key in a flow mapping.
-Event parseFlowMappingKey(bool first)() @trusted
+Event parseFlowMappingKey(Flag!"first" first)() @trusted
 {
 static if(first){marks_ ~= scanner_.getToken().startMark;}

@@ -845,13 +846,13 @@ final class Parser
 ///Parse a value in a flow mapping.
 Event parseFlowMappingValue() @safe
 {
-return parseFlowValue(TokenID.FlowMappingEnd, &parseFlowMappingKey!false);
+return parseFlowValue(TokenID.FlowMappingEnd, &parseFlowMappingKey!(No.first));
 }

 ///Parse an empty value in a flow mapping.
 Event parseFlowMappingEmptyValue() @safe
 {
-state_ = &parseFlowMappingKey!false;
+state_ = &parseFlowMappingKey!(No.first);
 return processEmptyScalar(scanner_.peekToken().startMark);
 }
@@ -23,6 +23,7 @@ import std.exception;
 import std.format;
 import std.math;
 import std.stream;
+import std.typecons;

 import dyaml.exception;
 import dyaml.node;

@@ -66,7 +67,8 @@ final class Representer
 * disabled to use custom representer
 * functions for default types.
 */
-this(bool useDefaultRepresenters = true) @safe
+this(const Flag!"useDefaultRepresenters" useDefaultRepresenters = Yes.useDefaultRepresenters)
+@safe
 {
 if(!useDefaultRepresenters){return;}
 addRepresenter!YAMLNull(&representNull);
@@ -61,7 +61,8 @@ final class Resolver
 *
 * Params: defaultImplicitResolvers = Use default YAML implicit resolvers?
 */
-this(bool defaultImplicitResolvers = true) @safe
+this(Flag!"useDefaultImplicitResolvers" defaultImplicitResolvers = Yes.useDefaultImplicitResolvers)
+@safe
 {
 defaultScalarTag_ = Tag("tag:yaml.org,2002:str");
 defaultSequenceTag_ = Tag("tag:yaml.org,2002:seq");

@@ -142,7 +143,8 @@ final class Resolver
 *
 * Returns: Resolved tag.
 */
-Tag resolve(NodeID kind, Tag tag, string value, bool implicit) @safe
+Tag resolve(const NodeID kind, const Tag tag, const string value,
+const bool implicit) @safe
 {
 if(!tag.isNull() && tag.get() != "!"){return tag;}
@@ -39,9 +39,9 @@ struct Serializer
 Resolver resolver_;

 ///Do all document starts have to be specified explicitly?
-bool explicitStart_;
+Flag!"explicitStart" explicitStart_;
 ///Do all document ends have to be specified explicitly?
-bool explicitEnd_;
+Flag!"explicitEnd" explicitEnd_;
 ///YAML version string.
 string YAMLVersion_;

@@ -69,14 +69,15 @@ struct Serializer
 * tagDirectives = Tag directives to emit.
 */
 this(ref Emitter emitter, Resolver resolver, Encoding encoding,
-in bool explicitStart, in bool explicitEnd, string YAMLVersion,
+const Flag!"explicitStart" explicitStart,
+const Flag!"explicitEnd" explicitEnd, string YAMLVersion,
 TagDirective[] tagDirectives) @trusted
 {
-emitter_ = &emitter;
-resolver_ = resolver;
+emitter_ = &emitter;
+resolver_ = resolver;
 explicitStart_ = explicitStart;
-explicitEnd_ = explicitEnd;
-YAMLVersion_ = YAMLVersion;
+explicitEnd_ = explicitEnd;
+YAMLVersion_ = YAMLVersion;
 tagDirectives_ = tagDirectives;

 emitter_.emit(streamStartEvent(Mark(), Mark(), encoding));
@@ -11,6 +11,7 @@ import std.datetime;
 import std.exception;
 import std.path;
 import std.string;
+import std.typecons;

 import dyaml.tag;
 import dyaml.testcommon;

@@ -402,7 +403,7 @@ void testConstructor(bool verbose, string dataFilename, string codeDummy)
 size_t i = 0;
 foreach(node; loader)
 {
-if(!node.equals!false(exp[i]))
+if(!node.equals!(No.useTag)(exp[i]))
 {
 if(verbose)
 {
@@ -9,6 +9,7 @@ module dyaml.testrepresenter;

 import std.path;
 import std.exception;
+import std.typecons;

 import dyaml.testcommon;
 import dyaml.testconstructor;

@@ -69,7 +70,7 @@ void testRepresenterTypes(bool verbose, string codeFilename)
 assert(expectedNodes.length == readNodes.length);
 foreach(n; 0 .. expectedNodes.length)
 {
-assert(expectedNodes[n].equals!false(readNodes[n]));
+assert(expectedNodes[n].equals!(No.useTag)(readNodes[n]));
 }
 }
 }
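For users of the library, the practical effect of this commit is at construction sites. A hypothetical sketch of the new call style; only Constructor, Representer and Resolver appear in this diff, and the module names in the imports are assumed from the file layout, not shown above:

import std.typecons;
import dyaml.constructor;  // module names assumed, not part of the diff
import dyaml.representer;
import dyaml.resolver;

void example()
{
    // Before this commit these constructors took plain bools, e.g. new Constructor(false).
    auto constructor = new Constructor(No.useDefaultConstructors);
    auto representer = new Representer(Yes.useDefaultRepresenters);
    auto resolver    = new Resolver(Yes.useDefaultImplicitResolvers);
}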
|
Loading…
Reference in a new issue