Merge pull request #103 from Herringway/kill-whitespace-errors-dead
Kill whitespace errors dead, finally
Commit 02cbf4d459
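Every hunk in this diff is whitespace-only: each -/+ pair below differs solely in trailing whitespace that the old line carried and the new line drops. As a hedged illustration of the kind of cleanup applied (not code taken from this PR or from D:YAML), a trailing-whitespace strip could be scripted in D roughly as follows; the function name and the rewrite-in-place behaviour are assumptions of the sketch.

    // Sketch only: strip trailing spaces/tabs from each line of the given files.
    import std.algorithm : map;
    import std.array : join;
    import std.file : readText, write;
    import std.string : splitLines, stripRight;

    void stripTrailingWhitespace(string[] paths)
    {
        foreach (path; paths)
        {
            auto cleaned = readText(path)
                .splitLines()
                .map!(line => line.stripRight())  // drop trailing whitespace
                .join("\n") ~ "\n";               // keep a single final newline
            write(path, cleaned);
        }
    }

Run over the dyaml sources, a pass like this produces exactly the shape of change shown in the hunks that follow.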
@@ -108,7 +108,7 @@ final class Composer
Node getNode() @safe
{
//Get the root node of the next document.
-assert(!parser_.checkEvent(EventID.StreamEnd),
+assert(!parser_.checkEvent(EventID.StreamEnd),
"Trying to get a node from Composer when there is no node to " ~
"get. use checkNode() to determine if there is a node.");

@@ -118,7 +118,7 @@ final class Composer
///Get single YAML document, throwing if there is more than one document.
Node getSingleNode() @trusted
{
-assert(!parser_.checkEvent(EventID.StreamEnd),
+assert(!parser_.checkEvent(EventID.StreamEnd),
"Trying to get a node from Composer when there is no node to " ~
"get. use checkNode() to determine if there is a node.");

@@ -141,7 +141,7 @@ final class Composer
///
///Params: pairAppenderLevel = Current level in the pair appender stack.
/// nodeAppenderLevel = Current level the node appender stack.
-void ensureAppendersExist(const uint pairAppenderLevel, const uint nodeAppenderLevel)
+void ensureAppendersExist(const uint pairAppenderLevel, const uint nodeAppenderLevel)
@trusted
{
while(pairAppenders_.length <= pairAppenderLevel)

@@ -235,10 +235,10 @@ final class Composer
Node composeScalarNode() @safe
{
immutable event = parser_.getEvent();
-const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value,
+const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value,
event.implicit);

-Node node = constructor_.node(event.startMark, event.endMark, tag,
+Node node = constructor_.node(event.startMark, event.endMark, tag,
event.value, event.scalarStyle);

return node;

@@ -248,14 +248,14 @@ final class Composer
///
/// Params: pairAppenderLevel = Current level of the pair appender stack.
/// nodeAppenderLevel = Current level of the node appender stack.
-Node composeSequenceNode(const uint pairAppenderLevel, const uint nodeAppenderLevel)
+Node composeSequenceNode(const uint pairAppenderLevel, const uint nodeAppenderLevel)
@system
{
ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
auto nodeAppender = &(nodeAppenders_[nodeAppenderLevel]);

immutable startEvent = parser_.getEvent();
-const tag = resolver_.resolve(NodeID.Sequence, startEvent.tag, null,
+const tag = resolver_.resolve(NodeID.Sequence, startEvent.tag, null,
startEvent.implicit);

while(!parser_.checkEvent(EventID.SequenceEnd))

@@ -319,7 +319,7 @@ final class Composer
}
foreach(node; toMerge)
{
-merge(*pairAppender, flatten(node, startMark, endMark,
+merge(*pairAppender, flatten(node, startMark, endMark,
pairAppenderLevel + 1, nodeAppenderLevel));
}
}

@@ -327,7 +327,7 @@ final class Composer
else if(root.isSequence) foreach(ref Node node; root)
{
if(!node.isType!(Node.Pair[])){error(node);}
-merge(*pairAppender, flatten(node, startMark, endMark,
+merge(*pairAppender, flatten(node, startMark, endMark,
pairAppenderLevel + 1, nodeAppenderLevel));
}
else

@@ -350,14 +350,14 @@ final class Composer
{
ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel);
immutable startEvent = parser_.getEvent();
-const tag = resolver_.resolve(NodeID.Mapping, startEvent.tag, null,
+const tag = resolver_.resolve(NodeID.Mapping, startEvent.tag, null,
startEvent.implicit);
auto pairAppender = &(pairAppenders_[pairAppenderLevel]);

Tuple!(Node, Mark)[] toMerge;
while(!parser_.checkEvent(EventID.MappingEnd))
{
-auto pair = Node.Pair(composeNode(pairAppenderLevel + 1, nodeAppenderLevel),
+auto pair = Node.Pair(composeNode(pairAppenderLevel + 1, nodeAppenderLevel),
composeNode(pairAppenderLevel + 1, nodeAppenderLevel));

//Need to flatten and merge the node referred by YAMLMerge.

@@ -373,7 +373,7 @@ final class Composer
}
foreach(node; toMerge)
{
-merge(*pairAppender, flatten(node[0], startEvent.startMark, node[1],
+merge(*pairAppender, flatten(node[0], startEvent.startMark, node[1],
pairAppenderLevel + 1, nodeAppenderLevel));
}
@@ -343,13 +343,13 @@ final class Constructor

/// Construct a _null _node.
-YAMLNull constructNull(ref Node node) @safe pure nothrow @nogc
+YAMLNull constructNull(ref Node node) @safe pure nothrow @nogc
{
return YAMLNull();
}

/// Construct a merge _node - a _node that merges another _node into a mapping.
-YAMLMerge constructMerge(ref Node node) @safe pure nothrow @nogc
+YAMLMerge constructMerge(ref Node node) @safe pure nothrow @nogc
{
return YAMLMerge();
}
@@ -123,7 +123,7 @@ struct Dumper
//catch(StreamException e)
catch(Exception e)
{
-throw new YAMLException("Unable to open file " ~ filename ~
+throw new YAMLException("Unable to open file " ~ filename ~
" for YAML dumping: " ~ e.msg);
}
// need to destroy the File we constructed.

@@ -171,7 +171,7 @@ struct Dumper
///Set indentation width. 2 by default. Must not be zero.
@property void indent(uint indent) pure @safe nothrow
in
-{
+{
assert(indent != 0, "Can't use zero YAML indent width");
}
body

@@ -195,7 +195,7 @@ struct Dumper
@property void encoding(Encoding encoding) pure @safe nothrow
{
encoding_ = encoding;
-}
+}

///Always explicitly write document start?
@property void explicitStart(bool explicit) pure @safe nothrow

@@ -216,10 +216,10 @@ struct Dumper
}

/**
-* Specify tag directives.
+* Specify tag directives.
*
* A tag directive specifies a shorthand notation for specifying _tags.
-* Each tag directive associates a handle with a prefix. This allows for
+* Each tag directive associates a handle with a prefix. This allows for
* compact tag notation.
*
* Each handle specified MUST start and end with a '!' character

@@ -262,13 +262,13 @@ struct Dumper
/**
* Dump one or more YAML _documents to the file/stream.
*
-* Note that while you can call dump() multiple times on the same
+* Note that while you can call dump() multiple times on the same
* dumper, you will end up writing multiple YAML "files" to the same
* file/stream.
*
* Params: documents = Documents to _dump (root nodes of the _documents).
*
-* Throws: YAMLException on error (e.g. invalid nodes,
+* Throws: YAMLException on error (e.g. invalid nodes,
* unable to write to file/stream).
*/
void dump(Node[] documents ...) @trusted

@@ -285,7 +285,7 @@ struct Dumper
}
catch(YAMLException e)
{
-throw new YAMLException("Unable to dump YAML to stream "
+throw new YAMLException("Unable to dump YAML to stream "
~ name_ ~ " : " ~ e.msg);
}
}

@@ -310,7 +310,7 @@ struct Dumper
}
catch(YAMLException e)
{
-throw new YAMLException("Unable to emit YAML to stream "
+throw new YAMLException("Unable to emit YAML to stream "
~ name_ ~ " : " ~ e.msg);
}
}
@@ -68,13 +68,13 @@ private mixin FastCharSearch!"\n\u0085\u2028\u2029"d newlineSearch_;
private alias canFind = std.algorithm.canFind;

//Emits YAML events into a file/stream.
-struct Emitter
+struct Emitter
{
private:
alias dyaml.tagdirective.TagDirective TagDirective;

///Default tag handle shortcuts and replacements.
-static TagDirective[] defaultTagDirectives_ =
+static TagDirective[] defaultTagDirectives_ =
[TagDirective("!", "!"), TagDirective("!!", "tag:yaml.org,2002:")];

///Stream to write to.

@@ -165,7 +165,7 @@ struct Emitter
* indent = Indentation width.
* lineBreak = Line break character/s.
*/
-this(YStream stream, const bool canonical, const int indent, const int width,
+this(YStream stream, const bool canonical, const int indent, const int width,
const LineBreak lineBreak) @trusted
in{assert(stream.writeable, "Can't emit YAML to a non-writable stream");}
body

@@ -200,9 +200,9 @@ struct Emitter

private:
///Pop and return the newest state in states_.
-void delegate() popState() @trusted
+void delegate() popState() @trusted
{
-enforce(states_.length > 0,
+enforce(states_.length > 0,
new YAMLException("Emitter: Need to pop a state but there are no states left"));
const result = states_.back;
states_.length = states_.length - 1;

@@ -212,7 +212,7 @@ struct Emitter
///Pop and return the newest indent in indents_.
int popIndent() @trusted
{
-enforce(indents_.length > 0,
+enforce(indents_.length > 0,
new YAMLException("Emitter: Need to pop an indent level but there" ~
" are no indent levels left"));
const result = indents_.back;

@@ -261,7 +261,7 @@ struct Emitter
{
int level = 0;

-//Rather ugly, but good enough for now.
+//Rather ugly, but good enough for now.
//Couldn't be bothered writing a range as events_ should eventually
//become a Phobos queue/linked list.
events_.startIteration();
@@ -347,7 +347,7 @@ struct Emitter
writeIndicator("...", Yes.needWhitespace);
writeIndent();
}
-
+
if(YAMLVersion !is null)
{
writeVersionDirective(prepareVersion(YAMLVersion));

@@ -360,23 +360,23 @@ struct Emitter

foreach(ref pair; tagDirectives_)
{
-writeTagDirective(prepareTagHandle(pair.handle),
+writeTagDirective(prepareTagHandle(pair.handle),
prepareTagPrefix(pair.prefix));
}
}

bool eq(ref TagDirective a, ref TagDirective b){return a.handle == b.handle;}
//Add any default tag directives that have not been overriden.
-foreach(ref def; defaultTagDirectives_)
+foreach(ref def; defaultTagDirectives_)
{
if(!std.algorithm.canFind!eq(tagDirectives_, def))
{
tagDirectives_ ~= def;
}
}
}

const implicit = first && !event_.explicitDocument && !canonical_ &&
-YAMLVersion is null && tagDirectives is null &&
+YAMLVersion is null && tagDirectives is null &&
!checkEmptyDocument();
if(!implicit)
{

@@ -669,14 +669,14 @@ struct Emitter
///Check if an empty sequence is next.
bool checkEmptySequence() const @trusted pure nothrow
{
-return event_.id == EventID.SequenceStart && events_.length > 0
+return event_.id == EventID.SequenceStart && events_.length > 0
&& events_.peek().id == EventID.SequenceEnd;
}

///Check if an empty mapping is next.
bool checkEmptyMapping() const @trusted pure nothrow
{
-return event_.id == EventID.MappingStart && events_.length > 0
+return event_.id == EventID.MappingStart && events_.length > 0
&& events_.peek().id == EventID.MappingEnd;
}

@@ -695,12 +695,12 @@ struct Emitter
}

///Check if a simple key is next.
-bool checkSimpleKey() @trusted
+bool checkSimpleKey() @trusted
{
uint length = 0;
const id = event_.id;
const scalar = id == EventID.Scalar;
-const collectionStart = id == EventID.MappingStart ||
+const collectionStart = id == EventID.MappingStart ||
id == EventID.SequenceStart;

if((id == EventID.Alias || scalar || collectionStart)

@@ -727,9 +727,9 @@ struct Emitter

if(length >= 128){return false;}

-return id == EventID.Alias ||
+return id == EventID.Alias ||
(scalar && !analysis_.flags.empty && !analysis_.flags.multiline) ||
-checkEmptySequence() ||
+checkEmptySequence() ||
checkEmptyMapping();
}
@@ -742,7 +742,7 @@ struct Emitter
style_ = chooseScalarStyle();
}

-//if(analysis_.flags.multiline && (context_ != Context.MappingSimpleKey) &&
+//if(analysis_.flags.multiline && (context_ != Context.MappingSimpleKey) &&
// ([ScalarStyle.Invalid, ScalarStyle.Plain, ScalarStyle.SingleQuoted, ScalarStyle.DoubleQuoted)
// .canFind(style_))
//{

@@ -829,10 +829,10 @@ struct Emitter
const singleQuoted = style == ScalarStyle.SingleQuoted;
const doubleQuoted = style == ScalarStyle.DoubleQuoted;

-const allowPlain = flowLevel_ > 0 ? analysis_.flags.allowFlowPlain
+const allowPlain = flowLevel_ > 0 ? analysis_.flags.allowFlowPlain
: analysis_.flags.allowBlockPlain;
//simple empty or multiline scalars can't be written in plain style
-const simpleNonPlain = (context_ == Context.MappingSimpleKey) &&
+const simpleNonPlain = (context_ == Context.MappingSimpleKey) &&
(analysis_.flags.empty || analysis_.flags.multiline);

if(doubleQuoted || canonical_)

@@ -845,14 +845,14 @@ struct Emitter
return ScalarStyle.Plain;
}

-if(block && flowLevel_ == 0 && context_ != Context.MappingSimpleKey &&
+if(block && flowLevel_ == 0 && context_ != Context.MappingSimpleKey &&
analysis_.flags.allowBlock)
{
return style;
}

-if((invalidOrPlain || singleQuoted) &&
-analysis_.flags.allowSingleQuoted &&
+if((invalidOrPlain || singleQuoted) &&
+analysis_.flags.allowSingleQuoted &&
!(context_ == Context.MappingSimpleKey && analysis_.flags.multiline))
{
return ScalarStyle.SingleQuoted;

@@ -942,7 +942,7 @@ struct Emitter
foreach(ref pair; tagDirectives_)
{
auto prefix = pair.prefix;
-if(tagString.startsWith(prefix) &&
+if(tagString.startsWith(prefix) &&
(prefix != "!" || prefix.length < tagString.length))
{
handle = pair.handle;

@@ -956,7 +956,7 @@ struct Emitter
size_t end = 0;
foreach(const dchar c; suffix)
{
-if(isAlphaNum(c) || "-;/?:@&=+$,_.~*\'()[]"d.canFind(c) ||
+if(isAlphaNum(c) || "-;/?:@&=+$,_.~*\'()[]"d.canFind(c) ||
(c == '!' && handle != "!"))
{
++end;

@@ -1008,11 +1008,11 @@ struct Emitter
return analysis;
}

-//Indicators and special characters (All false by default).
+//Indicators and special characters (All false by default).
bool blockIndicators, flowIndicators, lineBreaks, specialCharacters;

//Important whitespace combinations (All false by default).
-bool leadingSpace, leadingBreak, trailingSpace, trailingBreak,
+bool leadingSpace, leadingBreak, trailingSpace, trailingBreak,
breakSpace, spaceBreak;

//Check document indicators.

@@ -1020,12 +1020,12 @@ struct Emitter
{
blockIndicators = flowIndicators = true;
}
-
+
//First character or preceded by a whitespace.
bool preceededByWhitespace = true;

//Last character or followed by a whitespace.
-bool followedByWhitespace = scalar.length == 1 ||
+bool followedByWhitespace = scalar.length == 1 ||
" \t\0\n\r\u0085\u2028\u2029"d.canFind(scalar[1]);

//The previous character is a space/break (false by default).
@@ -1077,7 +1077,7 @@ struct Emitter
{
specialCharacters = true;
}
-
+
//Detect important whitespace combinations.
if(c == ' ')
{

@@ -1103,16 +1103,16 @@ struct Emitter
mixin FastCharSearch! "\0\n\r\u0085\u2028\u2029 \t"d spaceSearch;
//Prepare for the next character.
preceededByWhitespace = spaceSearch.canFind(c);
-followedByWhitespace = index + 2 >= scalar.length ||
+followedByWhitespace = index + 2 >= scalar.length ||
spaceSearch.canFind(scalar[index + 2]);
}

with(analysis.flags)
{
//Let's decide what styles are allowed.
-allowFlowPlain = allowBlockPlain = allowSingleQuoted
+allowFlowPlain = allowBlockPlain = allowSingleQuoted
= allowDoubleQuoted = allowBlock = true;
-
+
//Leading and trailing whitespaces are bad for plain scalars.
if(leadingSpace || leadingBreak || trailingSpace || trailingBreak)
{

@@ -1311,11 +1311,11 @@ struct ScalarWriter
resetTextPosition();

do
-{
+{
const dchar c = nextChar();
if(spaces_)
{
-if(c != ' ' && tooWide() && split_ &&
+if(c != ' ' && tooWide() && split_ &&
startByte_ != 0 && endByte_ != text_.length)
{
writeIndent(Flag!"ResetSpace".no);

@@ -1359,11 +1359,11 @@ struct ScalarWriter
resetTextPosition();
emitter_.writeIndicator("\"", Yes.needWhitespace);
do
-{
+{
const dchar c = nextChar();
//handle special characters
if(c == dcharNone || "\"\\\u0085\u2028\u2029\uFEFF"d.canFind(c) ||
-!((c >= '\x20' && c <= '\x7E') ||
+!((c >= '\x20' && c <= '\x7E') ||
((c >= '\xA0' && c <= '\uD7FF') || (c >= '\uE000' && c <= '\uFFFD'))))
{
if(startChar_ < endChar_)

@@ -1392,9 +1392,9 @@ struct ScalarWriter
startByte_ = nextEndByte_;
}
}
-if((endByte_ > 0 && endByte_ < text_.length - strideBack(text_, text_.length))
-&& (c == ' ' || startChar_ >= endChar_)
-&& (emitter_.column_ + endChar_ - startChar_ > emitter_.bestWidth_)
+if((endByte_ > 0 && endByte_ < text_.length - strideBack(text_, text_.length))
+&& (c == ' ' || startChar_ >= endChar_)
+&& (emitter_.column_ + endChar_ - startChar_ > emitter_.bestWidth_)
&& split_)
{
//text_[2:1] is ok in Python but not in D, so we have to use min()
@@ -1425,7 +1425,7 @@ struct ScalarWriter
resetTextPosition();

do
-{
+{
const dchar c = nextChar();
if(breaks_)
{

@@ -1469,7 +1469,7 @@ struct ScalarWriter
resetTextPosition();

do
-{
+{
const dchar c = nextChar();
if(breaks_)
{

@@ -1503,7 +1503,7 @@ struct ScalarWriter
resetTextPosition();

do
-{
+{
const dchar c = nextChar();
if(spaces_)
{

@@ -1561,7 +1561,7 @@ struct ScalarWriter
///Is the current line too wide?
@property bool tooWide() const pure @safe nothrow
{
-return startChar_ + 1 == endChar_ &&
+return startChar_ + 1 == endChar_ &&
emitter_.column_ > emitter_.bestWidth_;
}

@@ -1571,7 +1571,7 @@ struct ScalarWriter
size_t hintsIdx = 0;
if(text_.length == 0){return hintsIdx;}

-dchar lastChar(const string str, ref size_t end)
+dchar lastChar(const string str, ref size_t end)
{
size_t idx = end = end - strideBack(str, end);
return decode(text_, idx);
@@ -88,4 +88,3 @@ static this()
'\u2028': 'L',
'\u2029': 'P'];
}
-
@@ -120,7 +120,7 @@ template ExceptionCtors()
template MarkedExceptionCtors()
{
public:
-this(string context, const Mark contextMark, string problem,
+this(string context, const Mark contextMark, string problem,
const Mark problemMark, string file = __FILE__, size_t line = __LINE__)
@safe pure nothrow
{
@@ -24,7 +24,7 @@ package:
* Params: chars = String to search in.
* tableSize = Maximum number of bytes used by the table.
*
-* Generated method:
+* Generated method:
* bool canFind(dchar c)
*
* Determines if a character is in the string.

@@ -57,7 +57,7 @@ string searchCode(dstring chars, uint tableSize)()
return specialChars.map!(c => q{cast(uint)c == %s}.format(cast(uint)c)).join(q{ || });
}

-const caseInTable =
+const caseInTable =
q{
if(c < %s)
{

@@ -68,21 +68,21 @@ string searchCode(dstring chars, uint tableSize)()
string code;
if(tableSize)
{
-code ~=
+code ~=
q{
static immutable ubyte[%s] table_ = [
%s];
}.format(tableSize, table[].map!(c => c ? q{true} : q{false}).join(q{, }));
}
-code ~=
+code ~=
q{
-bool canFind(const dchar c) @safe pure nothrow @nogc
+bool canFind(const dchar c) @safe pure nothrow @nogc
{
%s

return %s;
}
-}.format(tableSize ? caseInTable : "",
+}.format(tableSize ? caseInTable : "",
specialChars.length ? specialCharsCode() : q{false});

return code;
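Aside (not part of this commit): the canFind generated by FastCharSearch above is consumed through a named mixin instantiation, the same pattern already visible in the Emitter hunks earlier in this diff. A minimal sketch, assuming the module is importable as dyaml.fastcharsearch; the wrapper function is hypothetical.

    import dyaml.fastcharsearch;  // module path assumed

    // Instantiate the compile-time search table; the generated canFind is
    // reached through the mixin's name, mirroring the spaceSearch usage above.
    mixin FastCharSearch!"\0\n\r\u0085\u2028\u2029 \t"d spaceSearch;

    // Hypothetical helper, for illustration only.
    bool isSpaceOrBreak(const dchar c) @safe pure nothrow @nogc
    {
        return spaceSearch.canFind(c);
    }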
@@ -75,4 +75,3 @@ struct Flags(names ...) if(names.length <= 8)
flags.empty = false;
assert(flags.empty == false && flags.multiline == false);
}
-
@@ -63,7 +63,7 @@ struct Loader
name_ = filename;
try
{
-this(std.file.read(filename));
+this(std.file.read(filename));
}
catch(FileException e)
{
@@ -1026,7 +1026,7 @@ struct Node

/** Return a range object iterating over a sequence, getting each
* element as T.
-*
+*
* If T is Node, simply iterate over the nodes in the sequence.
* Otherwise, convert each node to T during iteration.
*

@@ -1052,10 +1052,10 @@ struct Node
/* Input range functionality. */
bool empty() @property { return position >= subnodes.length; }

-void popFront()
-{
+void popFront()
+{
enforce(!empty, "Attempted to popFront an empty sequence");
-position++;
+position++;
}

T front() @property

@@ -1071,14 +1071,14 @@ struct Node
Range save() { return this; }

/* Bidirectional range functionality. */
-void popBack()
-{
+void popBack()
+{
enforce(!empty, "Attempted to popBack an empty sequence");
-subnodes = subnodes[0 .. $ - 1];
+subnodes = subnodes[0 .. $ - 1];
}

-T back()
-{
+T back()
+{
enforce(!empty, "Attempted to take the back of an empty sequence");
static if (is(Unqual!T == Node))
return subnodes[$ - 1];
@@ -273,16 +273,16 @@ ValidateResult validateUTF8NoGC(const(char[]) str) @safe pure nothrow @nogc
///
/// Params:
///
-/// validated = If ture, assume str is a valid UTF-8 string and don't generate any
+/// validated = If ture, assume str is a valid UTF-8 string and don't generate any
/// error-checking code. If validated is true, str $(B must) be a valid
/// character, otherwise undefined behavior will occur. Also affects the
/// return type.
-/// str = Will decode the first code point from this string.
+/// str = Will decode the first code point from this string.
/// index = Index in str where the code point starts. Will be updated to point to
/// the next code point.
///
/// Returns: If validated is true, the decoded character.
-/// Otherwise a struct with a 'decoded' member - the decoded character, and a
+/// Otherwise a struct with a 'decoded' member - the decoded character, and a
/// 'string errorMessage' member that is null on success and otherwise stores
/// the error message.
auto decodeUTF8NoGC(Flag!"validated" validated)(const(char[]) str, ref size_t index)
@@ -102,7 +102,7 @@ class ParserException : MarkedYAMLException

/// Generates events from tokens provided by a Scanner.
///
-/// While Parser receives tokens with non-const character slices, the events it
+/// While Parser receives tokens with non-const character slices, the events it
/// produces are immutable strings, which are usually the same slices, cast to string.
/// Parser is the last layer of D:YAML that may possibly do any modifications to these
/// slices.

@@ -110,7 +110,7 @@ final class Parser
{
private:
///Default tag handle shortcuts and replacements.
-static TagDirective[] defaultTagDirectives_ =
+static TagDirective[] defaultTagDirectives_ =
[TagDirective("!", "!"), TagDirective("!!", "tag:yaml.org,2002:")];

///Scanner providing YAML tokens.
@@ -18,14 +18,14 @@ import std.traits;
package:

/// Simple queue implemented as a singly linked list with a tail pointer.
-///
+///
/// Needed in some D:YAML code that needs a queue-like structure without too much
/// reallocation that goes with an array.
-///
+///
/// This should be replaced once Phobos has a decent queue/linked list.
-///
+///
/// Uses manual allocation through malloc/free.
-///
+///
/// Also has some features uncommon for a queue, e.g. iteration. Couldn't bother with
/// implementing a range, as this is used only as a placeholder until Phobos gets a
/// decent replacement.

@@ -88,7 +88,7 @@ struct Queue(T)
body
{
const previous = cursor_;
-cursor_ = cursor_.next_;
+cursor_ = cursor_.next_;
return previous.payload_;
}

@@ -160,7 +160,7 @@ struct Queue(T)
}

/// Return the next element in the queue.
-ref inout(T) peek() @safe pure nothrow inout @nogc
+ref inout(T) peek() @safe pure nothrow inout @nogc
in
{
assert(!empty, "Trying to peek at an element in an empty queue");
@@ -5,8 +5,8 @@
// http://www.boost.org/LICENSE_1_0.txt)

/**
-* YAML node _representer. Prepares YAML nodes for output. A tutorial can be
-* found $(LINK2 ../tutorials/custom_types.html, here).
+* YAML node _representer. Prepares YAML nodes for output. A tutorial can be
+* found $(LINK2 ../tutorials/custom_types.html, here).
*
* Code based on $(LINK2 http://www.pyyaml.org, PyYAML).
*/

@@ -60,13 +60,13 @@ final class Representer

/**
* Construct a Representer.
-*
+*
* Params: useDefaultRepresenters = Use default representer functions
* for default YAML types? This can be
* disabled to use custom representer
* functions for default types.
*/
-this(const Flag!"useDefaultRepresenters" useDefaultRepresenters = Yes.useDefaultRepresenters)
+this(const Flag!"useDefaultRepresenters" useDefaultRepresenters = Yes.useDefaultRepresenters)
@safe pure
{
if(!useDefaultRepresenters){return;}

@@ -94,7 +94,7 @@ final class Representer
defaultScalarStyle_ = style;
}

-///Set default _style for collections. If style is $(D CollectionStyle.Invalid), the _style is chosen automatically.
+///Set default _style for collections. If style is $(D CollectionStyle.Invalid), the _style is chosen automatically.
@property void defaultCollectionStyle(CollectionStyle style) pure @safe nothrow
{
defaultCollectionStyle_ = style;

@@ -106,7 +106,6 @@ final class Representer
* The representer function takes references to a $(D Node) storing the data
* type and to the $(D Representer). It returns the represented node and may
* throw a $(D RepresenterException). See the example for more information.
*
-*
*
* Only one function may be specified for one data type. Default data

@@ -114,10 +113,10 @@ final class Representer
* $(D Representer) constructor.
*
*
-* Structs and classes must implement the $(D opCmp()) operator for D:YAML
-* support. The signature of the operator that must be implemented
-* is $(D const int opCmp(ref const MyStruct s)) for structs where
-* $(I MyStruct) is the struct type, and $(D int opCmp(Object o)) for
+* Structs and classes must implement the $(D opCmp()) operator for D:YAML
+* support. The signature of the operator that must be implemented
+* is $(D const int opCmp(ref const MyStruct s)) for structs where
+* $(I MyStruct) is the struct type, and $(D int opCmp(Object o)) for
* classes. Note that the class $(D opCmp()) should not alter the compared
* values - it is not const for compatibility reasons.
*

@@ -126,7 +125,7 @@ final class Representer
void addRepresenter(T)(Node function(ref Node, Representer) @safe representer)
@safe pure
{
-assert((typeid(T) in representers_) is null,
+assert((typeid(T) in representers_) is null,
"Representer function for data type " ~ T.stringof ~
" already specified. Can't specify another one");
representers_[typeid(T)] = representer;
@@ -223,7 +222,7 @@ final class Representer
}

//If profiling shows a bottleneck on tag construction in these 3 methods,
-//we'll need to take Tag directly and have string based wrappers for
+//we'll need to take Tag directly and have string based wrappers for
//user code.

/**

@@ -238,7 +237,7 @@ final class Representer
*
* Returns: The represented node.
*/
-Node representScalar(string tag, string scalar,
+Node representScalar(string tag, string scalar,
ScalarStyle style = ScalarStyle.Invalid) @trusted
{
if(style == ScalarStyle.Invalid){style = defaultScalarStyle_;}

@@ -291,7 +290,7 @@ final class Representer
*
* Throws: $(D RepresenterException) if a child could not be represented.
*/
-Node representSequence(string tag, Node[] sequence,
+Node representSequence(string tag, Node[] sequence,
CollectionStyle style = CollectionStyle.Invalid) @trusted
{
Node[] value;

@@ -393,7 +392,7 @@ final class Representer

if(style == CollectionStyle.Invalid)
{
-style = defaultCollectionStyle_ != CollectionStyle.Invalid
+style = defaultCollectionStyle_ != CollectionStyle.Invalid
? defaultCollectionStyle_
: bestStyle;
}

@@ -442,7 +441,7 @@ final class Representer
auto type = data.isUserType ? data.as!YAMLObject.type : data.type;

enforce((type in representers_) !is null,
-new RepresenterException("No representer function for type "
+new RepresenterException("No representer function for type "
~ type.toString() ~ " , cannot represent."));
Node result = representers_[type](data, this);

@@ -480,8 +479,8 @@ Node representNull(ref Node node, Representer representer) @safe
Node representString(ref Node node, Representer representer) @safe
{
string value = node.as!string;
-return value is null
-? representNull(node, representer)
+return value is null
+? representNull(node, representer)
: representer.representScalar("tag:yaml.org,2002:str", value);
}
@@ -498,14 +497,14 @@ Node representBytes(ref Node node, Representer representer) @safe
///Represent a bool _node as a bool scalar.
Node representBool(ref Node node, Representer representer) @safe
{
-return representer.representScalar("tag:yaml.org,2002:bool",
+return representer.representScalar("tag:yaml.org,2002:bool",
node.as!bool ? "true" : "false");
}

///Represent a long _node as an integer scalar.
Node representLong(ref Node node, Representer representer) @safe
{
-return representer.representScalar("tag:yaml.org,2002:int",
+return representer.representScalar("tag:yaml.org,2002:int",
to!string(node.as!long));
}

@@ -526,7 +525,7 @@ Node representReal(ref Node node, Representer representer) @safe
///Represent a SysTime _node as a timestamp.
Node representSysTime(ref Node node, Representer representer) @safe
{
-return representer.representScalar("tag:yaml.org,2002:timestamp",
+return representer.representScalar("tag:yaml.org,2002:timestamp",
node.as!SysTime.toISOExtString());
}

@@ -614,7 +613,7 @@ struct MyStruct
if(y != s.y){return y - s.y;}
if(z != s.z){return z - s.z;}
return 0;
}
-}
+}

Node representMyStruct(ref Node node, Representer representer) @safe

@@ -628,17 +627,17 @@ Node representMyStruct(ref Node node, Representer representer) @safe
}

Node representMyStructSeq(ref Node node, Representer representer) @safe
-{
+{
auto value = node.as!MyStruct;
auto nodes = [Node(value.x), Node(value.y), Node(value.z)];
return representer.representSequence("!mystruct.tag", nodes);
}

Node representMyStructMap(ref Node node, Representer representer) @safe
-{
+{
auto value = node.as!MyStruct;
-auto pairs = [Node.Pair("x", value.x),
-Node.Pair("y", value.y),
+auto pairs = [Node.Pair("x", value.x),
+Node.Pair("y", value.y),
Node.Pair("z", value.z)];
return representer.representMapping("!mystruct.tag", pairs);
}
@@ -649,11 +648,11 @@ class MyClass

this(int x, int y, int z) pure @safe nothrow
{
-this.x = x;
-this.y = y;
+this.x = x;
+this.y = y;
this.z = z;
}
-
+
override int opCmp(Object o) pure @safe nothrow
{
MyClass s = cast(MyClass)o;

@@ -686,8 +685,8 @@ import dyaml.stream;

@safe unittest
{
-foreach(r; [&representMyStruct,
-&representMyStructSeq,
+foreach(r; [&representMyStruct,
+&representMyStructSeq,
&representMyStructMap])
{
auto dumper = Dumper(new YMemoryStream());
@@ -6,10 +6,10 @@

/**
* Implements a class that resolves YAML tags. This can be used to implicitly
-* resolve tags for custom data types, removing the need to explicitly
-* specify tags in YAML. A tutorial can be found
-* $(LINK2 ../tutorials/custom_types.html, here).
-*
+* resolve tags for custom data types, removing the need to explicitly
+* specify tags in YAML. A tutorial can be found
+* $(LINK2 ../tutorials/custom_types.html, here).
+*
* Code based on $(LINK2 http://www.pyyaml.org, PyYAML).
*/
module dyaml.resolver;

@@ -30,7 +30,7 @@ import dyaml.exception;
*
* Can be used to implicitly resolve custom data types of scalar values.
*/
-final class Resolver
+final class Resolver
{
private:
// Default tag to use for scalars.

@@ -40,7 +40,7 @@ final class Resolver
// Default tag to use for mappings.
string defaultMappingTag_;

-/*
+/*
* Arrays of scalar resolver tuples indexed by starting character of a scalar.
*
* Each tuple stores regular expression the scalar must match,

@@ -60,7 +60,7 @@ final class Resolver
*
* Params: defaultImplicitResolvers = Use default YAML implicit resolvers?
*/
-this(Flag!"useDefaultImplicitResolvers" defaultImplicitResolvers = Yes.useDefaultImplicitResolvers)
+this(Flag!"useDefaultImplicitResolvers" defaultImplicitResolvers = Yes.useDefaultImplicitResolvers)
@safe
{
defaultScalarTag_ = "tag:yaml.org,2002:str";

@@ -77,11 +77,11 @@ final class Resolver
}

/**
-* Add an implicit scalar resolver.
+* Add an implicit scalar resolver.
*
-* If a scalar matches regexp and starts with any character in first,
+* If a scalar matches regexp and starts with any character in first,
* its _tag is set to tag. If it matches more than one resolver _regexp
-* resolvers added _first override ones added later. Default resolvers
+* resolvers added _first override ones added later. Default resolvers
* override any user specified resolvers, but they can be disabled in
* Resolver constructor.
*
@@ -93,8 +93,8 @@ final class Resolver
* first = String of possible starting characters of the scalar.
*
*/
-void addImplicitResolver(string tag, Regex!char regexp, string first)
-pure @safe
+void addImplicitResolver(string tag, Regex!char regexp, string first)
+pure @safe
{
foreach(const dchar c; first)
{

@@ -153,7 +153,7 @@ final class Resolver
size_t dummy;
const dchar first = value.length == 0 ? '\0' : decode(value, dummy);

-auto resolvers = (first in yamlImplicitResolvers_) is null ?
+auto resolvers = (first in yamlImplicitResolvers_) is null ?
[] : yamlImplicitResolvers_[first];

//If regexp matches, return tag.

@@ -187,19 +187,19 @@ final class Resolver
return true;
}

-assert(tagMatch("tag:yaml.org,2002:bool",
+assert(tagMatch("tag:yaml.org,2002:bool",
["yes", "NO", "True", "on"]));
-assert(tagMatch("tag:yaml.org,2002:float",
-["6.8523015e+5", "685.230_15e+03", "685_230.15",
+assert(tagMatch("tag:yaml.org,2002:float",
+["6.8523015e+5", "685.230_15e+03", "685_230.15",
"190:20:30.15", "-.inf", ".NaN"]));
-assert(tagMatch("tag:yaml.org,2002:int",
+assert(tagMatch("tag:yaml.org,2002:int",
["685230", "+685_230", "02472256", "0x_0A_74_AE",
"0b1010_0111_0100_1010_1110", "190:20:30"]));
assert(tagMatch("tag:yaml.org,2002:merge", ["<<"]));
assert(tagMatch("tag:yaml.org,2002:null", ["~", "null", ""]));
-assert(tagMatch("tag:yaml.org,2002:str",
+assert(tagMatch("tag:yaml.org,2002:str",
["abcd", "9a8b", "9.1adsf"]));
-assert(tagMatch("tag:yaml.org,2002:timestamp",
+assert(tagMatch("tag:yaml.org,2002:timestamp",
["2001-12-15T02:59:43.1Z",
"2001-12-14t21:59:43.10-05:00",
"2001-12-14 21:59:43.10 -5",
@@ -242,15 +242,15 @@ final class Resolver
"|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$"),
"-+0123456789");
addImplicitResolver("tag:yaml.org,2002:merge", regex(r"^<<$"), "<");
-addImplicitResolver("tag:yaml.org,2002:null",
+addImplicitResolver("tag:yaml.org,2002:null",
regex(r"^$|^(?:~|null|Null|NULL)$"), "~nN\0");
-addImplicitResolver("tag:yaml.org,2002:timestamp",
+addImplicitResolver("tag:yaml.org,2002:timestamp",
regex(r"^[0-9][0-9][0-9][0-9]-[0-9][0-9]-" ~
"[0-9][0-9]|[0-9][0-9][0-9][0-9]-[0-9]" ~
"[0-9]?-[0-9][0-9]?[Tt]|[ \t]+[0-9]" ~
"[0-9]?:[0-9][0-9]:[0-9][0-9]" ~
"(?:\\.[0-9]*)?(?:[ \t]*Z|[-+][0-9]" ~
-"[0-9]?(?::[0-9][0-9])?)?$"),
+"[0-9]?(?::[0-9][0-9])?)?$"),
"0123456789");
addImplicitResolver("tag:yaml.org,2002:value", regex(r"^=$"), "=");
@@ -59,16 +59,16 @@ struct Serializer
* Construct a Serializer.
*
* Params: emitter = Emitter to emit events produced.
-* resolver = Resolver used to determine which tags are automaticaly resolvable.
+* resolver = Resolver used to determine which tags are automaticaly resolvable.
* encoding = Character encoding to use.
-* explicitStart = Do all document starts have to be specified explicitly?
-* explicitEnd = Do all document ends have to be specified explicitly?
-* YAMLVersion = YAML version string.
-* tagDirectives = Tag directives to emit.
+* explicitStart = Do all document starts have to be specified explicitly?
+* explicitEnd = Do all document ends have to be specified explicitly?
+* YAMLVersion = YAML version string.
+* tagDirectives = Tag directives to emit.
*/
this(ref Emitter emitter, Resolver resolver, Encoding encoding,
-const Flag!"explicitStart" explicitStart,
-const Flag!"explicitEnd" explicitEnd, string YAMLVersion,
+const Flag!"explicitStart" explicitStart,
+const Flag!"explicitEnd" explicitEnd, string YAMLVersion,
TagDirective[] tagDirectives) @trusted
{
emitter_ = &emitter;

@@ -96,7 +96,7 @@ struct Serializer
///Serialize a node, emitting it in the process.
void serialize(ref Node node) @safe
{
-emitter_.emit(documentStartEvent(Mark(), Mark(), explicitStart_,
+emitter_.emit(documentStartEvent(Mark(), Mark(), explicitStart_,
YAMLVersion_, tagDirectives_));
anchorNode(node);
serializeNode(node);

@@ -112,7 +112,7 @@ struct Serializer
/**
* Determine if it's a good idea to add an anchor to a node.
*
-* Used to prevent associating every single repeating scalar with an
+* Used to prevent associating every single repeating scalar with an
* anchor/alias - only nodes long enough can use anchors.
*
* Params: node = Node to check for anchorability.

@@ -168,8 +168,8 @@ struct Serializer
///Serialize a node and all its subnodes.
void serializeNode(ref Node node) @safe
{
-//If the node has an anchor, emit an anchor (as aliasEvent) on the
-//first occurrence, save it in serializedNodes_, and emit an alias
+//If the node has an anchor, emit an anchor (as aliasEvent) on the
+//first occurrence, save it in serializedNodes_, and emit an alias
//if it reappears.
string aliased = null;
if(anchorable(node) && (node in anchors_) !is null)

@@ -211,12 +211,12 @@ struct Serializer
}
if(node.isMapping)
{
-const defaultTag = resolver_.defaultMappingTag;
+const defaultTag = resolver_.defaultMappingTag;
const implicit = node.tag_ == defaultTag;
emitter_.emit(mappingStartEvent(Mark(), Mark(), aliased, node.tag_,
implicit, node.collectionStyle));
foreach(ref Node key, ref Node value; node)
-{
+{
serializeNode(key);
serializeNode(value);
}
@@ -22,7 +22,7 @@ enum ScalarStyle : ubyte
///Collection styles.
enum CollectionStyle : ubyte
{
-Invalid = 0, /// Invalid (uninitialized) style
+Invalid = 0, /// Invalid (uninitialized) style
Block, /// Block style.
Flow /// Flow style.
}