Merge pull request #170 from BBasile/apply-dscanner

Partially address D-Scanner warnings in the whole project.

merged-on-behalf-of: BBasile <BBasile@users.noreply.github.com>
Commit: 53f5d9c799
@@ -129,7 +129,7 @@ final class Constructor
 void addConstructorScalar(T)(const string tag, T function(ref Node) @safe ctor)
 {
 const t = tag;
-auto deleg = addConstructor!T(t, ctor);
+const deleg = addConstructor!T(t, ctor);
 (*delegates!string)[t] = deleg;
 }
 ///
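
Note on the hunk above: D-Scanner suggests `const` instead of `auto` for locals that are never reassigned. `const` still infers the type; it only adds the guarantee that the value cannot be modified afterwards. A minimal standalone sketch, not part of the patch (`twice` is an invented helper):

    int twice(int x) @safe { return 2 * x; }

    void main() @safe
    {
        auto a = twice(21);   // mutable; type inferred as int
        const b = twice(21);  // type still inferred, but b cannot be reassigned
        a = 0;                // fine
        // b = 0;             // would not compile: cannot modify const variable
        assert(a == 0 && b == 42);
    }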
|
@@ -141,7 +141,7 @@ final class Constructor

 //Any D:YAML type must have a custom opCmp operator.
 //This is used for ordering in mappings.
-const int opCmp(ref const MyStruct s)
+int opCmp(ref const MyStruct s) const
 {
 if(x != s.x){return x - s.x;}
 if(y != s.y){return y - s.y;}
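
The opCmp change above is purely about attribute placement. In D, a `const` written before a member function's return type is a function attribute, not a return-type qualifier, so `const int opCmp(...)` already declared a const member function returning a plain `int`; D-Scanner flags the leading position as misleading. A self-contained sketch with an invented `Point` type showing the preferred spelling:

    struct Point
    {
        int x, y;

        // Attribute on the right: clearly a const member function returning int.
        // The discouraged spelling `const int opCmp(ref const Point rhs)` would
        // mean exactly the same thing; the leading `const` never applies to `int`.
        int opCmp(ref const Point rhs) const
        {
            if(x != rhs.x) { return x - rhs.x; }
            return y - rhs.y;
        }
    }

    void main()
    {
        const a = Point(1, 2);
        const b = Point(1, 3);
        assert(a < b);  // usable on const values because opCmp is a const member
    }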
|
@@ -175,7 +175,7 @@ final class Constructor
 void addConstructorSequence(T)(const string tag, T function(ref Node) @safe ctor)
 {
 const t = tag;
-auto deleg = addConstructor!T(t, ctor);
+const deleg = addConstructor!T(t, ctor);
 (*delegates!(Node[]))[t] = deleg;
 }
 ///

@@ -187,7 +187,7 @@ final class Constructor

 //Any D:YAML type must have a custom opCmp operator.
 //This is used for ordering in mappings.
-const int opCmp(ref const MyStruct s)
+int opCmp(ref const MyStruct s) const
 {
 if(x != s.x){return x - s.x;}
 if(y != s.y){return y - s.y;}

@@ -219,7 +219,7 @@ final class Constructor
 void addConstructorMapping(T)(const string tag, T function(ref Node) @safe ctor)
 {
 const t = tag;
-auto deleg = addConstructor!T(t, ctor);
+const deleg = addConstructor!T(t, ctor);
 (*delegates!(Node.Pair[]))[t] = deleg;
 }
 ///

@@ -230,7 +230,7 @@ final class Constructor

 //Any D:YAML type must have a custom opCmp operator.
 //This is used for ordering in mappings.
-const int opCmp(ref const MyStruct s)
+int opCmp(ref const MyStruct s) const
 {
 if(x != s.x){return x - s.x;}
 if(y != s.y){return y - s.y;}
@@ -484,7 +484,7 @@ long constructLong(ref Node node) @safe
 //Sexagesimal.
 else if(value.canFind(":"))
 {
-long val = 0;
+long val;
 long base = 1;
 foreach_reverse(digit; value.split(":"))
 {
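
`long val = 0;` becoming `long val;` (and the many `= 0`, `= false`, `= null` removals in later hunks) relies on D's guaranteed default initialization: every variable starts at its type's `.init` value, so writing out the default is redundant and triggers D-Scanner's useless-initializer check. A minimal sketch, independent of the patch:

    void main() @safe
    {
        long counter;  // default-initialized to long.init, i.e. 0
        bool seen;     // false
        string name;   // null
        int[] values;  // null (empty) slice

        assert(counter == 0);
        assert(!seen);
        assert(name is null);
        assert(values.length == 0);

        // Floating point is the exception: double.init is NaN,
        // so `double d;` is NOT equivalent to `double d = 0;`.
    }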
|
@@ -622,7 +622,7 @@ ubyte[] constructBinary(ref Node node) @safe
 buffer.length = 256;
 string input = Base64.encode(test, buffer).idup;
 auto node = Node(input);
-auto value = constructBinary(node);
+const value = constructBinary(node);
 assert(value == test);
 assert(value == [84, 104, 101, 32, 65, 110, 115, 119, 101, 114, 58, 32, 52, 50]);
 }
@@ -661,7 +661,7 @@ SysTime constructTimestamp(ref Node node) @safe
 const hour = to!int(captures[1]);
 const minute = to!int(captures[2]);
 const second = to!int(captures[3]);
-const hectonanosecond = cast(int)(to!real("0" ~ captures[4]) * 10000000);
+const hectonanosecond = cast(int)(to!real("0" ~ captures[4]) * 10_000_000);

 // If available, get timezone.
 value = matches.front.post;
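
`10_000_000` uses D's underscore digit separators, which the compiler ignores; they only make long literals easier to read. A tiny sketch with invented constants:

    void main() @safe
    {
        // Underscores in numeric literals change nothing semantically.
        enum hectoNanosPerSecond = 10_000_000;
        assert(hectoNanosPerSecond == 10000000);

        enum oneMebibyte = 1_048_576;
        assert(oneMebibyte == 2 ^^ 20);
    }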
|
@@ -676,7 +676,7 @@ SysTime constructTimestamp(ref Node node) @safe
 // We have a timezone, so parse it.
 captures = matches.front.captures;
 int sign = 1;
-int tzHours = 0;
+int tzHours;
 if(!captures[1].empty)
 {
 if(captures[1][0] == '-') {sign = -1;}

@@ -47,7 +47,7 @@ struct Dumper
 //Stream to write to.
 YStream stream_;
 //True if this Dumper owns stream_ and needs to destroy it in the destructor.
-bool weOwnStream_ = false;
+bool weOwnStream_;

 //Write scalars in canonical form?
 bool canonical_;
@@ -62,7 +62,7 @@ struct Dumper
 //YAML version string.
 string YAMLVersion_ = "1.1";
 //Tag directives to use.
-TagDirective[] tags_ = null;
+TagDirective[] tags_;
 //Always write document start?
 Flag!"explicitStart" explicitStart_ = No.explicitStart;
 //Always write document end?

@@ -260,7 +260,7 @@ struct Emitter
 ///Determines if we need specified number of more events.
 bool needEvents(in uint count) @safe nothrow
 {
-int level = 0;
+int level;

 //Rather ugly, but good enough for now.
 //Couldn't be bothered writing a range as events_ should eventually
@@ -698,7 +698,7 @@ struct Emitter
 ///Check if a simple key is next.
 bool checkSimpleKey() @safe
 {
-uint length = 0;
+uint length;
 const id = event_.id;
 const scalar = id == EventID.Scalar;
 const collectionStart = id == EventID.MappingStart ||
@@ -904,9 +904,8 @@ struct Emitter
 new EmitterException("Tag prefix must not be empty"));

 auto appender = appender!string();
-const offset = prefix[0] == '!' ? 1 : 0;
-size_t start = 0;
-size_t end = 0;
+const int offset = prefix[0] == '!';
+size_t start, end;

 foreach(const size_t i, const dchar c; prefix)
 {
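
Two simplifications in the hunk above: a D `bool` converts implicitly to `int` (true becomes 1, false becomes 0), so the ternary `? 1 : 0` was redundant, and the two zero-initialized counters collapse into one declaration that relies on default initialization. A standalone sketch, names illustrative only:

    void main() @safe
    {
        const string prefix = "!foo";

        // bool converts implicitly to int, so `prefix[0] == '!' ? 1 : 0`
        // can simply use the comparison result.
        const int offset = prefix[0] == '!';
        assert(offset == 1);

        // Several variables of one type can share a declaration;
        // both default-initialize to 0.
        size_t start, end;
        assert(start == 0 && end == 0);
    }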
|
@@ -935,7 +934,7 @@ struct Emitter

 string tagString = tag;
 if(tagString == "!"){return tagString;}
-string handle = null;
+string handle;
 string suffix = tagString;

 //Sort lexicographically by prefix.

@@ -953,8 +952,7 @@ struct Emitter

 auto appender = appender!string();
 appender.put(handle !is null && handle != "" ? handle : "!<");
-size_t start = 0;
-size_t end = 0;
+size_t start, end;
 foreach(const dchar c; suffix)
 {
 if(isAlphaNum(c) || "-;/?:@&=+$,_.~*\'()[]"d.canFind(c) ||
@@ -1581,8 +1579,9 @@ struct ScalarWriter
 ///Determine hints (indicators) for block scalar.
 size_t determineBlockHints(char[] hints, uint bestIndent) const pure @safe
 {
-size_t hintsIdx = 0;
-if(text_.length == 0){return hintsIdx;}
+size_t hintsIdx;
+if(text_.length == 0)
+    return hintsIdx;

 dchar lastChar(const string str, ref size_t end)
 {
@@ -46,7 +46,7 @@ struct Loader
 // Name of the input file or stream, used in error messages.
 string name_ = "<unknown>";
 // Are we done loading?
-bool done_ = false;
+bool done_;

 public:
 @disable this();

@@ -255,7 +255,7 @@ struct Loader
 lazyInitConstructorResolver();
 auto composer = new Composer(parser_, resolver_, constructor_);

-int result = 0;
+int result;
 while(composer.checkNode())
 {
 auto node = composer.getNode();
@@ -67,7 +67,7 @@ package abstract class YAMLObject

 protected:
 // Compare with another YAMLObject.
-int cmp(const YAMLObject rhs) const @system {assert(false);};
+int cmp(const YAMLObject) const @system {assert(false);}
 }

 // Stores a user defined YAML data type.
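
The `cmp` stub above keeps the parameter's type but drops its name: D allows unnamed parameters, which silences the unused-parameter warning without changing the overridable signature (the stray `;` after the body was dropped too). A sketch with invented types:

    abstract class Shape
    {
        // Base-class stub; the argument is deliberately unused, so it stays unnamed.
        int compareTo(const Shape) const
        {
            assert(false); // derived classes must override this
        }
    }

    class Circle : Shape
    {
        int radius;
        this(int r) { radius = r; }

        override int compareTo(const Shape rhs) const
        {
            const other = cast(const Circle) rhs;
            assert(other !is null);
            return radius - other.radius;
        }
    }

    void main()
    {
        Shape a = new Circle(2);
        Shape b = new Circle(5);
        assert(a.compareTo(b) < 0);
    }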
|
@@ -967,7 +967,7 @@ struct Node
 }

 /* Input range functionality. */
-bool empty() @property { return position >= subnodes.length; }
+bool empty() const @property { return position >= subnodes.length; }

 void popFront()
 {
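
This hunk and the next mark the range primitives `empty` and `front` as `const`, so they can be called on const values and visibly do not mutate the range; only `popFront` has to stay mutable. A self-contained sketch with an invented `Counter` range:

    struct Counter
    {
        int current;
        int limit;

        // Query-only primitives are const: they never mutate the range.
        bool empty() const @property { return current >= limit; }
        int front() const @property { return current; }

        // popFront necessarily mutates, so it stays non-const.
        void popFront() { current++; }
    }

    void main()
    {
        int sum;
        foreach(x; Counter(0, 3)) { sum += x; }  // uses empty/front/popFront
        assert(sum == 0 + 1 + 2);

        const frozen = Counter(1, 3);
        assert(!frozen.empty && frozen.front == 1);  // callable on a const value
    }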
|
@@ -975,7 +975,7 @@ struct Node
 position++;
 }

-T front() @property
+T front() const @property
 {
 enforce(!empty, "Attempted to take the front of an empty sequence");
 static if (is(Unqual!T == Node))

@@ -1226,7 +1226,7 @@ struct Node
 new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node",
 startMark_));

-int result = 0;
+int result;
 foreach(ref node; get!(Node[]))
 {
 static if(is(Unqual!T == Node))
@@ -1249,7 +1249,7 @@ struct Node
 new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node",
 startMark_));

-int result = 0;
+int result;
 foreach(ref node; get!(Node[]))
 {
 static if(is(Unqual!T == Node))

@@ -1344,7 +1344,7 @@ struct Node
 new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node",
 startMark_));

-int result = 0;
+int result;
 foreach(ref pair; get!(Node.Pair[]))
 {
 static if(is(Unqual!K == Node) && is(Unqual!V == Node))
@@ -1379,7 +1379,7 @@ struct Node
 new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node",
 startMark_));

-int result = 0;
+int result;
 foreach(ref pair; get!(Node.Pair[]))
 {
 static if(is(Unqual!K == Node) && is(Unqual!V == Node))

@@ -2170,7 +2170,7 @@ struct Node
 else {node = &pair.value;}


-bool typeMatch = (isFloatingPoint!T && (node.isInt || node.isFloat)) ||
+const bool typeMatch = (isFloatingPoint!T && (node.isInt || node.isFloat)) ||
 (isIntegral!T && node.isInt) ||
 (is(Unqual!T==bool) && node.isBool) ||
 (isSomeString!T && node.isString) ||
@@ -379,7 +379,7 @@ final class Parser
 //Add any default tag handles that haven't been overridden.
 foreach(ref defaultPair; defaultTagDirectives_)
 {
-bool found = false;
+bool found;
 foreach(ref pair; tagDirectives_) if(defaultPair.handle == pair.handle)
 {
 found = true;

@@ -422,8 +422,8 @@ final class Parser
 cast(string)token.value);
 }

-string anchor = null;
-string tag = null;
+string anchor;
+string tag;
 Mark startMark, endMark, tagMark;
 bool invalidMarks = true;
 // The index in the tag string where tag handle ends and tag suffix starts.
@@ -536,7 +536,7 @@ final class Parser
 string handleDoubleQuotedScalarEscapes(char[] tokenValue) const @safe
 {
 string notInPlace;
-bool inEscape = false;
+bool inEscape;
 auto appender = appender!(string)();
 for(char[] oldValue = tokenValue; !oldValue.empty();)
 {

@@ -624,7 +624,7 @@ final class Parser

 if(handle.length > 0)
 {
-string replacement = null;
+string replacement;
 foreach(ref pair; tagDirectives_)
 {
 if(pair.handle == handle)
@@ -47,15 +47,15 @@ final class Reader
 {
 private:
 // Buffer of currently loaded characters.
-char[] buffer_ = null;
+char[] buffer_;

 // Current position within buffer. Only data after this position can be read.
-size_t bufferOffset_ = 0;
+size_t bufferOffset_;

 // Index of the current character in the buffer.
-size_t charIndex_ = 0;
+size_t charIndex_;
 // Number of characters (code points) in buffer_.
-size_t characterCount_ = 0;
+size_t characterCount_;

 // Current line in file.
 uint line_;

@@ -74,13 +74,13 @@ final class Reader
 // The number of consecutive ASCII characters starting at bufferOffset_.
 //
 // Used to minimize UTF-8 decoding.
-size_t upcomingASCII_ = 0;
+size_t upcomingASCII_;

 // Index to buffer_ where the last decoded character starts.
-size_t lastDecodedBufferOffset_ = 0;
+size_t lastDecodedBufferOffset_;
 // Offset, relative to charIndex_, of the last decoded character,
 // in code points, not chars.
-size_t lastDecodedCharOffset_ = 0;
+size_t lastDecodedCharOffset_;

 public:
 /// Construct a Reader.
@@ -464,7 +464,7 @@ private:
 // Very few levels as we don't want arbitrarily nested transactions.
 size_t[4] endStack_;
 // The number of elements currently in endStack_.
-size_t endStackUsed_ = 0;
+size_t endStackUsed_;

 @safe const pure nothrow @nogc invariant()
 {

@@ -616,7 +616,7 @@ public:
 {
 private:
 // The slice builder affected by the transaction.
-SliceBuilder* builder_ = null;
+SliceBuilder* builder_;
 // Index of the return point of the transaction in StringBuilder.endStack_.
 size_t stackLevel_;
 // True after commit() has been called.
@@ -842,7 +842,7 @@ bool isPrintableValidUTF8(const char[] chars) @safe pure
 false, false, false, false, false, false, false, false,
 false, false, false, false, false, false, false, false];

-for(size_t index = 0; index < chars.length;)
+for(size_t index; index < chars.length;)
 {
 // Fast path for ASCII.
 // Both this while() block and the if() block below it are optimized, unrolled

@@ -1792,7 +1792,7 @@ final class Scanner
 spacesTransaction = Transaction(&reader_.sliceBuilder);

 const startLength = reader_.sliceBuilder.length;
-scanPlainSpacesToSlice(startMark);
+scanPlainSpacesToSlice();
 if(startLength == reader_.sliceBuilder.length ||
 (flowLevel_ == 0 && reader_.column < indent))
 {
@@ -1810,7 +1810,7 @@ final class Scanner
 ///
 /// Assumes that the caller is building a slice in Reader, and puts the spaces
 /// into that slice.
-void scanPlainSpacesToSlice(const Mark startMark) @safe
+void scanPlainSpacesToSlice() @safe
 {
 // The specification is really confusing about tabs in plain scalars.
 // We just forbid them completely. Do not use tabs in YAML!
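
Here the unused `startMark` parameter is removed outright and the single call site (previous hunk) is updated to match; since the function is private to the Scanner, deleting the parameter is preferable to merely leaving it unnamed. A before/after sketch with invented names:

    struct SliceBuilderDemo            // hypothetical stand-in, not D:YAML's SliceBuilder
    {
        char[] buffer;

        // Before: `void flushSpaces(string label)` took an argument it never read.
        // After: the parameter is gone and the call site is updated to match.
        void flushSpaces()
        {
            buffer ~= ' ';
        }
    }

    void main()
    {
        SliceBuilderDemo b;
        b.flushSpaces();               // call site no longer passes the dropped argument
        assert(b.buffer == " ");
    }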
@@ -165,7 +165,7 @@ struct Serializer
 //If the node has an anchor, emit an anchor (as aliasEvent) on the
 //first occurrence, save it in serializedNodes_, and emit an alias
 //if it reappears.
-string aliased = null;
+string aliased;
 if(anchorable(node) && (node in anchors_) !is null)
 {
 aliased = anchors_[node];

@@ -182,7 +182,7 @@ struct Serializer
 assert(node.isType!string, "Scalar node type must be string before serialized");
 auto value = node.as!string;
 const detectedTag = resolver_.resolve(NodeID.Scalar, null, value, true);
-bool isDetected = node.tag_ == detectedTag;
+const bool isDetected = node.tag_ == detectedTag;

 emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_,
 isDetected, value, node.scalarStyle));
@@ -198,8 +198,7 @@ void display(Result[] results) @safe
 {
 if(results.length > 0 && !verbose && !quiet){write("\n");}

-size_t failures = 0;
-size_t errors = 0;
+size_t failures, errors;

 static if(verbose)
 {

@@ -353,7 +353,7 @@ struct TestStruct

 //Any D:YAML type must have a custom opCmp operator.
 //This is used for ordering in mappings.
-const int opCmp(ref const TestStruct s) @safe
+int opCmp(ref const TestStruct s) const @safe
 {
 return value - s.value;
 }
@@ -414,7 +414,7 @@ void testConstructor(string dataFilename, string codeDummy) @safe
 Node[] exp = expected[base];

 //Compare with expected results document by document.
-size_t i = 0;
+size_t i;
 foreach(node; loader)
 {
 if(!node.equals!(No.useTag)(exp[i]))

@@ -32,7 +32,7 @@ bool compareEvents(Event[] events1, Event[] events2) @safe
 {
 if(events1.length != events2.length){return false;}

-for(uint e = 0; e < events1.length; ++e)
+for(uint e; e < events1.length; ++e)
 {
 auto e1 = events1[e];
 auto e2 = events2[e];