Const correctness and minor bugfixes all over the code.

parent 8208e817de
commit f95f0d14c8
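The recurring pattern in the hunks below, shown here as a standalone sketch (the struct and names are illustrative, not taken from D:YAML): parameters that are only read become `in` (const), setters are marked `@property`, and members that do not mutate the object gain `const` or `pure`.

    //Illustrative only - not code from this commit.
    struct Example
    {
        private uint indent_ = 2;

        //Setter: @property, with an `in` (const) parameter.
        @property void indent(in uint indent)
        {
            indent_ = indent;
        }

        //Read-only member marked const, so it can be called on const objects.
        @property uint indent() const
        {
            return indent_;
        }
    }

    unittest
    {
        Example e;
        e.indent = 4;          //property assignment syntax
        assert(e.indent == 4);
    }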
@@ -135,9 +135,7 @@ final class Composer
             //Drop the DOCUMENT-END event.
             parser_.getEvent();
 
-            //Clear anchors.
-            Node[Anchor] empty;
-            anchors_ = empty;
+            clear(anchors_);
             return node;
         }
 
@@ -157,6 +157,8 @@ struct Dumper
 
     public:
         @disable this();
+        @disable bool opEquals(ref Dumper);
+        @disable int opCmp(ref Dumper);
 
         /**
          * Construct a Dumper writing to a file.
@@ -165,7 +167,7 @@ struct Dumper
          *
          * Throws: YAMLException if the file can not be dumped to (e.g. cannot be opened).
          */
-        this(string filename)
+        this(in string filename)
         {
             name_ = filename;
             try{this(new File(filename, FileMode.OutNew));}
@@ -195,33 +197,33 @@ struct Dumper
         }
 
         ///Set stream _name. Used in debugging messages.
-        @property void name(string name)
+        @property void name(in string name)
         {
             name_ = name;
         }
 
         ///Specify custom Resolver to use.
-        void resolver(Resolver resolver)
+        @property void resolver(Resolver resolver)
         {
             clear(resolver_);
             resolver_ = resolver;
         }
 
         ///Specify custom Representer to use.
-        void representer(Representer representer)
+        @property void representer(Representer representer)
         {
             clear(representer_);
             representer_ = representer;
         }
 
         ///Write scalars in _canonical form?
-        void canonical(in bool canonical)
+        @property void canonical(in bool canonical)
         {
             canonical_ = canonical;
         }
 
         ///Set indentation width. 2 by default. Must not be zero.
-        void indent(in uint indent)
+        @property void indent(in uint indent)
         in
         {
             assert(indent != 0, "Can't use zero YAML indent width");
@@ -232,37 +234,37 @@ struct Dumper
         }
 
         ///Set preferred text _width.
-        void textWidth(in uint width)
+        @property void textWidth(in uint width)
         {
             textWidth_ = width;
         }
 
         ///Set line break to use. Unix by default.
-        void lineBreak(in LineBreak lineBreak)
+        @property void lineBreak(in LineBreak lineBreak)
         {
             lineBreak_ = lineBreak;
         }
 
         ///Set character _encoding to use. UTF-8 by default.
-        void encoding(in Encoding encoding)
+        @property void encoding(in Encoding encoding)
         {
             encoding_ = encoding;
         }
 
         ///Always explicitly write document start?
-        void explicitStart(in bool explicit)
+        @property void explicitStart(in bool explicit)
         {
             explicitStart_ = explicit;
         }
 
         ///Always explicitly write document end?
-        void explicitEnd(in bool explicit)
+        @property void explicitEnd(in bool explicit)
         {
             explicitEnd_ = explicit;
         }
 
         ///Specify YAML version string. "1.1" by default.
-        void YAMLVersion(in string YAMLVersion)
+        @property void YAMLVersion(in string YAMLVersion)
         {
             YAMLVersion_ = YAMLVersion;
         }
@@ -297,7 +299,7 @@ struct Dumper
          * dumper.dump(Node("foo"));
          * --------------------
          */
-        void tagDirectives(string[string] tags)
+        @property void tagDirectives(string[string] tags)
         {
             tagDirective[] t;
             foreach(handle, prefix; tags)
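What the Dumper changes above mean at the call site, as a hedged sketch (the struct below is a simplified stand-in, not the real Dumper): `@property` setters are assigned to like fields, and the `@disable`d comparison operators turn accidental use into a compile-time error.

    //Illustrative only - simplified stand-in for the real Dumper.
    struct DumperLike
    {
        private bool canonical_;

        @disable bool opEquals(ref DumperLike);
        @disable int opCmp(ref DumperLike);

        ///Write scalars in canonical form?
        @property void canonical(in bool canonical)
        {
            canonical_ = canonical;
        }
    }

    unittest
    {
        DumperLike d;
        d.canonical = true;   //assignment syntax via @property
        //d == d;             //would not compile: opEquals is @disable'd
    }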
@@ -148,8 +148,8 @@ struct Emitter
         ScalarStyle style_ = ScalarStyle.Invalid;
 
     public:
-        @disable int opCmp(ref Emitter e);
-        @disable bool opEquals(ref Emitter e);
+        @disable int opCmp(ref Emitter);
+        @disable bool opEquals(ref Emitter);
 
         /**
          * Construct an emitter.
@@ -159,7 +159,8 @@ struct Emitter
          * indent = Indentation width.
          * lineBreak = Line break character/s.
          */
-        this(Stream stream, bool canonical, int indent, int width, LineBreak lineBreak)
+        this(Stream stream, in bool canonical, in int indent, in int width,
+             in LineBreak lineBreak)
         in{assert(stream.writeable, "Can't emit YAML to a non-writable stream");}
         body
         {
@@ -226,7 +227,7 @@ struct Emitter
         }
 
         ///Write a string to the file/stream.
-        void writeString(string str)
+        void writeString(in string str)
         {
             try final switch(encoding_)
             {
@@ -276,8 +277,8 @@ struct Emitter
             immutable event = events_.next();
             static starts = [EventID.DocumentStart, EventID.SequenceStart, EventID.MappingStart];
             static ends = [EventID.DocumentEnd, EventID.SequenceEnd, EventID.MappingEnd];
             if(starts.canFind(event.id)) {++level;}
-            else if(ends.canFind(event.id)) {--level;}
+            else if(ends.canFind(event.id)){--level;}
             else if(event.id == EventID.StreamStart){level = -1;}
 
             if(level < 0)
@@ -329,7 +330,7 @@ struct Emitter
         }
 
         ///Expect nothing, throwing if we still have something.
-        void expectNothing()
+        void expectNothing() const
         {
             throw new Error("Expected nothing, but got " ~ event_.idString);
         }
@@ -684,8 +685,8 @@ struct Emitter
             }
 
             immutable event = events_.peek();
-            bool emptyScalar = event.id == EventID.Scalar && event.anchor.isNull() &&
+            const emptyScalar = event.id == EventID.Scalar && event.anchor.isNull() &&
                                event.tag.isNull() && event.implicit && event.value == "";
             return emptyScalar;
         }
 
@@ -867,7 +868,7 @@ struct Emitter
         static void encodeChar(Writer)(ref Writer writer, in dchar c)
         {
             char[4] data;
-            auto bytes = encode(data, c);
+            const bytes = encode(data, c);
             //For each byte add string in format %AB , where AB are hex digits of the byte.
             foreach(const char b; data[0 .. bytes])
             {
@@ -1261,6 +1262,9 @@ struct ScalarWriter
         }
 
     private:
+        @disable int opCmp(ref Emitter);
+        @disable bool opEquals(ref Emitter);
+
         ///Used as "null" UTF-32 character.
         immutable dcharNone = dchar.max;
 
@@ -1286,7 +1290,7 @@ struct ScalarWriter
 
     public:
         ///Construct a ScalarWriter using emitter to output text.
-        this(ref Emitter emitter, string text, bool split = true)
+        this(ref Emitter emitter, string text, in bool split = true)
         {
             emitter_ = &emitter;
             text_ = text;
@@ -1562,7 +1566,7 @@ struct ScalarWriter
         }
 
         ///Determine hints (indicators) for block scalar.
-        size_t determineBlockHints(ref char[] hints, uint bestIndent)
+        size_t determineBlockHints(ref char[] hints, uint bestIndent) const
         {
             size_t hintsIdx = 0;
             if(text_.length == 0){return hintsIdx;}
@@ -1597,7 +1601,7 @@ struct ScalarWriter
         {
             char[4] hints;
             hints[0] = indicator;
-            auto hintsLength = 1 + determineBlockHints(hints[1 .. $], emitter_.bestIndent_);
+            const hintsLength = 1 + determineBlockHints(hints[1 .. $], emitter_.bestIndent_);
             emitter_.writeIndicator(cast(string)hints[0 .. hintsLength], true);
             if(hints.length > 0 && hints[$ - 1] == '+')
             {
@@ -11,11 +11,11 @@ module dyaml.escapes;
 package:
 
 ///Translation table from YAML escapes to dchars.
-dchar[dchar] fromEscapes;
+immutable dchar[dchar] fromEscapes;
 ///Translation table from dchars to YAML escapes.
-dchar[dchar] toEscapes;
+immutable dchar[dchar] toEscapes;
 ///Translation table from prefixes of escaped hexadecimal format characters to their lengths.
-uint[dchar] escapeHexCodes;
+immutable uint[dchar] escapeHexCodes;
 
 
 static this()
@@ -47,6 +47,8 @@ enum EventID : ubyte
  */
 struct Event
 {
+    @disable int opCmp(ref Event);
+
     ///Value of the event, if any.
     string value;
     ///Start position of the event in file/stream.
@@ -108,7 +110,7 @@ Event event(EventID id)(in Mark start, in Mark end, in Anchor anchor = Anchor())
 */
Event collectionStartEvent(EventID id)(in Mark start, in Mark end, in Anchor anchor,
                                        in Tag tag, in bool implicit,
-                                       in CollectionStyle style)
+                                       in CollectionStyle style) pure
 {
     static assert(id == EventID.SequenceStart || id == EventID.SequenceEnd ||
                   id == EventID.MappingStart || id == EventID.MappingEnd);
@@ -123,7 +125,7 @@ Event collectionStartEvent(EventID id)(in Mark start, in Mark end, in Anchor anc
 * end = End position of the event in the file/stream.
 * encoding = Encoding of the stream.
 */
-Event streamStartEvent(in Mark start, in Mark end, Encoding encoding)
+Event streamStartEvent(in Mark start, in Mark end, in Encoding encoding) pure
 {
     return Event(null, start, end, Anchor(), Tag(), EventID.StreamStart,
                  ScalarStyle.Invalid, false, false, TagDirectives(), encoding);
@@ -148,8 +150,8 @@ alias collectionStartEvent!(EventID.MappingStart) mappingStartEvent;
 * YAMLVersion = YAML version string of the document.
 * tagDirectives = Tag directives of the document.
 */
-Event documentStartEvent(Mark start, Mark end, bool explicit, string YAMLVersion,
-                         TagDirectives tagDirectives)
+Event documentStartEvent(in Mark start, in Mark end, bool explicit, string YAMLVersion,
+                         in TagDirectives tagDirectives) pure
 {
     return Event(YAMLVersion, start, end, Anchor(), Tag(), EventID.DocumentStart,
                  ScalarStyle.Invalid, explicit, false, tagDirectives);
@@ -162,7 +164,7 @@ Event documentStartEvent(Mark start, Mark end, bool explicit, string YAMLVersion
 * end = End position of the event in the file/stream.
 * explicit = Is this an explicit document end?
 */
-Event documentEndEvent(Mark start, Mark end, bool explicit)
+Event documentEndEvent(in Mark start, in Mark end, bool explicit) pure
 {
     return Event(null, start, end, Anchor(), Tag(), EventID.DocumentEnd,
                  ScalarStyle.Invalid, explicit);
@@ -181,7 +183,7 @@ Event documentEndEvent(Mark start, Mark end, bool explicit)
 */
Event scalarEvent(in Mark start, in Mark end, in Anchor anchor, in Tag tag,
                   in Tuple!(bool, bool) implicit, in string value,
-                  in ScalarStyle style = ScalarStyle.Invalid)
+                  in ScalarStyle style = ScalarStyle.Invalid) pure
 {
     return Event(value, start, end, anchor, tag, EventID.Scalar, style, implicit[0],
                  implicit[1]);
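Several of the event construction helpers above also gain `pure`. A minimal, hedged sketch of the same idea with simplified stand-in types (none of these names are the real dyaml.event API):

    //Illustrative only - simplified stand-ins, not dyaml.event types.
    struct Pos { uint line, column; }
    struct MiniEvent { Pos start, end; bool explicit; }

    //`pure` is verifiable here: the result depends only on the arguments,
    //and the `in` parameters cannot be modified.
    MiniEvent miniDocumentEnd(in Pos start, in Pos end, in bool explicit) pure
    {
        return MiniEvent(start, end, explicit);
    }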
@@ -35,6 +35,8 @@ package:
 struct Flags(names ...) if(names.length <= 8)
 {
     private:
+        @disable int opCmp(ref Flags);
+
         ///Byte storing the flags.
         ubyte flags_;
 
@@ -117,6 +117,8 @@ struct Loader
 
     public:
         @disable this();
+        @disable int opCmp(ref Loader);
+        @disable bool opEquals(ref Loader);
 
         /**
          * Construct a Loader to load YAML from a file.
dyaml/node.d

@@ -746,7 +746,7 @@ struct Node
             }
             else if(isMapping())
             {
-                auto idx = findPair(index);
+                const idx = findPair(index);
                 if(idx < 0){add(index, value);}
                 else
                 {
@@ -1033,7 +1033,7 @@ struct Node
             }
             else if(isMapping())
             {
-                auto idx = findPair!(T, true)(value);
+                const idx = findPair!(T, true)(value);
                 if(idx >= 0)
                 {
                     auto pairs = as!(Node.Pair[])();
@@ -1099,7 +1099,7 @@ struct Node
             }
             else if(isMapping())
             {
-                auto idx = findPair(index);
+                const idx = findPair(index);
                 if(idx >= 0)
                 {
                     auto pairs = get!(Node.Pair[])();
@@ -1185,9 +1185,9 @@ struct Node
                 const c2 = rhs.value_.get!(const T);
                 if(c1 is c2){return true;}
                 if(c1.length != c2.length){return false;}
-                foreach(ref i1, ref i2; lockstep(c1, c2))
+                foreach(i; 0 .. c1.length)
                 {
-                    if(!i1.equals!useTag(i2)){return false;}
+                    if(!c1[i].equals!useTag(c2[i])){return false;}
                 }
                 return true;
             }
@@ -347,7 +347,7 @@ final class Parser
         {
             immutable token = scanner_.getToken();
             //Name and value are separated by '\0'.
-            auto parts = token.value.split("\0");
+            const parts = token.value.split("\0");
             const name = parts[0];
             if(name == "YAML")
             {
@@ -367,8 +367,7 @@ final class Parser
             foreach(ref pair; tagDirectives_)
             {
                 //handle
-                auto h = pair[0];
-                auto replacement = pair[1];
+                const h = pair[0];
                 enforce(h != handle, new Error("Duplicate tag handle: " ~ handle,
                                                token.startMark));
             }
@@ -382,13 +381,10 @@ final class Parser
             foreach(ref defaultPair; defaultTagDirectives_)
             {
                 bool found = false;
-                foreach(ref pair; tagDirectives_)
+                foreach(ref pair; tagDirectives_) if(defaultPair[0] == pair[0])
                 {
-                    if(defaultPair[0] == pair[0] )
-                    {
-                        found = true;
-                        break;
-                    }
+                    found = true;
+                    break;
                 }
                 if(!found){tagDirectives_ ~= defaultPair;}
             }
@@ -415,7 +411,7 @@ final class Parser
         */
 
        ///Parse a node.
-        Event parseNode(bool block, bool indentlessSequence = false)
+        Event parseNode(in bool block, in bool indentlessSequence = false)
        {
            if(scanner_.checkToken(TokenID.Alias))
            {
@@ -861,8 +857,6 @@ final class Parser
        ///Return an empty scalar.
        Event processEmptyScalar(in Mark mark)
        {
-            //PyYAML uses a Tuple!(true, false) for the second last arg here,
-            //but the second bool is never used after that - so we don't use it.
            return scalarEvent(mark, mark, Anchor(), Tag(), tuple(true, false), "");
        }
 }
@@ -52,6 +52,8 @@ struct Queue(T)
 
     public:
         @disable void opAssign(ref Queue);
+        @disable bool opEquals(ref Queue);
+        @disable int opCmp(ref Queue);
 
         ///Destroy the queue, deallocating all its elements.
         ~this()
@@ -103,7 +103,7 @@ final class Reader
             {
                 case -1:
                     //readBOM() eats two more bytes in this case so get them back
-                    wchar bytes = stream_.getcw();
+                    const wchar bytes = stream_.getcw();
                     rawBuffer8_[0] = cast(char)(bytes % 256);
                     rawBuffer8_[1] = cast(char)(bytes / 256);
                     rawUsed_ = 2;
@@ -444,7 +444,7 @@ final class Reader
         *
         * Returns: True if all the characters are printable, false otherwise.
         */
-        static bool printable(const ref dchar[] chars)
+        static bool printable(const ref dchar[] chars) pure
        {
            foreach(c; chars)
            {
@@ -460,7 +460,7 @@ final class Reader
        }
 
        ///Are we done reading?
-        @property bool done()
+        @property bool done() const
        {
            return (available_ == 0 &&
                    ((encoding_ == Encoding.UTF_8 && rawUsed_ == 0) ||
@@ -54,6 +54,9 @@ final class Representer
         CollectionStyle defaultCollectionStyle_ = CollectionStyle.Invalid;
 
     public:
+        @disable bool opEquals(ref Representer);
+        @disable int opCmp(ref Representer);
+
         /**
          * Construct a Representer.
          *
@@ -62,8 +65,9 @@ final class Representer
          * disabled to use custom representer
          * functions for default types.
          */
-        this(bool useDefaultRepresenters = true)
+        this(in bool useDefaultRepresenters = true)
        {
+            if(!useDefaultRepresenters){return;}
            addRepresenter!YAMLNull(&representNull);
            addRepresenter!string(&representString);
            addRepresenter!(ubyte[])(&representBytes);
@@ -83,13 +87,13 @@ final class Representer
        }
 
        ///Set default _style for scalars. Invalid means the _style is chosen automatically.
-        @property void defaultScalarStyle(ScalarStyle style)
+        @property void defaultScalarStyle(in ScalarStyle style)
        {
            defaultScalarStyle_ = style;
        }
 
        ///Set default _style for collections. Invalid means the _style is chosen automatically.
-        @property void defaultCollectionStyle(CollectionStyle style)
+        @property void defaultCollectionStyle(in CollectionStyle style)
        {
            defaultCollectionStyle_ = style;
        }
@@ -50,6 +50,9 @@ final class Resolver
         Tuple!(Tag, Regex!char)[][dchar] yamlImplicitResolvers_;
 
     public:
+        @disable bool opEquals(ref Resolver);
+        @disable int opCmp(ref Resolver);
+
         /**
          * Construct a Resolver.
          *
@@ -138,7 +141,7 @@ final class Resolver
          *
          * Returns: Resolved tag.
          */
-        Tag resolve(NodeID kind, Tag tag, string value, in bool implicit)
+        Tag resolve(in NodeID kind, Tag tag, string value, in bool implicit)
        {
            if(!tag.isNull() && tag.get() != "!"){return tag;}
 
@@ -182,7 +182,7 @@ final class Scanner
         * or if there are any tokens left if no types specified.
         * false otherwise.
         */
-        bool checkToken(TokenID[] ids ...)
+        bool checkToken(in TokenID[] ids ...)
        {
            //Check if the next token is one of specified types.
            while(needMoreTokens()){fetchToken();}
@@ -255,7 +255,7 @@ final class Scanner
            unwindIndent(reader_.column);
 
            //Get the next character.
-            dchar c = reader_.peek();
+            const dchar c = reader_.peek();
 
            //Fetch the token.
            if(c == '\0') {return fetchStreamEnd();}
@@ -331,7 +331,7 @@ final class Scanner
        void savePossibleSimpleKey()
        {
            //Check if a simple key is required at the current position.
-            bool required = (flowLevel_ == 0 && indent_ == reader_.column);
+            const required = (flowLevel_ == 0 && indent_ == reader_.column);
            assert(allowSimpleKey_ || !required, "A simple key is required only if it is "
                   "the first token in the current line. Therefore it is always allowed.");
 
@@ -339,7 +339,7 @@ final class Scanner
 
            //The next token might be a simple key, so save its number and position.
            removePossibleSimpleKey();
-            uint tokenCount = tokensTaken_ + cast(uint)tokens_.length;
+            const tokenCount = tokensTaken_ + cast(uint)tokens_.length;
 
            const line = reader_.line;
            const column = reader_.column;
@@ -379,7 +379,7 @@ final class Scanner
         *
         * Params: column = Current column in the file/stream.
         */
-        void unwindIndent(int column)
+        void unwindIndent(in int column)
        {
            if(flowLevel_ > 0)
            {
@@ -582,10 +582,10 @@ final class Scanner
            if(possibleSimpleKeys_.length > flowLevel_ &&
               !possibleSimpleKeys_[flowLevel_].isNull)
            {
-                auto key = possibleSimpleKeys_[flowLevel_];
+                const key = possibleSimpleKeys_[flowLevel_];
                possibleSimpleKeys_[flowLevel_].isNull = true;
                Mark keyMark = Mark(key.line, key.column);
-                auto idx = key.tokenIndex - tokensTaken_;
+                const idx = key.tokenIndex - tokensTaken_;
 
                assert(idx >= 0);
 
@@ -922,7 +922,7 @@ final class Scanner
        dstring scanTagDirectiveValue(in Mark startMark)
        {
            findNextNonSpace();
-            dstring handle = scanTagDirectiveHandle(startMark);
+            const handle = scanTagDirectiveHandle(startMark);
            findNextNonSpace();
            return handle ~ '\0' ~ scanTagDirectivePrefix(startMark);
        }
@@ -979,7 +979,7 @@ final class Scanner
        {
            const startMark = reader_.mark;
 
-            dchar i = reader_.get();
+            const dchar i = reader_.get();
 
            dstring value = i == '*' ? scanAlphaNumeric!("an alias")(startMark)
                                     : scanAlphaNumeric!("an anchor")(startMark);
@@ -1058,7 +1058,7 @@ final class Scanner
        }
 
        ///Scan a block scalar token with specified style.
-        Token scanBlockScalar(ScalarStyle style)
+        Token scanBlockScalar(in ScalarStyle style)
        {
            const startMark = reader_.mark;
 
@@ -1234,7 +1234,7 @@ final class Scanner
        }
 
        ///Scan a qouted flow scalar token with specified quotes.
-        Token scanFlowScalar(ScalarStyle quotes)
+        Token scanFlowScalar(in ScalarStyle quotes)
        {
            const startMark = reader_.mark;
            const quote = reader_.get();
@@ -1256,7 +1256,7 @@ final class Scanner
        }
 
        ///Scan nonspace characters in a flow scalar.
-        void scanFlowScalarNonSpaces(ScalarStyle quotes, in Mark startMark)
+        void scanFlowScalarNonSpaces(in ScalarStyle quotes, in Mark startMark)
        {
            for(;;)
            {
@@ -1423,9 +1423,9 @@ final class Scanner
            for(;;)
            {
                c = reader_.peek(length);
-                bool done = search.canFind(c) || (flowLevel_ == 0 && c == ':' &&
+                const bool done = search.canFind(c) || (flowLevel_ == 0 && c == ':' &&
                            search.canFind(reader_.peek(length + 1))) ||
                            (flowLevel_ > 0 && ",:?[]{}"d.canFind(c));
                if(done){break;}
                ++length;
            }
@@ -1509,7 +1509,7 @@ final class Scanner
        }
 
        ///Scan handle of a tag token.
-        dstring scanTagHandle(string name, in Mark startMark)
+        dstring scanTagHandle(in string name, in Mark startMark)
        {
            dchar c = reader_.peek();
            enforce(c == '!',
@@ -1538,7 +1538,7 @@ final class Scanner
        }
 
        ///Scan URI in a tag token.
-        dstring scanTagURI(string name, in Mark startMark)
+        dstring scanTagURI(in string name, in Mark startMark)
        {
            //Note: we do not check if URI is well-formed.
            //Using appender_, so clear it when we're done.
@@ -1570,7 +1570,7 @@ final class Scanner
        }
 
        ///Scan URI escape sequences.
-        dstring scanURIEscapes(string name, in Mark startMark)
+        dstring scanURIEscapes(in string name, in Mark startMark)
        {
            ubyte[] bytes;
            Mark mark = reader_.mark;
@@ -1584,7 +1584,7 @@ final class Scanner
                //Converting 2 hexadecimal digits to a byte.
                foreach(k; 0 .. 2)
                {
-                    dchar c = reader_.peek(k);
+                    const dchar c = reader_.peek(k);
                    enforce(isHexDigit(c),
                            new Error("While scanning a " ~ name, startMark,
                                      "expected URI escape sequence of "
@@ -69,7 +69,7 @@ struct Serializer
         * tagDirectives = Tag directives to emit.
         */
        this(ref Emitter emitter, Resolver resolver, Encoding encoding,
-             bool explicitStart, bool explicitEnd, string YAMLVersion,
+             in bool explicitStart, in bool explicitEnd, string YAMLVersion,
             TagDirectives tagDirectives)
        {
            emitter_ = &emitter;
@@ -200,7 +200,7 @@ struct Serializer
            if(node.isSequence)
            {
                const defaultTag = resolver_.defaultSequenceTag;
-                bool implicit = node.tag_ == defaultTag;
+                const implicit = node.tag_ == defaultTag;
                emitter_.emit(sequenceStartEvent(Mark(), Mark(), aliased, node.tag_,
                                                 implicit, node.collectionStyle));
                foreach(ref Node item; node)
@@ -212,8 +212,8 @@ struct Serializer
            }
            if(node.isMapping)
            {
-                auto defaultTag = resolver_.defaultMappingTag;
-                bool implicit = node.tag_ == defaultTag;
+                const defaultTag = resolver_.defaultMappingTag;
+                const implicit = node.tag_ == defaultTag;
                emitter_.emit(mappingStartEvent(Mark(), Mark(), aliased, node.tag_,
                                                implicit, node.collectionStyle));
                foreach(ref Node key, ref Node value; node)
@@ -63,12 +63,9 @@ template SharedObject(T, MixedIn)
        ///Add an object and return its index.
        uint add(ref T object)
        {
-            foreach(uint index, ref T known; objects_)
+            foreach(index, ref known; objects_) if(object == known)
            {
-                if(object == known)
-                {
-                    return index;
-                }
+                return cast(uint)index;
            }
            objects_ ~= object;
            return cast(uint)objects_.length - 1;
@@ -19,8 +19,11 @@ struct Tag
        immutable(char)* tag_ = null;
 
    public:
+        @disable int opCmp(ref Tag);
+
+
        ///Construct a tag from a string representation.
-        this(string tag)
+        this(in string tag)
        {
            if(tag is null || tag == "")
            {
@@ -53,6 +53,8 @@ enum TokenID : ubyte
 */
 struct Token
 {
+    @disable int opCmp(ref Token);
+
     ///Value of the token, if any.
     string value;
     ///Start position of the token in file/stream.
@@ -101,7 +103,7 @@ Token simpleToken(TokenID id)(in Mark start, in Mark end) pure
 * end = End position of the token.
 * encoding = Encoding of the stream.
 */
-Token streamStartToken(in Mark start, in Mark end, in Encoding encoding)
+Token streamStartToken(in Mark start, in Mark end, in Encoding encoding) pure
 {
     return Token(null, start, end, TokenID.StreamStart, ScalarStyle.Invalid, encoding);
 }
 