major attribute fixing

Cameron Ross 2018-03-23 18:35:16 -03:00 committed by BBasile
parent d60db0c7df
commit 6088f0b632
31 changed files with 436 additions and 467 deletions
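Most of the hunks below replace @system and @trusted annotations with @safe (or drop explicit attributes so they can be inferred for templates) and remove idioms that @safe code cannot use. As a reminder of what the attribute enforces, a minimal sketch, illustrative only and not code from this commit: a @safe function is checked by the compiler and may not call @system code or perform unchecked operations such as pointer arithmetic.

void lowLevel(const char[] str) @system
{
    auto p = str.ptr + 1;      // pointer arithmetic is allowed only in @system code
}

void highLevel(const char[] str) @safe
{
    auto tail = str[1 .. $];   // bounds-checked slicing is fine in @safe code
    // lowLevel(str);          // error: a @safe function cannot call a @system function
}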


@ -232,7 +232,7 @@ final class Composer
} }
///Compose a scalar node. ///Compose a scalar node.
Node composeScalarNode() @system Node composeScalarNode() @safe
{ {
immutable event = parser_.getEvent(); immutable event = parser_.getEvent();
const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value, const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value,
@ -263,8 +263,6 @@ final class Composer
nodeAppender.put(composeNode(pairAppenderLevel, nodeAppenderLevel + 1)); nodeAppender.put(composeNode(pairAppenderLevel, nodeAppenderLevel + 1));
} }
core.memory.GC.disable();
scope(exit){core.memory.GC.enable();}
Node node = constructor_.node(startEvent.startMark, parser_.getEvent().endMark, Node node = constructor_.node(startEvent.startMark, parser_.getEvent().endMark,
tag, nodeAppender.data.dup, startEvent.collectionStyle); tag, nodeAppender.data.dup, startEvent.collectionStyle);
nodeAppender.clear(); nodeAppender.clear();
@ -286,7 +284,7 @@ final class Composer
* Returns: Flattened mapping as pairs. * Returns: Flattened mapping as pairs.
*/ */
Node.Pair[] flatten(ref Node root, const Mark startMark, const Mark endMark, Node.Pair[] flatten(ref Node root, const Mark startMark, const Mark endMark,
const uint pairAppenderLevel, const uint nodeAppenderLevel) @system const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe
{ {
void error(Node node) void error(Node node)
{ {
@ -306,11 +304,11 @@ final class Composer
if(root.isMapping) if(root.isMapping)
{ {
Node[] toMerge; Node[] toMerge;
toMerge.reserve(root.length);
foreach(ref Node key, ref Node value; root) foreach(ref Node key, ref Node value; root)
{ {
if(key.isType!YAMLMerge) if(key.isType!YAMLMerge)
{ {
toMerge.assumeSafeAppend();
toMerge ~= value; toMerge ~= value;
} }
else else
@ -337,8 +335,6 @@ final class Composer
error(root); error(root);
} }
core.memory.GC.disable();
scope(exit){core.memory.GC.enable();}
auto flattened = pairAppender.data.dup; auto flattened = pairAppender.data.dup;
pairAppender.clear(); pairAppender.clear();
@ -381,8 +377,6 @@ final class Composer
pairAppenderLevel + 1, nodeAppenderLevel)); pairAppenderLevel + 1, nodeAppenderLevel));
} }
core.memory.GC.disable();
scope(exit){core.memory.GC.enable();}
Node node = constructor_.node(startEvent.startMark, parser_.getEvent().endMark, Node node = constructor_.node(startEvent.startMark, parser_.getEvent().endMark,
tag, pairAppender.data.dup, startEvent.collectionStyle); tag, pairAppender.data.dup, startEvent.collectionStyle);
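The hunks above also delete the core.memory.GC.disable() / scope(exit) GC.enable() guards and the reserve()/assumeSafeAppend() calls around appends, leaving plain ~= appends. assumeSafeAppend in particular is a @system primitive, so dropping it is one of the prerequisites for the new @safe signatures; the GC guard looks like a manual allocation optimization rather than something the logic depends on. A rough sketch of the before/after append pattern, using generic names rather than the composer's own:

void buildUnsafe(int value) @system
{
    import core.memory : GC;
    int[] items;
    items.reserve(16);            // capacity hint; optimization only
    GC.disable();                 // pause collection around the append
    scope(exit) { GC.enable(); }
    items.assumeSafeAppend();     // @system: asserts nothing else aliases the slack capacity
    items ~= value;
}

void buildSafe(int value) @safe
{
    int[] items;
    items ~= value;               // plain GC-managed append; allowed in @safe code
}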


@ -182,7 +182,6 @@ final class Constructor
* -------------------- * --------------------
*/ */
void addConstructorScalar(T)(const string tag, T function(ref Node) ctor) void addConstructorScalar(T)(const string tag, T function(ref Node) ctor)
@safe nothrow
{ {
const t = tag; const t = tag;
auto deleg = addConstructor!T(t, ctor); auto deleg = addConstructor!T(t, ctor);
@ -233,7 +232,6 @@ final class Constructor
* -------------------- * --------------------
*/ */
void addConstructorSequence(T)(const string tag, T function(ref Node) ctor) void addConstructorSequence(T)(const string tag, T function(ref Node) ctor)
@safe nothrow
{ {
const t = tag; const t = tag;
auto deleg = addConstructor!T(t, ctor); auto deleg = addConstructor!T(t, ctor);
@ -284,7 +282,6 @@ final class Constructor
* -------------------- * --------------------
*/ */
void addConstructorMapping(T)(const string tag, T function(ref Node) ctor) void addConstructorMapping(T)(const string tag, T function(ref Node) ctor)
@safe nothrow
{ {
const t = tag; const t = tag;
auto deleg = addConstructor!T(t, ctor); auto deleg = addConstructor!T(t, ctor);
@ -346,7 +343,6 @@ final class Constructor
* ctor = Constructor function. * ctor = Constructor function.
*/ */
auto addConstructor(T)(const string tag, T function(ref Node) ctor) auto addConstructor(T)(const string tag, T function(ref Node) ctor)
@safe nothrow
{ {
assert((tag in fromScalar_) is null && assert((tag in fromScalar_) is null &&
(tag in fromSequence_) is null && (tag in fromSequence_) is null &&
@ -363,7 +359,7 @@ final class Constructor
} }
//Get the array of constructor functions for scalar, sequence or mapping. //Get the array of constructor functions for scalar, sequence or mapping.
@property auto delegates(T)() @safe pure nothrow @nogc @property auto delegates(T)()
{ {
static if(is(T : string)) {return &fromScalar_;} static if(is(T : string)) {return &fromScalar_;}
else static if(is(T : Node[])) {return &fromSequence_;} else static if(is(T : Node[])) {return &fromSequence_;}
@ -397,7 +393,7 @@ bool constructBool(ref Node node) @safe
} }
/// Construct an integer (long) _node. /// Construct an integer (long) _node.
long constructLong(ref Node node) long constructLong(ref Node node) @safe
{ {
string value = node.as!string().replace("_", ""); string value = node.as!string().replace("_", "");
const char c = value[0]; const char c = value[0];
@ -442,9 +438,9 @@ long constructLong(ref Node node)
return result; return result;
} }
unittest @safe unittest
{ {
long getLong(string str) long getLong(string str) @safe
{ {
auto node = Node(str); auto node = Node(str);
return constructLong(node); return constructLong(node);
@ -466,7 +462,7 @@ unittest
} }
/// Construct a floating point (real) _node. /// Construct a floating point (real) _node.
real constructReal(ref Node node) real constructReal(ref Node node) @safe
{ {
string value = node.as!string().replace("_", "").toLower(); string value = node.as!string().replace("_", "").toLower();
const char c = value[0]; const char c = value[0];
@ -508,14 +504,14 @@ real constructReal(ref Node node)
return result; return result;
} }
unittest @safe unittest
{ {
bool eq(real a, real b, real epsilon = 0.2) bool eq(real a, real b, real epsilon = 0.2) @safe
{ {
return a >= (b - epsilon) && a <= (b + epsilon); return a >= (b - epsilon) && a <= (b + epsilon);
} }
real getReal(string str) real getReal(string str) @safe
{ {
auto node = Node(str); auto node = Node(str);
return constructReal(node); return constructReal(node);
@ -537,7 +533,7 @@ unittest
} }
/// Construct a binary (base64) _node. /// Construct a binary (base64) _node.
ubyte[] constructBinary(ref Node node) ubyte[] constructBinary(ref Node node) @safe
{ {
import std.ascii : newline; import std.ascii : newline;
import std.array : array; import std.array : array;
@ -554,12 +550,12 @@ ubyte[] constructBinary(ref Node node)
} }
} }
unittest @safe unittest
{ {
ubyte[] test = cast(ubyte[])"The Answer: 42"; auto test = "The Answer: 42".representation;
char[] buffer; char[] buffer;
buffer.length = 256; buffer.length = 256;
string input = cast(string)Base64.encode(test, buffer); string input = Base64.encode(test, buffer).idup;
auto node = Node(input); auto node = Node(input);
auto value = constructBinary(node); auto value = constructBinary(node);
assert(value == test); assert(value == test);
@ -567,7 +563,7 @@ unittest
} }
/// Construct a timestamp (SysTime) _node. /// Construct a timestamp (SysTime) _node.
SysTime constructTimestamp(ref Node node) SysTime constructTimestamp(ref Node node) @safe
{ {
string value = node.as!string; string value = node.as!string;
@ -639,7 +635,7 @@ SysTime constructTimestamp(ref Node node)
assert(false, "This code should never be reached"); assert(false, "This code should never be reached");
} }
unittest @safe unittest
{ {
writeln("D:YAML construction timestamp unittest"); writeln("D:YAML construction timestamp unittest");
@ -669,23 +665,22 @@ unittest
} }
/// Construct a string _node. /// Construct a string _node.
string constructString(ref Node node) string constructString(ref Node node) @safe
{ {
return node.as!string; return node.as!string;
} }
/// Convert a sequence of single-element mappings into a sequence of pairs. /// Convert a sequence of single-element mappings into a sequence of pairs.
Node.Pair[] getPairs(string type, Node[] nodes) Node.Pair[] getPairs(string type, Node[] nodes) @safe
{ {
Node.Pair[] pairs; Node.Pair[] pairs;
pairs.reserve(nodes.length);
foreach(ref node; nodes) foreach(ref node; nodes)
{ {
enforce(node.isMapping && node.length == 1, enforce(node.isMapping && node.length == 1,
new Exception("While constructing " ~ type ~ new Exception("While constructing " ~ type ~
", expected a mapping with single element")); ", expected a mapping with single element"));
pairs.assumeSafeAppend();
pairs ~= node.as!(Node.Pair[]); pairs ~= node.as!(Node.Pair[]);
} }
@ -693,14 +688,13 @@ Node.Pair[] getPairs(string type, Node[] nodes)
} }
/// Construct an ordered map (ordered sequence of key:value pairs without duplicates) _node. /// Construct an ordered map (ordered sequence of key:value pairs without duplicates) _node.
Node.Pair[] constructOrderedMap(ref Node node) Node.Pair[] constructOrderedMap(ref Node node) @safe
{ {
auto pairs = getPairs("ordered map", node.as!(Node[])); auto pairs = getPairs("ordered map", node.as!(Node[]));
//Detect duplicates. //Detect duplicates.
//TODO this should be replaced by something with deterministic memory allocation. //TODO this should be replaced by something with deterministic memory allocation.
auto keys = redBlackTree!Node(); auto keys = redBlackTree!Node();
scope(exit){keys.destroy();}
foreach(ref pair; pairs) foreach(ref pair; pairs)
{ {
enforce(!(pair.key in keys), enforce(!(pair.key in keys),
@ -710,37 +704,35 @@ Node.Pair[] constructOrderedMap(ref Node node)
} }
return pairs; return pairs;
} }
unittest @safe unittest
{ {
writeln("D:YAML construction ordered map unittest"); writeln("D:YAML construction ordered map unittest");
alias Node.Pair Pair; alias Node.Pair Pair;
Node[] alternateTypes(uint length) Node[] alternateTypes(uint length) @safe
{ {
Node[] pairs; Node[] pairs;
foreach(long i; 0 .. length) foreach(long i; 0 .. length)
{ {
auto pair = (i % 2) ? Pair(i.to!string, i) : Pair(i, i.to!string); auto pair = (i % 2) ? Pair(i.to!string, i) : Pair(i, i.to!string);
pairs.assumeSafeAppend();
pairs ~= Node([pair]); pairs ~= Node([pair]);
} }
return pairs; return pairs;
} }
Node[] sameType(uint length) Node[] sameType(uint length) @safe
{ {
Node[] pairs; Node[] pairs;
foreach(long i; 0 .. length) foreach(long i; 0 .. length)
{ {
auto pair = Pair(i.to!string, i); auto pair = Pair(i.to!string, i);
pairs.assumeSafeAppend();
pairs ~= Node([pair]); pairs ~= Node([pair]);
} }
return pairs; return pairs;
} }
bool hasDuplicates(Node[] nodes) bool hasDuplicates(Node[] nodes) @safe
{ {
auto node = Node(nodes); auto node = Node(nodes);
return null !is collectException(constructOrderedMap(node)); return null !is collectException(constructOrderedMap(node));
@ -755,13 +747,13 @@ unittest
} }
/// Construct a pairs (ordered sequence of key: value pairs allowing duplicates) _node. /// Construct a pairs (ordered sequence of key: value pairs allowing duplicates) _node.
Node.Pair[] constructPairs(ref Node node) Node.Pair[] constructPairs(ref Node node) @safe
{ {
return getPairs("pairs", node.as!(Node[])); return getPairs("pairs", node.as!(Node[]));
} }
/// Construct a set _node. /// Construct a set _node.
Node[] constructSet(ref Node node) Node[] constructSet(ref Node node) @safe
{ {
auto pairs = node.as!(Node.Pair[]); auto pairs = node.as!(Node.Pair[]);
@ -769,28 +761,26 @@ Node[] constructSet(ref Node node)
// memory allocation if possible. // memory allocation if possible.
// Detect duplicates. // Detect duplicates.
ubyte[Node] map; ubyte[Node] map;
scope(exit){map.destroy();}
Node[] nodes; Node[] nodes;
nodes.reserve(pairs.length);
foreach(ref pair; pairs) foreach(ref pair; pairs)
{ {
enforce((pair.key in map) is null, new Exception("Duplicate entry in a set")); enforce((pair.key in map) is null, new Exception("Duplicate entry in a set"));
map[pair.key] = 0; map[pair.key] = 0;
nodes.assumeSafeAppend();
nodes ~= pair.key; nodes ~= pair.key;
} }
return nodes; return nodes;
} }
unittest @safe unittest
{ {
writeln("D:YAML construction set unittest"); writeln("D:YAML construction set unittest");
Node.Pair[] set(uint length) Node.Pair[] set(uint length) @safe
{ {
Node.Pair[] pairs; Node.Pair[] pairs;
foreach(long i; 0 .. length) foreach(long i; 0 .. length)
{ {
pairs.assumeSafeAppend();
pairs ~= Node.Pair(i.to!string, YAMLNull()); pairs ~= Node.Pair(i.to!string, YAMLNull());
} }
@ -827,19 +817,18 @@ unittest
} }
/// Construct a sequence (array) _node. /// Construct a sequence (array) _node.
Node[] constructSequence(ref Node node) Node[] constructSequence(ref Node node) @safe
{ {
return node.as!(Node[]); return node.as!(Node[]);
} }
/// Construct an unordered map (unordered set of key:value _pairs without duplicates) _node. /// Construct an unordered map (unordered set of key:value _pairs without duplicates) _node.
Node.Pair[] constructMap(ref Node node) Node.Pair[] constructMap(ref Node node) @safe
{ {
auto pairs = node.as!(Node.Pair[]); auto pairs = node.as!(Node.Pair[]);
//Detect duplicates. //Detect duplicates.
//TODO this should be replaced by something with deterministic memory allocation. //TODO this should be replaced by something with deterministic memory allocation.
auto keys = redBlackTree!Node(); auto keys = redBlackTree!Node();
scope(exit){keys.destroy();}
foreach(ref pair; pairs) foreach(ref pair; pairs)
{ {
enforce(!(pair.key in keys), enforce(!(pair.key in keys),
@ -868,26 +857,26 @@ struct MyStruct
} }
} }
MyStruct constructMyStructScalar(ref Node node) MyStruct constructMyStructScalar(ref Node node) @safe
{ {
// Guaranteed to be string as we construct from scalar. // Guaranteed to be string as we construct from scalar.
auto parts = node.as!string().split(":"); auto parts = node.as!string().split(":");
return MyStruct(to!int(parts[0]), to!int(parts[1]), to!int(parts[2])); return MyStruct(to!int(parts[0]), to!int(parts[1]), to!int(parts[2]));
} }
MyStruct constructMyStructSequence(ref Node node) MyStruct constructMyStructSequence(ref Node node) @safe
{ {
// node is guaranteed to be sequence. // node is guaranteed to be sequence.
return MyStruct(node[0].as!int, node[1].as!int, node[2].as!int); return MyStruct(node[0].as!int, node[1].as!int, node[2].as!int);
} }
MyStruct constructMyStructMapping(ref Node node) MyStruct constructMyStructMapping(ref Node node) @safe
{ {
// node is guaranteed to be mapping. // node is guaranteed to be mapping.
return MyStruct(node["x"].as!int, node["y"].as!int, node["z"].as!int); return MyStruct(node["x"].as!int, node["y"].as!int, node["z"].as!int);
} }
unittest @safe unittest
{ {
char[] data = "!mystruct 1:2:3".dup; char[] data = "!mystruct 1:2:3".dup;
auto loader = Loader(data); auto loader = Loader(data);
@ -899,7 +888,7 @@ unittest
assert(node.as!MyStruct == MyStruct(1, 2, 3)); assert(node.as!MyStruct == MyStruct(1, 2, 3));
} }
unittest @safe unittest
{ {
char[] data = "!mystruct [1, 2, 3]".dup; char[] data = "!mystruct [1, 2, 3]".dup;
auto loader = Loader(data); auto loader = Loader(data);
@ -911,7 +900,7 @@ unittest
assert(node.as!MyStruct == MyStruct(1, 2, 3)); assert(node.as!MyStruct == MyStruct(1, 2, 3));
} }
unittest @safe unittest
{ {
char[] data = "!mystruct {x: 1, y: 2, z: 3}".dup; char[] data = "!mystruct {x: 1, y: 2, z: 3}".dup;
auto loader = Loader(data); auto loader = Loader(data);
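In the constructBinary unittest earlier in this file's diff, the unsafe casts are replaced by library helpers: .representation views the string literal as immutable(ubyte)[] without a cast, and .idup copies the encoder's char[] result into a string instead of casting it. A small self-contained sketch of the same pattern, assuming only std.base64 and std.string from Phobos:

@safe unittest
{
    import std.base64 : Base64;
    import std.string : representation;

    auto bytes = "The Answer: 42".representation;        // immutable(ubyte)[], no cast(ubyte[])
    char[] buffer = new char[](256);
    string encoded = Base64.encode(bytes, buffer).idup;   // copy instead of cast(string)
    assert(Base64.decode(encoded) == bytes);
}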


@ -80,20 +80,20 @@ import dyaml.tagdirective;
*/ */
struct Dumper struct Dumper
{ {
unittest @safe unittest
{ {
auto node = Node([1, 2, 3, 4, 5]); auto node = Node([1, 2, 3, 4, 5]);
Dumper(new YMemoryStream()).dump(node); Dumper(new YMemoryStream()).dump(node);
} }
unittest @safe unittest
{ {
auto node1 = Node([1, 2, 3, 4, 5]); auto node1 = Node([1, 2, 3, 4, 5]);
auto node2 = Node("This document contains only one string"); auto node2 = Node("This document contains only one string");
Dumper(new YMemoryStream()).dump(node1, node2); Dumper(new YMemoryStream()).dump(node1, node2);
} }
unittest @safe unittest
{ {
//import std.stream; //import std.stream;
auto stream = new YMemoryStream(); auto stream = new YMemoryStream();
@ -101,7 +101,7 @@ struct Dumper
Dumper(stream).dump(node); Dumper(stream).dump(node);
} }
unittest @safe unittest
{ {
auto node = Node([1, 2, 3, 4, 5]); auto node = Node([1, 2, 3, 4, 5]);
auto representer = new Representer(); auto representer = new Representer();
@ -157,7 +157,7 @@ struct Dumper
* *
* Throws: YAMLException if the file can not be dumped to (e.g. cannot be opened). * Throws: YAMLException if the file can not be dumped to (e.g. cannot be opened).
*/ */
this(string filename) @trusted this(string filename) @safe
{ {
name_ = filename; name_ = filename;
//try{this(new File(filename, FileMode.OutNew));} //try{this(new File(filename, FileMode.OutNew));}
@ -183,7 +183,6 @@ struct Dumper
///Destroy the Dumper. ///Destroy the Dumper.
@trusted ~this() @trusted ~this()
{ {
YAMLVersion_ = null;
if(weOwnStream_) { destroy(stream_); } if(weOwnStream_) { destroy(stream_); }
} }
@ -194,16 +193,14 @@ struct Dumper
} }
///Specify custom Resolver to use. ///Specify custom Resolver to use.
@property void resolver(Resolver resolver) @trusted @property void resolver(Resolver resolver) @safe
{ {
resolver_.destroy();
resolver_ = resolver; resolver_ = resolver;
} }
///Specify custom Representer to use. ///Specify custom Representer to use.
@property void representer(Representer representer) @trusted @property void representer(Representer representer) @safe
{ {
representer_.destroy();
representer_ = representer; representer_ = representer;
} }
@ -290,7 +287,7 @@ struct Dumper
* dumper.dump(Node("foo")); * dumper.dump(Node("foo"));
* -------------------- * --------------------
*/ */
@property void tagDirectives(string[string] tags) pure @trusted @property void tagDirectives(string[string] tags) pure @safe
{ {
TagDirective[] t; TagDirective[] t;
foreach(handle, prefix; tags) foreach(handle, prefix; tags)


@ -184,21 +184,6 @@ struct Emitter
analysis_.flags.isNull = true; analysis_.flags.isNull = true;
} }
///Destroy the emitter.
@trusted ~this()
{
stream_ = null;
states_.destroy();
events_.destroy();
indents_.destroy();
tagDirectives_.destroy();
tagDirectives_ = null;
preparedAnchor_.destroy();
preparedAnchor_ = null;
preparedTag_.destroy();
preparedTag_ = null;
}
///Emit an event. Throws EmitterException on error. ///Emit an event. Throws EmitterException on error.
void emit(Event event) @trusted void emit(Event event) @trusted
{ {
@ -237,20 +222,20 @@ struct Emitter
} }
///Write a string to the file/stream. ///Write a string to the file/stream.
void writeString(const string str) @system void writeString(const char[] str) @safe
{ {
try final switch(encoding_) try final switch(encoding_)
{ {
case Encoding.UTF_8: case Encoding.UTF_8:
stream_.writeExact(str.ptr, str.length * char.sizeof); stream_.writeExact(str);
break; break;
case Encoding.UTF_16: case Encoding.UTF_16:
const buffer = to!wstring(str); const buffer = to!wstring(str);
stream_.writeExact(buffer.ptr, buffer.length * wchar.sizeof); stream_.writeExact(buffer);
break; break;
case Encoding.UTF_32: case Encoding.UTF_32:
const buffer = to!dstring(str); const buffer = to!dstring(str);
stream_.writeExact(buffer.ptr, buffer.length * dchar.sizeof); stream_.writeExact(buffer);
break; break;
} }
catch(Exception e) catch(Exception e)
@ -260,7 +245,7 @@ struct Emitter
} }
///In some cases, we wait for a few next events before emitting. ///In some cases, we wait for a few next events before emitting.
bool needMoreEvents() @trusted nothrow bool needMoreEvents() @safe nothrow
{ {
if(events_.length == 0){return true;} if(events_.length == 0){return true;}
@ -273,7 +258,7 @@ struct Emitter
} }
///Determines if we need specified number of more events. ///Determines if we need specified number of more events.
bool needEvents(in uint count) @system nothrow bool needEvents(in uint count) @safe nothrow
{ {
int level = 0; int level = 0;
@ -315,7 +300,7 @@ struct Emitter
} }
///Determines if the type of current event is as specified. Throws if no event. ///Determines if the type of current event is as specified. Throws if no event.
bool eventTypeIs(in EventID id) const pure @trusted bool eventTypeIs(in EventID id) const pure @safe
{ {
enforce(!event_.isNull, enforce(!event_.isNull,
new Error("Expected an event, but no event is available.")); new Error("Expected an event, but no event is available."));
@ -340,7 +325,7 @@ struct Emitter
} }
///Expect nothing, throwing if we still have something. ///Expect nothing, throwing if we still have something.
void expectNothing() const @trusted void expectNothing() const @safe
{ {
throw new Error("Expected nothing, but got " ~ event_.idString); throw new Error("Expected nothing, but got " ~ event_.idString);
} }
@ -452,7 +437,7 @@ struct Emitter
} }
///Handle a new node. Context specifies where in the document we are. ///Handle a new node. Context specifies where in the document we are.
void expectNode(const Context context) @trusted void expectNode(const Context context) @safe
{ {
context_ = context; context_ = context;
@ -504,7 +489,7 @@ struct Emitter
} }
///Handle a scalar. ///Handle a scalar.
void expectScalar() @trusted void expectScalar() @safe
{ {
increaseIndent(Yes.flow); increaseIndent(Yes.flow);
processScalar(); processScalar();
@ -835,7 +820,7 @@ struct Emitter
} }
///Determine style to write the current scalar in. ///Determine style to write the current scalar in.
ScalarStyle chooseScalarStyle() @trusted ScalarStyle chooseScalarStyle() @safe
{ {
if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);} if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);}
@ -878,7 +863,7 @@ struct Emitter
} }
///Prepare YAML version string for output. ///Prepare YAML version string for output.
static string prepareVersion(const string YAMLVersion) @trusted static string prepareVersion(const string YAMLVersion) @safe
{ {
enforce(YAMLVersion.split(".")[0] == "1", enforce(YAMLVersion.split(".")[0] == "1",
new Error("Unsupported YAML version: " ~ YAMLVersion)); new Error("Unsupported YAML version: " ~ YAMLVersion));
@ -886,7 +871,7 @@ struct Emitter
} }
///Encode an Unicode character for tag directive and write it to writer. ///Encode an Unicode character for tag directive and write it to writer.
static void encodeChar(Writer)(ref Writer writer, in dchar c) @trusted static void encodeChar(Writer)(ref Writer writer, in dchar c) @safe
{ {
char[4] data; char[4] data;
const bytes = encode(data, c); const bytes = encode(data, c);
@ -898,7 +883,7 @@ struct Emitter
} }
///Prepare tag directive handle for output. ///Prepare tag directive handle for output.
static string prepareTagHandle(const string handle) @trusted static string prepareTagHandle(const string handle) @safe
{ {
enforce(handle !is null && handle != "", enforce(handle !is null && handle != "",
new Error("Tag handle must not be empty")); new Error("Tag handle must not be empty"));
@ -913,7 +898,7 @@ struct Emitter
} }
///Prepare tag directive prefix for output. ///Prepare tag directive prefix for output.
static string prepareTagPrefix(const string prefix) @trusted static string prepareTagPrefix(const string prefix) @safe
{ {
enforce(prefix !is null && prefix != "", enforce(prefix !is null && prefix != "",
new Error("Tag prefix must not be empty")); new Error("Tag prefix must not be empty"));
@ -944,7 +929,7 @@ struct Emitter
} }
///Prepare tag for output. ///Prepare tag for output.
string prepareTag(in string tag) @trusted string prepareTag(in string tag) @safe
{ {
enforce(tag !is null, new Error("Tag must not be empty")); enforce(tag !is null, new Error("Tag must not be empty"));
@ -991,7 +976,7 @@ struct Emitter
} }
///Prepare anchor for output. ///Prepare anchor for output.
static string prepareAnchor(const string anchor) @trusted static string prepareAnchor(const string anchor) @safe
{ {
enforce(anchor != "", enforce(anchor != "",
new Error("Anchor must not be empty")); new Error("Anchor must not be empty"));
@ -1172,7 +1157,7 @@ struct Emitter
//Writers. //Writers.
///Start the YAML stream (write the unicode byte order mark). ///Start the YAML stream (write the unicode byte order mark).
void writeStreamStart() @system void writeStreamStart() @safe
{ {
immutable(ubyte)[] bom; immutable(ubyte)[] bom;
//Write BOM (except for UTF-8) //Write BOM (except for UTF-8)
@ -1196,13 +1181,13 @@ struct Emitter
} }
///End the YAML stream. ///End the YAML stream.
void writeStreamEnd() @system {stream_.flush();} void writeStreamEnd() @safe {stream_.flush();}
///Write an indicator (e.g. ":", "[", ">", etc.). ///Write an indicator (e.g. ":", "[", ">", etc.).
void writeIndicator(const string indicator, void writeIndicator(const char[] indicator,
const Flag!"needWhitespace" needWhitespace, const Flag!"needWhitespace" needWhitespace,
const Flag!"whitespace" whitespace = No.whitespace, const Flag!"whitespace" whitespace = No.whitespace,
const Flag!"indentation" indentation = No.indentation) @system const Flag!"indentation" indentation = No.indentation) @safe
{ {
const bool prefixSpace = !whitespace_ && needWhitespace; const bool prefixSpace = !whitespace_ && needWhitespace;
whitespace_ = whitespace; whitespace_ = whitespace;
@ -1218,7 +1203,7 @@ struct Emitter
} }
///Write indentation. ///Write indentation.
void writeIndent() @system void writeIndent() @safe
{ {
const indent = indent_ == -1 ? 0 : indent_; const indent = indent_ == -1 ? 0 : indent_;
@ -1244,7 +1229,7 @@ struct Emitter
} }
///Start new line. ///Start new line.
void writeLineBreak(const string data = null) @system void writeLineBreak(const char[] data = null) @safe
{ {
whitespace_ = indentation_ = true; whitespace_ = indentation_ = true;
++line_; ++line_;
@ -1253,7 +1238,7 @@ struct Emitter
} }
///Write a YAML version directive. ///Write a YAML version directive.
void writeVersionDirective(const string versionText) @system void writeVersionDirective(const string versionText) @safe
{ {
writeString("%YAML "); writeString("%YAML ");
writeString(versionText); writeString(versionText);
@ -1261,7 +1246,7 @@ struct Emitter
} }
///Write a tag directive. ///Write a tag directive.
void writeTagDirective(const string handle, const string prefix) @system void writeTagDirective(const string handle, const string prefix) @safe
{ {
writeString("%TAG "); writeString("%TAG ");
writeString(handle); writeString(handle);
@ -1319,14 +1304,8 @@ struct ScalarWriter
split_ = split; split_ = split;
} }
///Destroy the ScalarWriter.
@trusted nothrow ~this()
{
text_ = null;
}
///Write text as single quoted scalar. ///Write text as single quoted scalar.
void writeSingleQuoted() @system void writeSingleQuoted() @safe
{ {
emitter_.writeIndicator("\'", Yes.needWhitespace); emitter_.writeIndicator("\'", Yes.needWhitespace);
spaces_ = breaks_ = false; spaces_ = breaks_ = false;
@ -1376,7 +1355,7 @@ struct ScalarWriter
} }
///Write text as double quoted scalar. ///Write text as double quoted scalar.
void writeDoubleQuoted() @system void writeDoubleQuoted() @safe
{ {
resetTextPosition(); resetTextPosition();
emitter_.writeIndicator("\"", Yes.needWhitespace); emitter_.writeIndicator("\"", Yes.needWhitespace);
@ -1438,7 +1417,7 @@ struct ScalarWriter
} }
///Write text as folded block scalar. ///Write text as folded block scalar.
void writeFolded() @system void writeFolded() @safe
{ {
initBlock('>'); initBlock('>');
bool leadingSpace = true; bool leadingSpace = true;
@ -1484,7 +1463,7 @@ struct ScalarWriter
} }
///Write text as literal block scalar. ///Write text as literal block scalar.
void writeLiteral() @system void writeLiteral() @safe
{ {
initBlock('|'); initBlock('|');
breaks_ = true; breaks_ = true;
@ -1511,7 +1490,7 @@ struct ScalarWriter
} }
///Write text as plain scalar. ///Write text as plain scalar.
void writePlain() @system void writePlain() @safe
{ {
if(emitter_.context_ == Emitter.Context.Root){emitter_.openEnded_ = true;} if(emitter_.context_ == Emitter.Context.Root){emitter_.openEnded_ = true;}
if(text_ == ""){return;} if(text_ == ""){return;}
@ -1588,7 +1567,7 @@ struct ScalarWriter
} }
///Determine hints (indicators) for block scalar. ///Determine hints (indicators) for block scalar.
size_t determineBlockHints(char[] hints, uint bestIndent) const pure @trusted size_t determineBlockHints(char[] hints, uint bestIndent) const pure @safe
{ {
size_t hintsIdx = 0; size_t hintsIdx = 0;
if(text_.length == 0){return hintsIdx;} if(text_.length == 0){return hintsIdx;}
@ -1619,12 +1598,12 @@ struct ScalarWriter
} }
///Initialize for block scalar writing with specified indicator. ///Initialize for block scalar writing with specified indicator.
void initBlock(const char indicator) @system void initBlock(const char indicator) @safe
{ {
char[4] hints; char[4] hints;
hints[0] = indicator; hints[0] = indicator;
const hintsLength = 1 + determineBlockHints(hints[1 .. $], emitter_.bestIndent_); const hintsLength = 1 + determineBlockHints(hints[1 .. $], emitter_.bestIndent_);
emitter_.writeIndicator(cast(string)hints[0 .. hintsLength], Yes.needWhitespace); emitter_.writeIndicator(hints[0 .. hintsLength], Yes.needWhitespace);
if(hints.length > 0 && hints[$ - 1] == '+') if(hints.length > 0 && hints[$ - 1] == '+')
{ {
emitter_.openEnded_ = true; emitter_.openEnded_ = true;
@ -1633,7 +1612,7 @@ struct ScalarWriter
} }
///Write out the current text range. ///Write out the current text range.
void writeCurrentRange(const Flag!"UpdateColumn" updateColumn) @system void writeCurrentRange(const Flag!"UpdateColumn" updateColumn) @safe
{ {
emitter_.writeString(text_[startByte_ .. endByte_]); emitter_.writeString(text_[startByte_ .. endByte_]);
if(updateColumn){emitter_.column_ += endChar_ - startChar_;} if(updateColumn){emitter_.column_ += endChar_ - startChar_;}
@ -1641,7 +1620,7 @@ struct ScalarWriter
} }
///Write line breaks in the text range. ///Write line breaks in the text range.
void writeLineBreaks() @system void writeLineBreaks() @safe
{ {
foreach(const dchar br; text_[startByte_ .. endByte_]) foreach(const dchar br; text_[startByte_ .. endByte_])
{ {
@ -1650,20 +1629,20 @@ struct ScalarWriter
{ {
char[4] brString; char[4] brString;
const bytes = encode(brString, br); const bytes = encode(brString, br);
emitter_.writeLineBreak(cast(string)brString[0 .. bytes]); emitter_.writeLineBreak(brString[0 .. bytes]);
} }
} }
updateRangeStart(); updateRangeStart();
} }
///Write line break if start of the text range is a newline. ///Write line break if start of the text range is a newline.
void writeStartLineBreak() @system void writeStartLineBreak() @safe
{ {
if(charAtStart == '\n'){emitter_.writeLineBreak();} if(charAtStart == '\n'){emitter_.writeLineBreak();}
} }
///Write indentation, optionally resetting whitespace/indentation flags. ///Write indentation, optionally resetting whitespace/indentation flags.
void writeIndent(const Flag!"ResetSpace" resetSpace) @system void writeIndent(const Flag!"ResetSpace" resetSpace) @safe
{ {
emitter_.writeIndent(); emitter_.writeIndent();
if(resetSpace) if(resetSpace)
@ -1680,7 +1659,7 @@ struct ScalarWriter
} }
///Update the line breaks_ flag, optionally updating the spaces_ flag. ///Update the line breaks_ flag, optionally updating the spaces_ flag.
void updateBreaks(in dchar c, const Flag!"UpdateSpaces" updateSpaces) pure @trusted void updateBreaks(in dchar c, const Flag!"UpdateSpaces" updateSpaces) pure @safe
{ {
if(c == dcharNone){return;} if(c == dcharNone){return;}
breaks_ = newlineSearch_.canFind(c); breaks_ = newlineSearch_.canFind(c);


@ -89,10 +89,10 @@ struct Event
CollectionStyle collectionStyle = CollectionStyle.Invalid; CollectionStyle collectionStyle = CollectionStyle.Invalid;
///Is this a null (uninitialized) event? ///Is this a null (uninitialized) event?
@property bool isNull() const pure @system nothrow {return id == EventID.Invalid;} @property bool isNull() const pure @safe nothrow {return id == EventID.Invalid;}
///Get string representation of the token ID. ///Get string representation of the token ID.
@property string idString() const @system {return to!string(id);} @property string idString() const @safe {return to!string(id);}
static assert(Event.sizeof <= 64, "Event struct larger than expected"); static assert(Event.sizeof <= 64, "Event struct larger than expected");
} }
@ -105,7 +105,7 @@ struct Event
* anchor = Anchor, if this is an alias event. * anchor = Anchor, if this is an alias event.
*/ */
Event event(EventID id)(const Mark start, const Mark end, const string anchor = null) Event event(EventID id)(const Mark start, const Mark end, const string anchor = null)
pure @trusted nothrow @trusted
{ {
Event result; Event result;
result.startMark = start; result.startMark = start;
@ -149,7 +149,7 @@ Event collectionStartEvent(EventID id)
* encoding = Encoding of the stream. * encoding = Encoding of the stream.
*/ */
Event streamStartEvent(const Mark start, const Mark end, const Encoding encoding) Event streamStartEvent(const Mark start, const Mark end, const Encoding encoding)
pure @trusted nothrow pure @safe nothrow
{ {
Event result; Event result;
result.startMark = start; result.startMark = start;
@ -198,7 +198,7 @@ Event documentStartEvent(const Mark start, const Mark end, const bool explicit,
* end = End position of the event in the file/stream. * end = End position of the event in the file/stream.
* explicit = Is this an explicit document end? * explicit = Is this an explicit document end?
*/ */
Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pure @trusted nothrow Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pure @safe nothrow
{ {
Event result; Event result;
result.startMark = start; result.startMark = start;
@ -219,17 +219,15 @@ Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pu
/// style = Scalar style. /// style = Scalar style.
Event scalarEvent(const Mark start, const Mark end, const string anchor, const string tag, Event scalarEvent(const Mark start, const Mark end, const string anchor, const string tag,
const Tuple!(bool, bool) implicit, const string value, const Tuple!(bool, bool) implicit, const string value,
const ScalarStyle style = ScalarStyle.Invalid) @safe pure nothrow @nogc const ScalarStyle style = ScalarStyle.Invalid) @trusted pure nothrow @nogc
{ {
Event result; Event result;
result.value = value; result.value = value;
result.startMark = start; result.startMark = start;
result.endMark = end; result.endMark = end;
() @trusted { result.anchor = anchor;
result.anchor = anchor; result.tag = tag;
result.tag = tag;
}();
result.id = EventID.Scalar; result.id = EventID.Scalar;
result.scalarStyle = style; result.scalarStyle = style;


@ -35,7 +35,7 @@ template FastCharSearch(dstring chars, uint tableSize = 256)
} }
/// Generate the search table and the canFind method. /// Generate the search table and the canFind method.
string searchCode(dstring chars, uint tableSize)() @safe pure //nothrow string searchCode(dstring chars, uint tableSize)()
{ {
import std.string; import std.string;
@ -87,3 +87,9 @@ string searchCode(dstring chars, uint tableSize)() @safe pure //nothrow
return code; return code;
} }
@safe unittest
{
mixin FastCharSearch!("+", 128) search;
assert(search.canFind('+'));
}


@ -72,7 +72,7 @@ struct Flags(names ...) if(names.length <= 8)
///Flag accessors. ///Flag accessors.
mixin(flags(names)); mixin(flags(names));
} }
unittest @safe unittest
{ {
import std.stdio; import std.stdio;
writeln("Flags unittest"); writeln("Flags unittest");


@ -39,7 +39,7 @@ ScalarStyle scalarStyleHack(ref const(Node) node) @safe nothrow
assert(node.isScalar, "Trying to get scalar style of a non-scalar node"); assert(node.isScalar, "Trying to get scalar style of a non-scalar node");
return node.scalarStyle; return node.scalarStyle;
} }
unittest @safe unittest
{ {
writeln("D:YAML scalarStyleHack getter unittest"); writeln("D:YAML scalarStyleHack getter unittest");
auto node = Node(5); auto node = Node(5);
@ -57,7 +57,7 @@ CollectionStyle collectionStyleHack(ref const(Node) node) @safe nothrow
assert(!node.isScalar, "Trying to get collection style of a scalar node"); assert(!node.isScalar, "Trying to get collection style of a scalar node");
return node.collectionStyle; return node.collectionStyle;
} }
unittest @safe unittest
{ {
writeln("D:YAML collectionStyleHack getter unittest"); writeln("D:YAML collectionStyleHack getter unittest");
auto node = Node([1, 2, 3, 4, 5]); auto node = Node([1, 2, 3, 4, 5]);
@ -77,7 +77,7 @@ void scalarStyleHack(ref Node node, const ScalarStyle rhs) @safe nothrow
node.scalarStyle = rhs; node.scalarStyle = rhs;
} }
/// ///
unittest @safe unittest
{ {
writeln("D:YAML scalarStyleHack setter unittest"); writeln("D:YAML scalarStyleHack setter unittest");
auto node = Node(5); auto node = Node(5);
@ -97,7 +97,7 @@ void collectionStyleHack(ref Node node, const CollectionStyle rhs) @safe nothrow
node.collectionStyle = rhs; node.collectionStyle = rhs;
} }
/// ///
unittest @safe unittest
{ {
writeln("D:YAML collectionStyleHack setter unittest"); writeln("D:YAML collectionStyleHack setter unittest");
auto node = Node([1, 2, 3, 4, 5]); auto node = Node([1, 2, 3, 4, 5]);


@ -157,9 +157,9 @@ struct Loader
return Loader(cast(ubyte[])data); return Loader(cast(ubyte[])data);
} }
/// ///
unittest @safe unittest
{ {
assert(Loader.fromString(cast(char[])"42").load().as!int == 42); assert(Loader.fromString("42".dup).load().as!int == 42);
} }
/** Construct a Loader to load YAML from a buffer. /** Construct a Loader to load YAML from a buffer.
@ -265,12 +265,11 @@ struct Loader
* *
* Throws: YAMLException on a parsing error. * Throws: YAMLException on a parsing error.
*/ */
Node[] loadAll() @trusted Node[] loadAll() @safe
{ {
Node[] nodes; Node[] nodes;
foreach(ref node; this) foreach(ref node; this)
{ {
nodes.assumeSafeAppend();
nodes ~= node; nodes ~= node;
} }
return nodes; return nodes;
@ -316,14 +315,13 @@ struct Loader
package: package:
// Scan and return all tokens. Used for debugging. // Scan and return all tokens. Used for debugging.
Token[] scan() @trusted Token[] scan() @safe
{ {
try try
{ {
Token[] result; Token[] result;
while(scanner_.checkToken()) while(scanner_.checkToken())
{ {
result.assumeSafeAppend();
result ~= scanner_.getToken(); result ~= scanner_.getToken();
} }
return result; return result;
@ -378,7 +376,7 @@ struct Loader
} }
} }
unittest @safe unittest
{ {
char[] yaml_input = ("red: '#ff0000'\n" ~ char[] yaml_input = ("red: '#ff0000'\n" ~
"green: '#00ff00'\n" ~ "green: '#00ff00'\n" ~


@ -112,7 +112,7 @@ package class YAMLContainer(T) if (!Node.allowed!T): YAMLObject
private: private:
// Construct a YAMLContainer holding specified value. // Construct a YAMLContainer holding specified value.
this(T value) @trusted {value_ = value;} this(T value) @safe {value_ = value;}
} }
@ -127,7 +127,7 @@ private struct Pair
public: public:
/// Construct a Pair from two values. Will be converted to Nodes if needed. /// Construct a Pair from two values. Will be converted to Nodes if needed.
this(K, V)(K key, V value) @safe this(K, V)(K key, V value)
{ {
static if(is(Unqual!K == Node)){this.key = key;} static if(is(Unqual!K == Node)){this.key = key;}
else {this.key = Node(key);} else {this.key = Node(key);}
@ -265,7 +265,7 @@ struct Node
} }
/// Ditto. /// Ditto.
// Overload for types where we can make this nothrow. // Overload for types where we can make this nothrow.
this(T)(T value, const string tag = null) @trusted pure nothrow this(T)(T value, const string tag = null) @trusted
if(scalarCtorNothrow!T) if(scalarCtorNothrow!T)
{ {
tag_ = tag; tag_ = tag;
@ -276,7 +276,7 @@ struct Node
// User defined type or plain string. // User defined type or plain string.
else { value_ = Value(value);} else { value_ = Value(value);}
} }
unittest @safe unittest
{ {
{ {
auto node = Node(42); auto node = Node(42);
@ -343,7 +343,7 @@ struct Node
value_ = Value(nodes); value_ = Value(nodes);
} }
} }
unittest @safe unittest
{ {
with(Node([1, 2, 3])) with(Node([1, 2, 3]))
{ {
@ -393,7 +393,7 @@ struct Node
foreach(key, ref value; array){pairs ~= Pair(key, value);} foreach(key, ref value; array){pairs ~= Pair(key, value);}
value_ = Value(pairs); value_ = Value(pairs);
} }
unittest @safe unittest
{ {
int[string] aa; int[string] aa;
aa["1"] = 1; aa["1"] = 1;
@ -465,7 +465,7 @@ struct Node
foreach(i; 0 .. keys.length){pairs ~= Pair(keys[i], values[i]);} foreach(i; 0 .. keys.length){pairs ~= Pair(keys[i], values[i]);}
value_ = Value(pairs); value_ = Value(pairs);
} }
unittest @safe unittest
{ {
with(Node(["1", "2"], [1, 2])) with(Node(["1", "2"], [1, 2]))
{ {
@ -535,12 +535,12 @@ struct Node
* *
* Returns: true if equal, false otherwise. * Returns: true if equal, false otherwise.
*/ */
bool opEquals(T)(const auto ref T rhs) const @safe bool opEquals(T)(const auto ref T rhs) const
{ {
return equals!(Yes.useTag)(rhs); return equals!(Yes.useTag)(rhs);
} }
/// ///
unittest @safe unittest
{ {
auto node = Node(42); auto node = Node(42);
@ -671,7 +671,7 @@ struct Node
} }
assert(false, "This code should never be reached"); assert(false, "This code should never be reached");
} }
unittest @safe unittest
{ {
assertThrown!NodeException(Node("42").get!int); assertThrown!NodeException(Node("42").get!int);
Node(YAMLNull()).get!YAMLNull; Node(YAMLNull()).get!YAMLNull;
@ -807,7 +807,7 @@ struct Node
throw new Error("Trying to index a " ~ nodeTypeString ~ " node", startMark_); throw new Error("Trying to index a " ~ nodeTypeString ~ " node", startMark_);
} }
/// ///
unittest @safe unittest
{ {
writeln("D:YAML Node opIndex unittest"); writeln("D:YAML Node opIndex unittest");
alias Node.Value Value; alias Node.Value Value;
@ -821,7 +821,7 @@ struct Node
assert(nmap["11"].as!int == 11); assert(nmap["11"].as!int == 11);
assert(nmap["14"].as!int == 14); assert(nmap["14"].as!int == 14);
} }
unittest @safe unittest
{ {
writeln("D:YAML Node opIndex unittest"); writeln("D:YAML Node opIndex unittest");
alias Node.Value Value; alias Node.Value Value;
@ -856,7 +856,7 @@ struct Node
* *
* Throws: NodeException if the node is not a collection. * Throws: NodeException if the node is not a collection.
*/ */
bool contains(T)(T rhs) const @safe bool contains(T)(T rhs) const
{ {
return contains_!(T, No.key, "contains")(rhs); return contains_!(T, No.key, "contains")(rhs);
} }
@ -870,13 +870,13 @@ struct Node
* *
* Throws: NodeException if the node is not a mapping. * Throws: NodeException if the node is not a mapping.
*/ */
bool containsKey(T)(T rhs) const @safe bool containsKey(T)(T rhs) const
{ {
return contains_!(T, Yes.key, "containsKey")(rhs); return contains_!(T, Yes.key, "containsKey")(rhs);
} }
// Unittest for contains() and containsKey(). // Unittest for contains() and containsKey().
unittest @safe unittest
{ {
writeln("D:YAML Node contains/containsKey unittest"); writeln("D:YAML Node contains/containsKey unittest");
auto seq = Node([1, 2, 3, 4, 5]); auto seq = Node([1, 2, 3, 4, 5]);
@ -946,7 +946,7 @@ struct Node
collectionStyle = rhs.collectionStyle; collectionStyle = rhs.collectionStyle;
} }
// Unittest for opAssign(). // Unittest for opAssign().
unittest @safe unittest
{ {
auto seq = Node([1, 2, 3, 4, 5]); auto seq = Node([1, 2, 3, 4, 5]);
auto assigned = seq; auto assigned = seq;
@ -1007,7 +1007,7 @@ struct Node
throw new Error("Trying to index a " ~ nodeTypeString ~ " node", startMark_); throw new Error("Trying to index a " ~ nodeTypeString ~ " node", startMark_);
} }
unittest @safe unittest
{ {
writeln("D:YAML Node opIndexAssign unittest"); writeln("D:YAML Node opIndexAssign unittest");
@ -1048,7 +1048,7 @@ struct Node
* Throws: NodeException if the node is not a sequence or an element * Throws: NodeException if the node is not a sequence or an element
* could not be converted to specified type. * could not be converted to specified type.
*/ */
auto sequence(T = Node)() @trusted auto sequence(T = Node)()
{ {
enforce(isSequence, enforce(isSequence,
new Error("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node", new Error("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node",
@ -1118,7 +1118,7 @@ struct Node
} }
return Range(get!(Node[])); return Range(get!(Node[]));
} }
unittest @safe unittest
{ {
writeln("D:YAML Node sequence unittest"); writeln("D:YAML Node sequence unittest");
@ -1137,7 +1137,7 @@ struct Node
* Throws: NodeException if the node is not a mapping. * Throws: NodeException if the node is not a mapping.
* *
*/ */
auto mapping() @trusted auto mapping() @safe
{ {
enforce(isMapping, enforce(isMapping,
new Error("Trying to 'mapping'-iterate over a " new Error("Trying to 'mapping'-iterate over a "
@ -1147,46 +1147,46 @@ struct Node
Node.Pair[] pairs; Node.Pair[] pairs;
size_t position; size_t position;
this(Node.Pair[] pairs) this(Node.Pair[] pairs) @safe
{ {
this.pairs = pairs; this.pairs = pairs;
position = 0; position = 0;
} }
/* Input range functionality. */ /* Input range functionality. */
bool empty() { return position >= pairs.length; } bool empty() @safe { return position >= pairs.length; }
void popFront() void popFront() @safe
{ {
enforce(!empty, "Attempted to popFront an empty mapping"); enforce(!empty, "Attempted to popFront an empty mapping");
position++; position++;
} }
Pair front() Pair front() @safe
{ {
enforce(!empty, "Attempted to take the front of an empty mapping"); enforce(!empty, "Attempted to take the front of an empty mapping");
return pairs[position]; return pairs[position];
} }
/* Forward range functionality. */ /* Forward range functionality. */
Range save() { return this; } Range save() @safe { return this; }
/* Bidirectional range functionality. */ /* Bidirectional range functionality. */
void popBack() void popBack() @safe
{ {
enforce(!empty, "Attempted to popBack an empty mapping"); enforce(!empty, "Attempted to popBack an empty mapping");
pairs = pairs[0 .. $ - 1]; pairs = pairs[0 .. $ - 1];
} }
Pair back() Pair back() @safe
{ {
enforce(!empty, "Attempted to take the back of an empty mapping"); enforce(!empty, "Attempted to take the back of an empty mapping");
return pairs[$ - 1]; return pairs[$ - 1];
} }
/* Random-access range functionality. */ /* Random-access range functionality. */
size_t length() const @property { return pairs.length; } size_t length() const @property @safe { return pairs.length; }
Pair opIndex(size_t index) { return pairs[index]; } Pair opIndex(size_t index) @safe { return pairs[index]; }
static assert(isInputRange!Range); static assert(isInputRange!Range);
static assert(isForwardRange!Range); static assert(isForwardRange!Range);
@ -1195,7 +1195,7 @@ struct Node
} }
return Range(get!(Node.Pair[])); return Range(get!(Node.Pair[]));
} }
unittest @safe unittest
{ {
writeln("D:YAML Node mapping unittest"); writeln("D:YAML Node mapping unittest");
@ -1222,7 +1222,7 @@ struct Node
* Throws: NodeException if the nodes is not a mapping or an element * Throws: NodeException if the nodes is not a mapping or an element
* could not be converted to specified type. * could not be converted to specified type.
*/ */
auto mappingKeys(K = Node)() @trusted auto mappingKeys(K = Node)()
{ {
enforce(isMapping, enforce(isMapping,
new Error("Trying to 'mappingKeys'-iterate over a " new Error("Trying to 'mappingKeys'-iterate over a "
@ -1232,7 +1232,7 @@ struct Node
else else
return mapping.map!(pair => pair.key.as!K); return mapping.map!(pair => pair.key.as!K);
} }
unittest @safe unittest
{ {
writeln("D:YAML Node mappingKeys unittest"); writeln("D:YAML Node mappingKeys unittest");
@ -1252,7 +1252,7 @@ struct Node
* Throws: NodeException if the nodes is not a mapping or an element * Throws: NodeException if the nodes is not a mapping or an element
* could not be converted to specified type. * could not be converted to specified type.
*/ */
auto mappingValues(V = Node)() @trusted auto mappingValues(V = Node)()
{ {
enforce(isMapping, enforce(isMapping,
new Error("Trying to 'mappingValues'-iterate over a " new Error("Trying to 'mappingValues'-iterate over a "
@ -1262,7 +1262,7 @@ struct Node
else else
return mapping.map!(pair => pair.value.as!V); return mapping.map!(pair => pair.value.as!V);
} }
unittest @safe unittest
{ {
writeln("D:YAML Node mappingValues unittest"); writeln("D:YAML Node mappingValues unittest");
@ -1304,7 +1304,7 @@ struct Node
} }
return result; return result;
} }
unittest @system unittest
{ {
writeln("D:YAML Node opApply unittest 1"); writeln("D:YAML Node opApply unittest 1");
@ -1372,7 +1372,7 @@ struct Node
} }
return result; return result;
} }
unittest @safe unittest
{ {
writeln("D:YAML Node opApply unittest 2"); writeln("D:YAML Node opApply unittest 2");
@ -1447,7 +1447,7 @@ struct Node
else {nodes ~= Node(value);} else {nodes ~= Node(value);}
value_ = Value(nodes); value_ = Value(nodes);
} }
unittest @safe unittest
{ {
writeln("D:YAML Node add unittest 1"); writeln("D:YAML Node add unittest 1");
@ -1484,7 +1484,7 @@ struct Node
pairs ~= Pair(key, value); pairs ~= Pair(key, value);
value_ = Value(pairs); value_ = Value(pairs);
} }
unittest @safe unittest
{ {
writeln("D:YAML Node add unittest 2"); writeln("D:YAML Node add unittest 2");
with(Node([1, 2], [3, 4])) with(Node([1, 2], [3, 4]))
@ -1508,7 +1508,7 @@ struct Node
* *
* See_Also: contains * See_Also: contains
*/ */
Node* opBinaryRight(string op, K)(K key) @system Node* opBinaryRight(string op, K)(K key)
if (op == "in") if (op == "in")
{ {
enforce(isMapping, new Error("Trying to use 'in' on a " ~ enforce(isMapping, new Error("Trying to use 'in' on a " ~
@ -1524,7 +1524,7 @@ struct Node
return &(get!(Node.Pair[])[idx].value); return &(get!(Node.Pair[])[idx].value);
} }
} }
unittest @safe unittest
{ {
writeln(`D:YAML Node opBinaryRight!"in" unittest`); writeln(`D:YAML Node opBinaryRight!"in" unittest`);
auto mapping = Node(["foo", "baz"], ["bar", "qux"]); auto mapping = Node(["foo", "baz"], ["bar", "qux"]);
@ -1553,7 +1553,7 @@ struct Node
{ {
remove_!(T, No.key, "remove")(rhs); remove_!(T, No.key, "remove")(rhs);
} }
unittest @safe unittest
{ {
writeln("D:YAML Node remove unittest"); writeln("D:YAML Node remove unittest");
with(Node([1, 2, 3, 4, 3])) with(Node([1, 2, 3, 4, 3]))
@ -1601,7 +1601,7 @@ struct Node
{ {
remove_!(T, Yes.key, "removeAt")(index); remove_!(T, Yes.key, "removeAt")(index);
} }
unittest @safe unittest
{ {
writeln("D:YAML Node removeAt unittest"); writeln("D:YAML Node removeAt unittest");
with(Node([1, 2, 3, 4, 3])) with(Node([1, 2, 3, 4, 3]))
@ -1632,13 +1632,13 @@ struct Node
} }
// Compute hash of the node. // Compute hash of the node.
hash_t toHash() nothrow const hash_t toHash() nothrow const @trusted
{ {
const tagHash = (tag_ is null) ? 0 : tag_.hashOf(); const valueHash = value_.toHash();
// Variant toHash is not const at the moment, so we need to const-cast.
return tagHash + value_.toHash(); return tag_ is null ? valueHash : tag_.hashOf(valueHash);
} }
unittest @safe unittest
{ {
writeln("Node(42).toHash(): ", Node(42).toHash()); writeln("Node(42).toHash(): ", Node(42).toHash());
} }
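The rewritten toHash above no longer adds the two hashes; it feeds the value hash into hashOf as the seed, so the tag and value are chained into a single hash. Druntime's hashOf(value, seed) overload is what makes this possible; a minimal illustration with plain types, not the node's own fields:

size_t combinedHash(string tag, size_t valueHash) nothrow
{
    // hashOf takes an optional seed, so the second field's hash can be chained onto the first.
    return tag is null ? valueHash : tag.hashOf(valueHash);
}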
@ -1668,13 +1668,13 @@ struct Node
} }
// Construct Node.Value from user defined type. // Construct Node.Value from user defined type.
static Value userValue(T)(T value) @trusted nothrow static Value userValue(T)(T value) @trusted
{ {
return Value(cast(YAMLObject)new YAMLContainer!T(value)); return Value(cast(YAMLObject)new YAMLContainer!T(value));
} }
// Construct Node.Value from a type it can store directly (after casting if needed) // Construct Node.Value from a type it can store directly (after casting if needed)
static Value value(T)(T value) @system nothrow if(allowed!T) static Value value(T)(T value) if(allowed!T)
{ {
static if(Value.allowed!T) static if(Value.allowed!T)
{ {
@ -1702,7 +1702,7 @@ struct Node
// Equality test with any value. // Equality test with any value.
// //
// useTag determines whether or not to consider tags in node-node comparisons. // useTag determines whether or not to consider tags in node-node comparisons.
bool equals(Flag!"useTag" useTag, T)(ref T rhs) const @safe bool equals(Flag!"useTag" useTag, T)(ref T rhs) const
{ {
static if(is(Unqual!T == Node)) static if(is(Unqual!T == Node))
{ {
@ -1843,7 +1843,7 @@ struct Node
// Params: level = Level of the node in the tree. // Params: level = Level of the node in the tree.
// //
// Returns: String representing the node tree. // Returns: String representing the node tree.
@property string debugString(uint level = 0) @trusted @property string debugString(uint level = 0) @safe
{ {
string indent; string indent;
foreach(i; 0 .. level){indent ~= " ";} foreach(i; 0 .. level){indent ~= " ";}
@ -1879,17 +1879,16 @@ struct Node
} }
// Get type of the node value (YAMLObject for user types). // Get type of the node value (YAMLObject for user types).
@property TypeInfo type() const @trusted nothrow @property TypeInfo type() const @safe nothrow
{ {
alias TypeInfo delegate() const nothrow nothrowType; return value_.type;
return (cast(nothrowType)&value_.type)();
} }
public: public:
// Determine if the value stored by the node is of specified type. // Determine if the value stored by the node is of specified type.
// //
// This only works for default YAML types, not for user defined types. // This only works for default YAML types, not for user defined types.
@property bool isType(T)() const @safe nothrow @property bool isType(T)() const
{ {
return this.type is typeid(Unqual!T); return this.type is typeid(Unqual!T);
} }
@ -1928,7 +1927,7 @@ struct Node
} }
// Determine if the value can be converted to specified type. // Determine if the value can be converted to specified type.
@property bool convertsTo(T)() const @safe nothrow @property bool convertsTo(T)() const
{ {
if(isType!T){return true;} if(isType!T){return true;}
@ -1963,7 +1962,7 @@ struct Node
} }
// Implementation of remove() and removeAt() // Implementation of remove() and removeAt()
void remove_(T, Flag!"key" key, string func)(T rhs) @system void remove_(T, Flag!"key" key, string func)(T rhs)
{ {
enforce(isSequence || isMapping, enforce(isSequence || isMapping,
new Error("Trying to " ~ func ~ "() from a " ~ nodeTypeString ~ " node", new Error("Trying to " ~ func ~ "() from a " ~ nodeTypeString ~ " node",
@ -2083,7 +2082,7 @@ package:
// //
// Params: pairs = Appender managing the array of pairs to merge into. // Params: pairs = Appender managing the array of pairs to merge into.
// toMerge = Pair to merge. // toMerge = Pair to merge.
void merge(ref Appender!(Node.Pair[]) pairs, ref Node.Pair toMerge) @trusted void merge(ref Appender!(Node.Pair[]) pairs, ref Node.Pair toMerge) @safe
{ {
foreach(ref pair; pairs.data) foreach(ref pair; pairs.data)
{ {
@ -2099,7 +2098,7 @@ void merge(ref Appender!(Node.Pair[]) pairs, ref Node.Pair toMerge) @trusted
// //
// Params: pairs = Appender managing the array of pairs to merge into. // Params: pairs = Appender managing the array of pairs to merge into.
// toMerge = Pairs to merge. // toMerge = Pairs to merge.
void merge(ref Appender!(Node.Pair[]) pairs, Node.Pair[] toMerge) @trusted void merge(ref Appender!(Node.Pair[]) pairs, Node.Pair[] toMerge) @safe
{ {
bool eq(ref Node.Pair a, ref Node.Pair b){return a.key == b.key;} bool eq(ref Node.Pair a, ref Node.Pair b){return a.key == b.key;}


@ -143,7 +143,7 @@ struct AppenderNoGCFixed(A : T[], T)
/// Construct an appender that will work with given buffer. /// Construct an appender that will work with given buffer.
/// ///
/// Data written to the appender will overwrite the buffer from the start. /// Data written to the appender will overwrite the buffer from the start.
this(T[] arr) @trusted pure nothrow this(T[] arr) @safe pure nothrow
{ {
// initialize to a given array. // initialize to a given array.
_data.arr = cast(Unqual!T[])arr[0 .. 0]; //trusted _data.arr = cast(Unqual!T[])arr[0 .. 0]; //trusted
@ -163,11 +163,8 @@ struct AppenderNoGCFixed(A : T[], T)
/** /**
* Returns the managed array. * Returns the managed array.
*/ */
@property inout(T)[] data() inout @trusted pure nothrow @property inout(T)[] data() inout @safe pure nothrow
{ {
/* @trusted operation:
* casting Unqual!T[] to inout(T)[]
*/
return cast(typeof(return))(_data.arr); return cast(typeof(return))(_data.arr);
} }
@ -217,7 +214,7 @@ struct AppenderNoGCFixed(A : T[], T)
@disable void clear(); @disable void clear();
} }
} }
unittest @safe unittest
{ {
char[256] buffer; char[256] buffer;
auto appender = appenderNoGC(buffer[]); auto appender = appenderNoGC(buffer[]);
@ -245,7 +242,7 @@ struct ValidateResult
/// Validate a UTF-8 string, checking if it is well-formed Unicode. /// Validate a UTF-8 string, checking if it is well-formed Unicode.
/// ///
/// See_Also: ValidateResult /// See_Also: ValidateResult
ValidateResult validateUTF8NoGC(const(char[]) str) @trusted pure nothrow @nogc ValidateResult validateUTF8NoGC(const(char[]) str) @safe pure nothrow @nogc
{ {
immutable len = str.length; immutable len = str.length;
size_t characterCount; size_t characterCount;
@ -289,7 +286,6 @@ ValidateResult validateUTF8NoGC(const(char[]) str) @trusted pure nothrow @nogc
/// 'string errorMessage' member that is null on success and otherwise stores /// 'string errorMessage' member that is null on success and otherwise stores
/// the error message. /// the error message.
auto decodeUTF8NoGC(Flag!"validated" validated)(const(char[]) str, ref size_t index) auto decodeUTF8NoGC(Flag!"validated" validated)(const(char[]) str, ref size_t index)
@trusted pure nothrow @nogc
{ {
static if(!validated) struct Result static if(!validated) struct Result
{ {
@ -301,7 +297,7 @@ auto decodeUTF8NoGC(Flag!"validated" validated)(const(char[]) str, ref size_t in
/// Dchar bitmask for different numbers of UTF-8 code units. /// Dchar bitmask for different numbers of UTF-8 code units.
enum bitMask = tuple((1 << 7) - 1, (1 << 11) - 1, (1 << 16) - 1, (1 << 21) - 1); enum bitMask = tuple((1 << 7) - 1, (1 << 11) - 1, (1 << 16) - 1, (1 << 21) - 1);
auto pstr = str.ptr + index; auto pstr = str[index..$];
immutable length = str.length - index; immutable length = str.length - index;
ubyte fst = pstr[0]; ubyte fst = pstr[0];
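The switch from str.ptr + index to str[index .. $] just above is part of what lets the @trusted annotation on this decoder go away: a slice keeps bounds checking while reading the same bytes. A minimal sketch of the equivalence (the helper and its names are illustrative, not from the diff):

    // Sketch only: slicing reads the same byte the pointer arithmetic did,
    // but stays within bounds-checked, @safe-friendly territory.
    ubyte firstByteAt(const(char)[] str, size_t index)
    {
        assert(index < str.length);   // assumed precondition, as in the decoder
        auto pstr = str[index .. $];  // bounds-checked view of the remaining input
        ubyte fst = pstr[0];          // same byte str.ptr + index used to point at
        return fst;
    }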

View file

@ -127,12 +127,12 @@ final class Parser
TagDirective[] tagDirectives_; TagDirective[] tagDirectives_;
///Stack of states. ///Stack of states.
Array!(Event delegate()) states_; Array!(Event delegate() @safe) states_;
///Stack of marks used to keep track of extents of e.g. YAML collections. ///Stack of marks used to keep track of extents of e.g. YAML collections.
Array!Mark marks_; Array!Mark marks_;
///Current state. ///Current state.
Event delegate() state_; Event delegate() @safe state_;
public: public:
///Construct a Parser using specified Scanner. ///Construct a Parser using specified Scanner.
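The state stack above now stores Event delegate() @safe rather than plain Event delegate(); in D the attribute is part of the delegate type, so only member functions that themselves pass @safe checks can be pushed. A small hedged sketch (names are placeholders, not the parser's real states; Event is assumed to come from dyaml.event):

    import dyaml.event : Event;   // assumed location of the Event type

    alias State = Event delegate() @safe;

    final class ParserSketch
    {
        State[] states_;

        Event someSafeState() @safe { return Event.init; }

        void push()
        {
            // Fine: &someSafeState has type Event delegate() @safe.
            states_ ~= &someSafeState;
            // A @system member function would not convert to State implicitly.
        }
    }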
@ -165,7 +165,7 @@ final class Parser
* or if there are any events left if no types specified. * or if there are any events left if no types specified.
* false otherwise. * false otherwise.
*/ */
bool checkEvent(EventID[] ids...) @trusted bool checkEvent(EventID[] ids...) @safe
{ {
//Check if the next event is one of specified types. //Check if the next event is one of specified types.
if(currentEvent_.isNull && state_ !is null) if(currentEvent_.isNull && state_ !is null)
@ -228,7 +228,7 @@ final class Parser
private: private:
///Pop and return the newest state in states_. ///Pop and return the newest state in states_.
Event delegate() popState() @trusted Event delegate() @safe popState() @trusted
{ {
enforce(states_.length > 0, enforce(states_.length > 0,
new YAMLException("Parser: Need to pop state but no states left to pop")); new YAMLException("Parser: Need to pop state but no states left to pop"));
@ -347,7 +347,7 @@ final class Parser
while(scanner_.checkToken(TokenID.Directive)) while(scanner_.checkToken(TokenID.Directive))
{ {
const token = scanner_.getToken(); const token = scanner_.getToken();
const value = token.value; string value = token.value.idup;
if(token.directive == DirectiveType.YAML) if(token.directive == DirectiveType.YAML)
{ {
enforce(YAMLVersion_ is null, enforce(YAMLVersion_ is null,
@ -356,11 +356,11 @@ final class Parser
enforce(minor == "1", enforce(minor == "1",
new Error("Incompatible document (version 1.x is required)", new Error("Incompatible document (version 1.x is required)",
token.startMark)); token.startMark));
YAMLVersion_ = cast(string)value; YAMLVersion_ = value;
} }
else if(token.directive == DirectiveType.TAG) else if(token.directive == DirectiveType.TAG)
{ {
auto handle = cast(string)value[0 .. token.valueDivider]; auto handle = value[0 .. token.valueDivider];
foreach(ref pair; tagDirectives_) foreach(ref pair; tagDirectives_)
{ {
@ -370,7 +370,7 @@ final class Parser
token.startMark)); token.startMark));
} }
tagDirectives_ ~= tagDirectives_ ~=
TagDirective(handle, cast(string)value[token.valueDivider .. $]); TagDirective(handle, value[token.valueDivider .. $]);
} }
// Any other directive type is ignored (only YAML and TAG are in YAML // Any other directive type is ignored (only YAML and TAG are in YAML
// 1.1/1.2, any other directives are "reserved") // 1.1/1.2, any other directives are "reserved")
@ -432,7 +432,7 @@ final class Parser
uint tagHandleEnd; uint tagHandleEnd;
//Get anchor/tag if detected. Return false otherwise. //Get anchor/tag if detected. Return false otherwise.
bool get(const TokenID id, const Flag!"first" first, ref string target) bool get(const TokenID id, const Flag!"first" first, ref string target) @trusted
{ {
if(!scanner_.checkToken(id)){return false;} if(!scanner_.checkToken(id)){return false;}
invalidMarks = false; invalidMarks = false;
@ -536,7 +536,7 @@ final class Parser
/// Handle escape sequences in a double quoted scalar. /// Handle escape sequences in a double quoted scalar.
/// ///
/// Moved here from scanner as it can't always be done in-place with slices. /// Moved here from scanner as it can't always be done in-place with slices.
string handleDoubleQuotedScalarEscapes(char[] tokenValue) string handleDoubleQuotedScalarEscapes(char[] tokenValue) const @system
{ {
string notInPlace; string notInPlace;
bool inEscape = false; bool inEscape = false;
@ -623,7 +623,7 @@ final class Parser
*/ */
string processTag(const string tag, const uint handleEnd, string processTag(const string tag, const uint handleEnd,
const Mark startMark, const Mark tagMark) const Mark startMark, const Mark tagMark)
const @trusted const @safe
{ {
const handle = tag[0 .. handleEnd]; const handle = tag[0 .. handleEnd];
const suffix = tag[handleEnd .. $]; const suffix = tag[handleEnd .. $];
@ -829,7 +829,7 @@ final class Parser
} }
///Parse a key in flow context. ///Parse a key in flow context.
Event parseFlowKey(in Event delegate() nextState) @trusted Event parseFlowKey(in Event delegate() @safe nextState) @trusted
{ {
const token = scanner_.getToken(); const token = scanner_.getToken();
@ -851,7 +851,7 @@ final class Parser
} }
///Parse a mapping value in a flow context. ///Parse a mapping value in a flow context.
Event parseFlowValue(TokenID checkId, in Event delegate() nextState) Event parseFlowValue(TokenID checkId, in Event delegate() @safe nextState)
@trusted @trusted
{ {
if(scanner_.checkToken(TokenID.Value)) if(scanner_.checkToken(TokenID.Value))

View file

@ -220,7 +220,7 @@ void free(T)(T* ptr) @system nothrow
core.stdc.stdlib.free(ptr); core.stdc.stdlib.free(ptr);
} }
unittest @safe unittest
{ {
auto queue = Queue!int(); auto queue = Queue!int();
assert(queue.empty); assert(queue.empty);

View file

@ -94,7 +94,7 @@ final class Reader
/// ///
/// Throws: ReaderException on a UTF decoding error or if there are /// Throws: ReaderException on a UTF decoding error or if there are
/// nonprintable Unicode characters illegal in YAML. /// nonprintable Unicode characters illegal in YAML.
this(ubyte[] buffer) @trusted pure //!nothrow this(ubyte[] buffer) @trusted pure
{ {
auto endianResult = fixUTFByteOrder(buffer); auto endianResult = fixUTFByteOrder(buffer);
if(endianResult.bytesStripped > 0) if(endianResult.bytesStripped > 0)
@ -124,7 +124,6 @@ final class Reader
checkASCII(); checkASCII();
} }
pure nothrow @nogc:
/// Get character at specified index relative to current position. /// Get character at specified index relative to current position.
/// ///
/// Params: index = Index of the character to get relative to current position /// Params: index = Index of the character to get relative to current position
@ -135,7 +134,7 @@ pure nothrow @nogc:
/// ///
// XXX removed; search for 'risky' to find why. // XXX removed; search for 'risky' to find why.
// Throws: ReaderException if trying to read past the end of the buffer. // Throws: ReaderException if trying to read past the end of the buffer.
dchar peek(const size_t index) @safe dchar peek(const size_t index) @safe pure nothrow @nogc
{ {
if(index < upcomingASCII_) { return buffer_[bufferOffset_ + index]; } if(index < upcomingASCII_) { return buffer_[bufferOffset_ + index]; }
if(characterCount_ <= charIndex_ + index) if(characterCount_ <= charIndex_ + index)
@ -178,7 +177,7 @@ pure nothrow @nogc:
} }
/// Optimized version of peek() for the case where peek index is 0. /// Optimized version of peek() for the case where peek index is 0.
dchar peek() @safe dchar peek() @safe pure nothrow @nogc
{ {
if(upcomingASCII_ > 0) { return buffer_[bufferOffset_]; } if(upcomingASCII_ > 0) { return buffer_[bufferOffset_]; }
if(characterCount_ <= charIndex_) { return '\0'; } if(characterCount_ <= charIndex_) { return '\0'; }
@ -195,13 +194,13 @@ pure nothrow @nogc:
/// case, '\0' will be returned. /// case, '\0' will be returned.
/// ///
/// Returns: Byte at specified position or '\0' if outside of the buffer. /// Returns: Byte at specified position or '\0' if outside of the buffer.
char peekByte(const size_t index) @safe char peekByte(const size_t index) @safe pure nothrow @nogc
{ {
return characterCount_ > (charIndex_ + index) ? buffer_[bufferOffset_ + index] : '\0'; return characterCount_ > (charIndex_ + index) ? buffer_[bufferOffset_ + index] : '\0';
} }
/// Optimized version of peekByte() for the case where peek byte index is 0. /// Optimized version of peekByte() for the case where peek byte index is 0.
char peekByte() @safe char peekByte() @safe pure nothrow @nogc
{ {
return characterCount_ > charIndex_ ? buffer_[bufferOffset_] : '\0'; return characterCount_ > charIndex_ ? buffer_[bufferOffset_] : '\0';
} }
@ -218,7 +217,7 @@ pure nothrow @nogc:
/// slice will be shorter. /// slice will be shorter.
/// ///
/// Returns: Characters starting at current position or an empty slice if out of bounds. /// Returns: Characters starting at current position or an empty slice if out of bounds.
char[] prefix(const size_t length) @safe char[] prefix(const size_t length) @safe pure nothrow @nogc
{ {
return slice(length); return slice(length);
} }
@ -234,7 +233,7 @@ pure nothrow @nogc:
/// this. /// this.
/// ///
/// Returns: Bytes starting at current position. /// Returns: Bytes starting at current position.
char[] prefixBytes(const size_t length) @safe char[] prefixBytes(const size_t length) @safe pure nothrow @nogc
{ {
assert(length == 0 || bufferOffset_ + length < buffer_.length, assert(length == 0 || bufferOffset_ + length < buffer_.length,
"prefixBytes out of bounds"); "prefixBytes out of bounds");
@ -251,7 +250,7 @@ pure nothrow @nogc:
/// be shorter. /// be shorter.
/// ///
/// Returns: Slice into the internal buffer or an empty slice if out of bounds. /// Returns: Slice into the internal buffer or an empty slice if out of bounds.
char[] slice(const size_t end) @safe char[] slice(const size_t end) @safe pure nothrow @nogc
{ {
// Fast path in case the caller has already peek()ed all the way to end. // Fast path in case the caller has already peek()ed all the way to end.
if(end == lastDecodedCharOffset_) if(end == lastDecodedCharOffset_)
@ -279,7 +278,7 @@ pure nothrow @nogc:
/// ///
/// Throws: ReaderException if trying to read past the end of the buffer /// Throws: ReaderException if trying to read past the end of the buffer
/// or if invalid data is read. /// or if invalid data is read.
dchar get() @safe dchar get() @safe pure nothrow @nogc
{ {
const result = peek(); const result = peek();
forward(); forward();
@ -291,7 +290,7 @@ pure nothrow @nogc:
/// Params: length = Number of characters (code points, not bytes) to get. /// Params: length = Number of characters (code points, not bytes) to get.
/// ///
/// Returns: Characters starting at current position. /// Returns: Characters starting at current position.
char[] get(const size_t length) @safe char[] get(const size_t length) @safe pure nothrow @nogc
{ {
auto result = slice(length); auto result = slice(length);
forward(length); forward(length);
@ -301,7 +300,7 @@ pure nothrow @nogc:
/// Move current position forward. /// Move current position forward.
/// ///
/// Params: length = Number of characters to move position forward. /// Params: length = Number of characters to move position forward.
void forward(size_t length) @safe void forward(size_t length) @safe pure nothrow @nogc
{ {
mixin FastCharSearch!"\n\u0085\u2028\u2029"d search; mixin FastCharSearch!"\n\u0085\u2028\u2029"d search;
@ -356,7 +355,7 @@ pure nothrow @nogc:
} }
/// Move current position forward by one character. /// Move current position forward by one character.
void forward() @trusted void forward() @safe pure nothrow @nogc
{ {
++charIndex_; ++charIndex_;
lastDecodedBufferOffset_ = bufferOffset_; lastDecodedBufferOffset_ = bufferOffset_;
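The Reader hunks above spell out pure nothrow @nogc on each lookahead primitive instead of relying on the removed attribute label. A hedged usage sketch of the contract the doc comments describe (the input bytes are illustrative):

    import dyaml.reader : Reader;

    void readerSketch()
    {
        auto reader = new Reader(cast(ubyte[])"scalar: value\n".dup);
        assert(reader.peek()  == 's');        // current character; position unchanged
        assert(reader.peek(1) == 'c');        // lookahead by one code point
        assert(reader.prefix(6) == "scalar"); // slice of the next 6 characters
        reader.forward(6);                    // advance past "scalar"
        assert(reader.get() == ':');          // get() is peek() followed by forward()
    }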
@ -401,25 +400,24 @@ pure nothrow @nogc:
/// Used to build slices of read data in Reader; to avoid allocations. /// Used to build slices of read data in Reader; to avoid allocations.
SliceBuilder sliceBuilder; SliceBuilder sliceBuilder;
@safe pure nothrow @nogc:
/// Get a string describing current buffer position, used for error messages. /// Get a string describing current buffer position, used for error messages.
Mark mark() const { return Mark(line_, column_); } Mark mark() const pure nothrow @nogc @safe { return Mark(line_, column_); }
/// Get current line number. /// Get current line number.
uint line() const { return line_; } uint line() const @safe pure nothrow @nogc { return line_; }
/// Get current column number. /// Get current column number.
uint column() const { return column_; } uint column() const @safe pure nothrow @nogc { return column_; }
/// Get index of the current character in the buffer. /// Get index of the current character in the buffer.
size_t charIndex() const { return charIndex_; } size_t charIndex() const @safe pure nothrow @nogc { return charIndex_; }
/// Get encoding of the input buffer. /// Get encoding of the input buffer.
Encoding encoding() const { return encoding_; } Encoding encoding() const @safe pure nothrow @nogc { return encoding_; }
private: private:
// Update upcomingASCII_ (should be called after forward()ing over a UTF-8 sequence) // Update upcomingASCII_ (should be called after forward()ing over a UTF-8 sequence)
void checkASCII() void checkASCII() @safe pure nothrow @nogc
{ {
upcomingASCII_ = countASCII(buffer_[bufferOffset_ .. $]); upcomingASCII_ = countASCII(buffer_[bufferOffset_ .. $]);
} }
@ -428,7 +426,7 @@ private:
// lastDecodedCharOffset_/lastDecodedBufferOffset_ and update them. // lastDecodedCharOffset_/lastDecodedBufferOffset_ and update them.
// //
// Does not advance the buffer position. Used in peek() and slice(). // Does not advance the buffer position. Used in peek() and slice().
dchar decodeNext() dchar decodeNext() @safe pure nothrow @nogc
{ {
assert(lastDecodedBufferOffset_ < buffer_.length, assert(lastDecodedBufferOffset_ < buffer_.length,
"Attempted to decode past the end of YAML buffer"); "Attempted to decode past the end of YAML buffer");
@ -453,7 +451,6 @@ private:
/// See begin() documentation. /// See begin() documentation.
struct SliceBuilder struct SliceBuilder
{ {
pure nothrow @nogc:
private: private:
// No copying by the user. // No copying by the user.
@disable this(this); @disable this(this);
@ -474,7 +471,7 @@ private:
// The number of elements currently in endStack_. // The number of elements currently in endStack_.
size_t endStackUsed_ = 0; size_t endStackUsed_ = 0;
@safe const invariant() @safe const pure nothrow @nogc invariant()
{ {
if(!inProgress) { return; } if(!inProgress) { return; }
assert(end_ <= reader_.bufferOffset_, "Slice ends after buffer position"); assert(end_ <= reader_.bufferOffset_, "Slice ends after buffer position");
@ -482,7 +479,7 @@ private:
} }
// Is a slice currently being built? // Is a slice currently being built?
bool inProgress() @safe const bool inProgress() @safe const pure nothrow @nogc
{ {
assert(start_ == size_t.max ? end_ == size_t.max : end_ != size_t.max, assert(start_ == size_t.max ? end_ == size_t.max : end_ != size_t.max,
"start_/end_ are not consistent"); "start_/end_ are not consistent");
@ -500,7 +497,7 @@ public:
/// forward() move the position. E.g. it is valid to extend a slice by write()-ing /// forward() move the position. E.g. it is valid to extend a slice by write()-ing
/// a string just returned by get() - but not one returned by prefix() unless the /// a string just returned by get() - but not one returned by prefix() unless the
/// position has changed since the prefix() call. /// position has changed since the prefix() call.
void begin() @system void begin() @safe pure nothrow @nogc
{ {
assert(!inProgress, "Beginning a slice while another slice is being built"); assert(!inProgress, "Beginning a slice while another slice is being built");
assert(endStackUsed_ == 0, "Slice stack not empty at slice begin"); assert(endStackUsed_ == 0, "Slice stack not empty at slice begin");
@ -516,7 +513,7 @@ public:
/// ///
/// Returns a string; once a slice is finished it is definitive that its contents /// Returns a string; once a slice is finished it is definitive that its contents
/// will not be changed. /// will not be changed.
char[] finish() @system char[] finish() @safe pure nothrow @nogc
{ {
assert(inProgress, "finish called without begin"); assert(inProgress, "finish called without begin");
assert(endStackUsed_ == 0, "Finishing a slice with running transactions."); assert(endStackUsed_ == 0, "Finishing a slice with running transactions.");
@ -534,7 +531,7 @@ public:
/// end of the slice being built, the slice is extended (trivial operation). /// end of the slice being built, the slice is extended (trivial operation).
/// ///
/// See_Also: begin /// See_Also: begin
void write(char[] str) @system void write(char[] str) @trusted pure nothrow @nogc
{ {
assert(inProgress, "write called without begin"); assert(inProgress, "write called without begin");
assert(end_ <= reader_.bufferOffset_, assert(end_ <= reader_.bufferOffset_,
@ -561,7 +558,7 @@ public:
/// Data can only be written up to the current position in the Reader buffer. /// Data can only be written up to the current position in the Reader buffer.
/// ///
/// See_Also: begin /// See_Also: begin
void write(dchar c) @system void write(dchar c) @safe pure nothrow @nogc
{ {
assert(inProgress, "write called without begin"); assert(inProgress, "write called without begin");
if(c < 0x80) if(c < 0x80)
@ -588,7 +585,7 @@ public:
/// position = Position to insert the character at in code units, not code points. /// position = Position to insert the character at in code units, not code points.
/// Must be less than slice length(); a previously returned length() /// Must be less than slice length(); a previously returned length()
/// can be used. /// can be used.
void insert(const dchar c, const size_t position) @system void insert(const dchar c, const size_t position) @system pure nothrow @nogc
{ {
assert(inProgress, "insert called without begin"); assert(inProgress, "insert called without begin");
assert(start_ + position <= end_, "Trying to insert after the end of the slice"); assert(start_ + position <= end_, "Trying to insert after the end of the slice");
@ -612,7 +609,7 @@ public:
} }
/// Get the current length of the slice. /// Get the current length of the slice.
size_t length() @safe const size_t length() @safe const pure nothrow @nogc
{ {
return end_ - start_; return end_ - start_;
} }
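Taken together, these SliceBuilder hunks make begin(), finish() and the dchar overload of write() @safe, with the char[] overload of write() marked @trusted. A hedged sketch of the begin/write/finish pattern the doc comments describe (buildWord is an invented helper, not library code):

    // Sketch only: build a slice from data just read through the Reader.
    char[] buildWord(Reader reader)
    {
        reader.sliceBuilder.begin();         // start a slice at the current position
        char[] word = reader.get(4);         // read 4 characters, advancing the position
        reader.sliceBuilder.write(word);     // extending with data just returned by get() is valid
        return reader.sliceBuilder.finish(); // definitive contents of the slice
    }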
@ -622,7 +619,6 @@ public:
/// Can be used to save and revert back to slice state. /// Can be used to save and revert back to slice state.
struct Transaction struct Transaction
{ {
@system pure nothrow @nogc:
private: private:
// The slice builder affected by the transaction. // The slice builder affected by the transaction.
SliceBuilder* builder_ = null; SliceBuilder* builder_ = null;
@ -639,7 +635,7 @@ public:
/// ended either by commit()-ing or reverting through the destructor. /// ended either by commit()-ing or reverting through the destructor.
/// ///
/// Saves the current state of a slice. /// Saves the current state of a slice.
this(ref SliceBuilder builder) this(ref SliceBuilder builder) @system pure nothrow @nogc
{ {
builder_ = &builder; builder_ = &builder;
stackLevel_ = builder_.endStackUsed_; stackLevel_ = builder_.endStackUsed_;
@ -653,7 +649,7 @@ public:
/// ///
/// Does nothing for a default-initialized transaction (the transaction has not /// Does nothing for a default-initialized transaction (the transaction has not
/// been started yet). /// been started yet).
void commit() void commit() @safe pure nothrow @nogc
{ {
assert(!committed_, "Can't commit a transaction more than once"); assert(!committed_, "Can't commit a transaction more than once");
@ -667,7 +663,7 @@ public:
/// Destroy the transaction and revert it if it hasn't been committed yet. /// Destroy the transaction and revert it if it hasn't been committed yet.
/// ///
/// Does nothing for a default-initialized transaction. /// Does nothing for a default-initialized transaction.
~this() ~this() @safe pure nothrow @nogc
{ {
if(builder_ is null || committed_) { return; } if(builder_ is null || committed_) { return; }
assert(builder_.endStackUsed_ == stackLevel_ + 1, assert(builder_.endStackUsed_ == stackLevel_ + 1,
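Transaction keeps a @system constructor while commit() and the destructor become @safe. A hedged sketch of the save/commit/revert pattern the comments describe (scanOptionalPart and the tryScan delegate are invented; Reader and SliceBuilder are assumed importable from dyaml.reader):

    // Sketch only: keep the slice on success, let the destructor revert it otherwise.
    import dyaml.reader : Reader, SliceBuilder;

    bool scanOptionalPart(Reader reader, bool delegate(Reader) tryScan)
    {
        auto transaction = SliceBuilder.Transaction(reader.sliceBuilder); // save slice state
        const ok = tryScan(reader);       // may write() more characters into the slice
        if(ok) { transaction.commit(); }  // keep what was written
        return ok;
    }   // if not committed, the destructor reverts the slice here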
@ -681,7 +677,7 @@ private:
// Push the current end of the slice so we can revert to it if needed. // Push the current end of the slice so we can revert to it if needed.
// //
// Used by Transaction. // Used by Transaction.
void push() @system void push() @safe pure nothrow @nogc
{ {
assert(inProgress, "push called without begin"); assert(inProgress, "push called without begin");
assert(endStackUsed_ < endStack_.length, "Slice stack overflow"); assert(endStackUsed_ < endStack_.length, "Slice stack overflow");
@ -692,7 +688,7 @@ private:
// value, reverting changes since the old end was pushed. // value, reverting changes since the old end was pushed.
// //
// Used by Transaction. // Used by Transaction.
void pop() @system void pop() @safe pure nothrow @nogc
{ {
assert(inProgress, "pop called without begin"); assert(inProgress, "pop called without begin");
assert(endStackUsed_ > 0, "Trying to pop an empty slice stack"); assert(endStackUsed_ > 0, "Trying to pop an empty slice stack");
@ -703,7 +699,7 @@ private:
// changes made since pushing the old end. // changes made since pushing the old end.
// //
// Used by Transaction. // Used by Transaction.
void apply() @system void apply() @safe pure nothrow @nogc
{ {
assert(inProgress, "apply called without begin"); assert(inProgress, "apply called without begin");
assert(endStackUsed_ > 0, "Trying to apply an empty slice stack"); assert(endStackUsed_ > 0, "Trying to apply an empty slice stack");
@ -823,7 +819,7 @@ auto toUTF8(ubyte[] input, const UTFEncoding encoding) @safe pure nothrow
} }
/// Determine if all characters (code points, not bytes) in a string are printable. /// Determine if all characters (code points, not bytes) in a string are printable.
bool isPrintableValidUTF8(const char[] chars) @trusted pure nothrow @nogc bool isPrintableValidUTF8(const char[] chars) @safe pure nothrow @nogc
{ {
// This is oversized (only 128 entries are necessary) simply because having 256 // This is oversized (only 128 entries are necessary) simply because having 256
// entries improves performance... for some reason (alignment?) // entries improves performance... for some reason (alignment?)
@ -1041,7 +1037,7 @@ void test1Byte(R)()
// assert(collectException(reader.peek(2))); // assert(collectException(reader.peek(2)));
} }
unittest @system unittest
{ {
testEndian!Reader(); testEndian!Reader();
testPeekPrefixForward!Reader(); testPeekPrefixForward!Reader();

View file

@ -48,7 +48,7 @@ final class Representer
{ {
private: private:
// Representer functions indexed by types. // Representer functions indexed by types.
Node function(ref Node, Representer)[TypeInfo] representers_; Node function(ref Node, Representer) @safe[TypeInfo] representers_;
// Default style for scalar nodes. // Default style for scalar nodes.
ScalarStyle defaultScalarStyle_ = ScalarStyle.Invalid; ScalarStyle defaultScalarStyle_ = ScalarStyle.Invalid;
// Default style for collection nodes. // Default style for collection nodes.
@ -82,7 +82,7 @@ final class Representer
} }
///Destroy the Representer. ///Destroy the Representer.
pure @safe nothrow ~this() ~this() pure @safe nothrow
{ {
representers_.destroy(); representers_.destroy();
representers_ = null; representers_ = null;
@ -222,8 +222,8 @@ final class Representer
* } * }
* -------------------- * --------------------
*/ */
void addRepresenter(T)(Node function(ref Node, Representer) representer) void addRepresenter(T)(Node function(ref Node, Representer) @safe representer)
@trusted pure @safe pure
{ {
assert((typeid(T) in representers_) is null, assert((typeid(T) in representers_) is null,
"Representer function for data type " ~ T.stringof ~ "Representer function for data type " ~ T.stringof ~
@ -428,7 +428,7 @@ final class Representer
package: package:
//Represent a node based on its type, and return the represented result. //Represent a node based on its type, and return the represented result.
Node representData(ref Node data) @system Node representData(ref Node data) @safe
{ {
//User types are wrapped in YAMLObject. //User types are wrapped in YAMLObject.
auto type = data.isUserType ? data.as!YAMLObject.type : data.type; auto type = data.isUserType ? data.as!YAMLObject.type : data.type;
@ -454,7 +454,7 @@ final class Representer
} }
//Represent a node, serializing with specified Serializer. //Represent a node, serializing with specified Serializer.
void represent(ref Serializer serializer, ref Node node) @trusted void represent(ref Serializer serializer, ref Node node) @safe
{ {
auto data = representData(node); auto data = representData(node);
serializer.serialize(data); serializer.serialize(data);
@ -478,12 +478,12 @@ Node representString(ref Node node, Representer representer) @safe
} }
///Represent a bytes _node as a binary scalar. ///Represent a bytes _node as a binary scalar.
Node representBytes(ref Node node, Representer representer) @system Node representBytes(ref Node node, Representer representer) @safe
{ {
const ubyte[] value = node.as!(ubyte[]); const ubyte[] value = node.as!(ubyte[]);
if(value is null){return representNull(node, representer);} if(value is null){return representNull(node, representer);}
return representer.representScalar("tag:yaml.org,2002:binary", return representer.representScalar("tag:yaml.org,2002:binary",
cast(string)Base64.encode(value), Base64.encode(value).idup,
ScalarStyle.Literal); ScalarStyle.Literal);
} }
@ -495,14 +495,14 @@ Node representBool(ref Node node, Representer representer) @safe
} }
///Represent a long _node as an integer scalar. ///Represent a long _node as an integer scalar.
Node representLong(ref Node node, Representer representer) @system Node representLong(ref Node node, Representer representer) @safe
{ {
return representer.representScalar("tag:yaml.org,2002:int", return representer.representScalar("tag:yaml.org,2002:int",
to!string(node.as!long)); to!string(node.as!long));
} }
///Represent a real _node as a floating point scalar. ///Represent a real _node as a floating point scalar.
Node representReal(ref Node node, Representer representer) @system Node representReal(ref Node node, Representer representer) @safe
{ {
real f = node.as!real; real f = node.as!real;
string value = isNaN(f) ? ".nan": string value = isNaN(f) ? ".nan":
@ -516,7 +516,7 @@ Node representReal(ref Node node, Representer representer) @system
} }
///Represent a SysTime _node as a timestamp. ///Represent a SysTime _node as a timestamp.
Node representSysTime(ref Node node, Representer representer) @system Node representSysTime(ref Node node, Representer representer) @safe
{ {
return representer.representScalar("tag:yaml.org,2002:timestamp", return representer.representScalar("tag:yaml.org,2002:timestamp",
node.as!SysTime.toISOExtString()); node.as!SysTime.toISOExtString());
@ -545,15 +545,14 @@ Node representNodes(ref Node node, Representer representer) @safe
} }
///Represent a mapping _node as map/ordered map/pairs. ///Represent a mapping _node as map/ordered map/pairs.
Node representPairs(ref Node node, Representer representer) @system Node representPairs(ref Node node, Representer representer) @safe
{ {
auto pairs = node.as!(Node.Pair[]); auto pairs = node.as!(Node.Pair[]);
bool hasDuplicates(Node.Pair[] pairs) bool hasDuplicates(Node.Pair[] pairs) @safe
{ {
//TODO this should be replaced by something with deterministic memory allocation. //TODO this should be replaced by something with deterministic memory allocation.
auto keys = redBlackTree!Node(); auto keys = redBlackTree!Node();
scope(exit){keys.destroy();}
foreach(ref pair; pairs) foreach(ref pair; pairs)
{ {
if(pair.key in keys){return true;} if(pair.key in keys){return true;}
@ -562,7 +561,7 @@ Node representPairs(ref Node node, Representer representer) @system
return false; return false;
} }
Node[] mapToSequence(Node.Pair[] pairs) Node[] mapToSequence(Node.Pair[] pairs) @safe
{ {
Node[] nodes; Node[] nodes;
nodes.length = pairs.length; nodes.length = pairs.length;
@ -610,7 +609,7 @@ struct MyStruct
} }
} }
Node representMyStruct(ref Node node, Representer representer) @system Node representMyStruct(ref Node node, Representer representer) @safe
{ {
//The node is guaranteed to be MyStruct as we add representer for MyStruct. //The node is guaranteed to be MyStruct as we add representer for MyStruct.
auto value = node.as!MyStruct; auto value = node.as!MyStruct;
@ -658,14 +657,14 @@ class MyClass
} }
///Useful for Node.as!string . ///Useful for Node.as!string .
override string toString() @trusted override string toString() @safe
{ {
return format("MyClass(%s, %s, %s)", x, y, z); return format("MyClass(%s, %s, %s)", x, y, z);
} }
} }
//Same as representMyStruct. //Same as representMyStruct.
Node representMyClass(ref Node node, Representer representer) @system Node representMyClass(ref Node node, Representer representer) @safe
{ {
//The node is guaranteed to be MyClass as we add representer for MyClass. //The node is guaranteed to be MyClass as we add representer for MyClass.
auto value = node.as!MyClass; auto value = node.as!MyClass;
@ -677,7 +676,7 @@ Node representMyClass(ref Node node, Representer representer) @system
import dyaml.stream; import dyaml.stream;
unittest @safe unittest
{ {
foreach(r; [&representMyStruct, foreach(r; [&representMyStruct,
&representMyStructSeq, &representMyStructSeq,
@ -691,7 +690,7 @@ unittest
} }
} }
unittest @safe unittest
{ {
auto dumper = Dumper(new YMemoryStream()); auto dumper = Dumper(new YMemoryStream());
auto representer = new Representer; auto representer = new Representer;

View file

@ -70,7 +70,7 @@ final class Resolver
} }
///Destroy the Resolver. ///Destroy the Resolver.
pure @safe nothrow ~this() ~this() pure @safe nothrow
{ {
yamlImplicitResolvers_.destroy(); yamlImplicitResolvers_.destroy();
yamlImplicitResolvers_ = null; yamlImplicitResolvers_ = null;
@ -169,13 +169,13 @@ final class Resolver
else if(kind == NodeID.Mapping) {return defaultMappingTag_;} else if(kind == NodeID.Mapping) {return defaultMappingTag_;}
assert(false, "This line of code should never be reached"); assert(false, "This line of code should never be reached");
} }
unittest @safe unittest
{ {
writeln("D:YAML Resolver unittest"); writeln("D:YAML Resolver unittest");
auto resolver = new Resolver(); auto resolver = new Resolver();
bool tagMatch(string tag, string[] values) bool tagMatch(string tag, string[] values) @safe
{ {
string expected = tag; string expected = tag;
foreach(value; values) foreach(value; values)

View file

@ -171,7 +171,7 @@ final class Scanner
} }
/// Destroy the scanner. /// Destroy the scanner.
@trusted ~this() ~this() @trusted
{ {
tokens_.destroy(); tokens_.destroy();
indents_.destroy(); indents_.destroy();
@ -233,14 +233,14 @@ final class Scanner
private: private:
/// Build an error message in msgBuffer_ and return it as a string. /// Build an error message in msgBuffer_ and return it as a string.
string buildMsg(S ...)(S args) @trusted pure nothrow @nogc string buildMsg(S ...)(S args) @trusted
{ {
return cast(string)msgBuffer_.printNoGC(args); return cast(string)msgBuffer_.printNoGC(args);
} }
/// Most scanning error messages have the same format; so build them with this /// Most scanning error messages have the same format; so build them with this
/// function. /// function.
string expected(T)(string expected, T found) @safe pure nothrow @nogc string expected(T)(string expected, T found)
{ {
return buildMsg("expected ", expected, ", but found ", found); return buildMsg("expected ", expected, ", but found ", found);
} }
@ -489,7 +489,7 @@ final class Scanner
} }
/// Add DOCUMENT-START or DOCUMENT-END token. /// Add DOCUMENT-START or DOCUMENT-END token.
void fetchDocumentIndicator(TokenID id)() @safe void fetchDocumentIndicator(TokenID id)()
if(id == TokenID.DocumentStart || id == TokenID.DocumentEnd) if(id == TokenID.DocumentStart || id == TokenID.DocumentEnd)
{ {
// Set indentation to -1 . // Set indentation to -1 .
@ -508,7 +508,7 @@ final class Scanner
alias fetchDocumentIndicator!(TokenID.DocumentEnd) fetchDocumentEnd; alias fetchDocumentIndicator!(TokenID.DocumentEnd) fetchDocumentEnd;
/// Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token. /// Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
void fetchFlowCollectionStart(TokenID id)() @trusted void fetchFlowCollectionStart(TokenID id)() @safe
{ {
// '[' and '{' may start a simple key. // '[' and '{' may start a simple key.
savePossibleSimpleKey(); savePossibleSimpleKey();
@ -526,7 +526,7 @@ final class Scanner
alias fetchFlowCollectionStart!(TokenID.FlowMappingStart) fetchFlowMappingStart; alias fetchFlowCollectionStart!(TokenID.FlowMappingStart) fetchFlowMappingStart;
/// Add FLOW-SEQUENCE-END or FLOW-MAPPING-END token. /// Add FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
void fetchFlowCollectionEnd(TokenID id)() @safe void fetchFlowCollectionEnd(TokenID id)()
{ {
// Reset possible simple key on the current level. // Reset possible simple key on the current level.
removePossibleSimpleKey(); removePossibleSimpleKey();
@ -560,7 +560,7 @@ final class Scanner
/// ///
/// Params: type = String representing the token type we might need to add. /// Params: type = String representing the token type we might need to add.
/// id = Token type we might need to add. /// id = Token type we might need to add.
void blockChecks(string type, TokenID id)() @safe void blockChecks(string type, TokenID id)()
{ {
enum context = type ~ " keys are not allowed here"; enum context = type ~ " keys are not allowed here";
// Are we allowed to start a key (not necessarily a simple one)? // Are we allowed to start a key (not necessarily a simple one)?
@ -659,7 +659,7 @@ final class Scanner
} }
/// Add ALIAS or ANCHOR token. /// Add ALIAS or ANCHOR token.
void fetchAnchor_(TokenID id)() @trusted void fetchAnchor_(TokenID id)() @safe
if(id == TokenID.Alias || id == TokenID.Anchor) if(id == TokenID.Alias || id == TokenID.Anchor)
{ {
// ALIAS/ANCHOR could be a simple key. // ALIAS/ANCHOR could be a simple key.
@ -677,7 +677,7 @@ final class Scanner
alias fetchAnchor_!(TokenID.Anchor) fetchAnchor; alias fetchAnchor_!(TokenID.Anchor) fetchAnchor;
/// Add TAG token. /// Add TAG token.
void fetchTag() @trusted void fetchTag() @safe
{ {
//TAG could start a simple key. //TAG could start a simple key.
savePossibleSimpleKey(); savePossibleSimpleKey();
@ -689,7 +689,7 @@ final class Scanner
} }
/// Add block SCALAR token. /// Add block SCALAR token.
void fetchBlockScalar(ScalarStyle style)() @trusted void fetchBlockScalar(ScalarStyle style)() @safe
if(style == ScalarStyle.Literal || style == ScalarStyle.Folded) if(style == ScalarStyle.Literal || style == ScalarStyle.Folded)
{ {
// Reset possible simple key on the current level. // Reset possible simple key on the current level.
@ -707,7 +707,7 @@ final class Scanner
alias fetchBlockScalar!(ScalarStyle.Folded) fetchFolded; alias fetchBlockScalar!(ScalarStyle.Folded) fetchFolded;
/// Add quoted flow SCALAR token. /// Add quoted flow SCALAR token.
void fetchFlowScalar(ScalarStyle quotes)() @safe void fetchFlowScalar(ScalarStyle quotes)()
{ {
// A flow scalar could be a simple key. // A flow scalar could be a simple key.
savePossibleSimpleKey(); savePossibleSimpleKey();
@ -828,7 +828,7 @@ final class Scanner
/// characters into that slice. /// characters into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanAlphaNumericToSlice(string name)(const Mark startMark) @system void scanAlphaNumericToSlice(string name)(const Mark startMark)
{ {
size_t length = 0; size_t length = 0;
dchar c = reader_.peek(); dchar c = reader_.peek();
@ -855,7 +855,7 @@ final class Scanner
/// ///
/// Assumes that the caller is building a slice in Reader, and puts the scanned /// Assumes that the caller is building a slice in Reader, and puts the scanned
/// characters into that slice. /// characters into that slice.
void scanToNextBreakToSlice() @system void scanToNextBreakToSlice() @safe
{ {
uint length = 0; uint length = 0;
while(!searchAllBreaks.canFind(reader_.peek(length))) while(!searchAllBreaks.canFind(reader_.peek(length)))
@ -905,7 +905,7 @@ final class Scanner
} }
/// Scan directive token. /// Scan directive token.
Token scanDirective() @trusted Token scanDirective() @safe
{ {
Mark startMark = reader_.mark; Mark startMark = reader_.mark;
// Skip the '%'. // Skip the '%'.
@ -949,7 +949,7 @@ final class Scanner
/// characters into that slice. /// characters into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanDirectiveNameToSlice(const Mark startMark) @system void scanDirectiveNameToSlice(const Mark startMark) @safe
{ {
// Scan directive name. // Scan directive name.
scanAlphaNumericToSlice!"a directive"(startMark); scanAlphaNumericToSlice!"a directive"(startMark);
@ -966,7 +966,7 @@ final class Scanner
/// characters into that slice. /// characters into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanYAMLDirectiveValueToSlice(const Mark startMark) @system void scanYAMLDirectiveValueToSlice(const Mark startMark) @safe
{ {
findNextNonSpace(); findNextNonSpace();
@ -999,7 +999,7 @@ final class Scanner
/// characters into that slice. /// characters into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanYAMLDirectiveNumberToSlice(const Mark startMark) @system void scanYAMLDirectiveNumberToSlice(const Mark startMark) @safe
{ {
if(!isDigit(reader_.peek())) if(!isDigit(reader_.peek()))
{ {
@ -1023,7 +1023,7 @@ final class Scanner
/// Returns: Length of tag handle (which is before tag prefix) in scanned data /// Returns: Length of tag handle (which is before tag prefix) in scanned data
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
uint scanTagDirectiveValueToSlice(const Mark startMark) @system uint scanTagDirectiveValueToSlice(const Mark startMark) @safe
{ {
findNextNonSpace(); findNextNonSpace();
const startLength = reader_.sliceBuilder.length; const startLength = reader_.sliceBuilder.length;
@ -1042,7 +1042,7 @@ final class Scanner
/// characters into that slice. /// characters into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanTagDirectiveHandleToSlice(const Mark startMark) @system void scanTagDirectiveHandleToSlice(const Mark startMark) @safe
{ {
scanTagHandleToSlice!"directive"(startMark); scanTagHandleToSlice!"directive"(startMark);
if(error_) { return; } if(error_) { return; }
@ -1057,7 +1057,7 @@ final class Scanner
/// characters into that slice. /// characters into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanTagDirectivePrefixToSlice(const Mark startMark) @system void scanTagDirectivePrefixToSlice(const Mark startMark) @safe
{ {
scanTagURIToSlice!"directive"(startMark); scanTagURIToSlice!"directive"(startMark);
if(" \0\n\r\u0085\u2028\u2029"d.canFind(reader_.peek())) { return; } if(" \0\n\r\u0085\u2028\u2029"d.canFind(reader_.peek())) { return; }
@ -1094,7 +1094,7 @@ final class Scanner
/// Therefore we restrict aliases to ASCII alphanumeric characters. /// Therefore we restrict aliases to ASCII alphanumeric characters.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
Token scanAnchor(const TokenID id) @trusted Token scanAnchor(const TokenID id) @safe
{ {
const startMark = reader_.mark; const startMark = reader_.mark;
const dchar i = reader_.get(); const dchar i = reader_.get();
@ -1130,7 +1130,7 @@ final class Scanner
/// Scan a tag token. /// Scan a tag token.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
Token scanTag() @trusted Token scanTag() @safe
{ {
const startMark = reader_.mark; const startMark = reader_.mark;
dchar c = reader_.peek(1); dchar c = reader_.peek(1);
@ -1448,7 +1448,7 @@ final class Scanner
/// ///
/// Assumes that the caller is building a slice in Reader, and puts the scanned /// Assumes that the caller is building a slice in Reader, and puts the scanned
/// characters into that slice. /// characters into that slice.
Tuple!(uint, Mark) scanBlockScalarIndentationToSlice() @system Tuple!(uint, Mark) scanBlockScalarIndentationToSlice() @safe
{ {
uint maxIndent; uint maxIndent;
Mark endMark = reader_.mark; Mark endMark = reader_.mark;
@ -1472,7 +1472,7 @@ final class Scanner
/// ///
/// Assumes that the caller is building a slice in Reader, and puts the scanned /// Assumes that the caller is building a slice in Reader, and puts the scanned
/// characters into that slice. /// characters into that slice.
Mark scanBlockScalarBreaksToSlice(const uint indent) @trusted Mark scanBlockScalarBreaksToSlice(const uint indent) @safe
{ {
Mark endMark = reader_.mark; Mark endMark = reader_.mark;
@ -1490,7 +1490,7 @@ final class Scanner
/// Scan a quoted flow scalar token with specified quotes. /// Scan a quoted flow scalar token with specified quotes.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
Token scanFlowScalar(const ScalarStyle quotes) @trusted Token scanFlowScalar(const ScalarStyle quotes) @safe
{ {
const startMark = reader_.mark; const startMark = reader_.mark;
const quote = reader_.get(); const quote = reader_.get();
@ -1521,7 +1521,7 @@ final class Scanner
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanFlowScalarNonSpacesToSlice(const ScalarStyle quotes, const Mark startMark) void scanFlowScalarNonSpacesToSlice(const ScalarStyle quotes, const Mark startMark)
@system @safe
{ {
for(;;) with(ScalarStyle) for(;;) with(ScalarStyle)
{ {
@ -1635,7 +1635,7 @@ final class Scanner
/// spaces into that slice. /// spaces into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanFlowScalarSpacesToSlice(const Mark startMark) @system void scanFlowScalarSpacesToSlice(const Mark startMark) @safe
{ {
// Increase length as long as we see whitespace. // Increase length as long as we see whitespace.
size_t length = 0; size_t length = 0;
@ -1680,7 +1680,7 @@ final class Scanner
/// line breaks into that slice. /// line breaks into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
bool scanFlowScalarBreaksToSlice(const Mark startMark) @system bool scanFlowScalarBreaksToSlice(const Mark startMark) @safe
{ {
// True if at least one line break was found. // True if at least one line break was found.
bool anyBreaks; bool anyBreaks;
@ -1873,7 +1873,7 @@ final class Scanner
/// characters into that slice. /// characters into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanTagHandleToSlice(string name)(const Mark startMark) @system void scanTagHandleToSlice(string name)(const Mark startMark)
{ {
dchar c = reader_.peek(); dchar c = reader_.peek();
enum contextMsg = "While scanning a " ~ name; enum contextMsg = "While scanning a " ~ name;
@ -1910,7 +1910,7 @@ final class Scanner
/// characters into that slice. /// characters into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanTagURIToSlice(string name)(const Mark startMark) @trusted void scanTagURIToSlice(string name)(const Mark startMark)
{ {
// Note: we do not check if URI is well-formed. // Note: we do not check if URI is well-formed.
dchar c = reader_.peek(); dchar c = reader_.peek();
@ -1952,7 +1952,7 @@ final class Scanner
/// characters into that slice. /// characters into that slice.
/// ///
/// In case of an error, error_ is set. Use throwIfError() to handle this. /// In case of an error, error_ is set. Use throwIfError() to handle this.
void scanURIEscapesToSlice(string name)(const Mark startMark) @system void scanURIEscapesToSlice(string name)(const Mark startMark)
{ {
// URI escapes encode a UTF-8 string. We store UTF-8 code units here for // URI escapes encode a UTF-8 string. We store UTF-8 code units here for
// decoding into UTF-32. // decoding into UTF-32.
@ -1967,7 +1967,7 @@ final class Scanner
// //
// Returns the number of bytes used by the dchar on success, // Returns the number of bytes used by the dchar on success,
// size_t.max on failure. // size_t.max on failure.
static size_t getDchar(char[] bytes, Reader reader_) static size_t getDchar(char[] bytes, Reader reader_) @trusted
{ {
size_t nextChar; size_t nextChar;
dchar c; dchar c;
@ -1985,7 +1985,7 @@ final class Scanner
reader_.sliceBuilder.write(c); reader_.sliceBuilder.write(c);
if(bytes.length - nextChar > 0) if(bytes.length - nextChar > 0)
{ {
core.stdc.string.memmove(bytes.ptr, bytes.ptr + nextChar, core.stdc.string.memmove(&bytes[0], &bytes[nextChar],
bytes.length - nextChar); bytes.length - nextChar);
} }
return bytes.length - nextChar; return bytes.length - nextChar;
@ -2081,7 +2081,7 @@ final class Scanner
private: private:
/// A nothrow function that converts a dchar[] to a string. /// A nothrow function that converts a dchar[] to a string.
string utf32To8(C)(C[] str) @safe pure nothrow string utf32To8(C)(C[] str)
if(is(Unqual!C == dchar)) if(is(Unqual!C == dchar))
{ {
try { return str.to!string; } try { return str.to!string; }

View file

@ -82,7 +82,7 @@ struct Serializer
} }
///Destroy the Serializer. ///Destroy the Serializer.
@safe ~this() ~this() @safe
{ {
emitter_.emit(streamEndEvent(Mark(), Mark())); emitter_.emit(streamEndEvent(Mark(), Mark()));
YAMLVersion_.destroy(); YAMLVersion_.destroy();
@ -157,7 +157,7 @@ struct Serializer
} }
///Generate and return a new anchor. ///Generate and return a new anchor.
string generateAnchor() @trusted string generateAnchor() @safe
{ {
++lastAnchorID_; ++lastAnchorID_;
auto appender = appender!string(); auto appender = appender!string();
@ -166,7 +166,7 @@ struct Serializer
} }
///Serialize a node and all its subnodes. ///Serialize a node and all its subnodes.
void serializeNode(ref Node node) @trusted void serializeNode(ref Node node) @safe
{ {
//If the node has an anchor, emit an anchor (as aliasEvent) on the //If the node has an anchor, emit an anchor (as aliasEvent) on the
//first occurrence, save it in serializedNodes_, and emit an alias //first occurrence, save it in serializedNodes_, and emit an alias

View file

@ -31,10 +31,11 @@ immutable ubyte[][NBOMS] ByteOrderMarks =
interface YStream interface YStream
{ {
void writeExact(const void* buffer, size_t size); void writeExact(const void* buffer, size_t size);
size_t write(const(ubyte)[] buffer); void writeExact(const void[] buffer) @safe;
size_t write(const(char)[] str); size_t write(const(ubyte)[] buffer) @safe;
void flush(); size_t write(const(char)[] str) @safe;
@property bool writeable(); void flush() @safe;
@property bool writeable() @safe;
} }
class YMemoryStream : YStream class YMemoryStream : YStream
@ -46,20 +47,25 @@ class YMemoryStream : YStream
data ~= cast(ubyte[])buffer[0 .. size]; data ~= cast(ubyte[])buffer[0 .. size];
} }
size_t write(const(ubyte)[] buffer) void writeExact(const void[] buffer) @trusted
{
data ~= cast(ubyte[])buffer;
}
size_t write(const(ubyte)[] buffer) @safe
{ {
data ~= buffer; data ~= buffer;
return buffer.length; return buffer.length;
} }
size_t write(const(char)[] str) size_t write(const(char)[] str) @safe
{ {
return write(cast(const(ubyte)[])str); return write(cast(const(ubyte)[])str);
} }
void flush() {} void flush() @safe {}
@property bool writeable() { return true; } @property bool writeable() @safe { return true; }
} }
class YFile : YStream class YFile : YStream
@ -67,17 +73,17 @@ class YFile : YStream
static import std.stdio; static import std.stdio;
std.stdio.File file; std.stdio.File file;
this(string fn) this(string fn) @safe
{ {
this.file = std.stdio.File(fn, "w"); this.file = std.stdio.File(fn, "w");
} }
this(std.stdio.File file) this(std.stdio.File file) @safe
{ {
this.file = file; this.file = file;
} }
unittest @system unittest
{ {
import std.stdio : stdout; import std.stdio : stdout;
auto stream = new YFile(stdout); auto stream = new YFile(stdout);
@ -89,26 +95,31 @@ class YFile : YStream
this.file.rawWrite(cast(const) buffer[0 .. size]); this.file.rawWrite(cast(const) buffer[0 .. size]);
} }
size_t write(const(ubyte)[] buffer) void writeExact(const void[] buffer) @trusted
{
this.file.rawWrite(buffer);
}
size_t write(const(ubyte)[] buffer) @trusted
{ {
this.file.rawWrite(buffer); this.file.rawWrite(buffer);
return buffer.length; return buffer.length;
} }
size_t write(const(char)[] str) size_t write(const(char)[] str) @trusted
{ {
return write(cast(const(ubyte)[])str); return write(cast(ubyte[])str);
} }
void flush() void flush() @safe
{ {
this.file.flush(); this.file.flush();
} }
@property bool writeable() { return true; } @property bool writeable() @safe { return true; }
} }
unittest @safe unittest
{ {
import dyaml.dumper, dyaml.loader, dyaml.node; import dyaml.dumper, dyaml.loader, dyaml.node;
import std.file : readText, remove; import std.file : readText, remove;
@ -135,7 +146,7 @@ unittest
remove("output.yaml"); remove("output.yaml");
} }
unittest // #88, #89 @safe unittest // #88, #89
{ {
import dyaml.dumper, dyaml.loader; import dyaml.dumper, dyaml.loader;
import std.file : remove, read; import std.file : remove, read;
@ -147,5 +158,5 @@ unittest // #88, #89
dumper.YAMLVersion = null; // suppress directive dumper.YAMLVersion = null; // suppress directive
dumper.dump(Loader.fromString("Hello world".dup).load); dumper.dump(Loader.fromString("Hello world".dup).load);
assert (cast (char[]) fn.read()[0..3] == "Hel"); assert (fn.read()[0..3] == "Hel");
} }

View file

@ -19,6 +19,7 @@ import std.array;
import std.conv; import std.conv;
import std.file; import std.file;
import std.path; import std.path;
import std.traits;
import std.typecons; import std.typecons;
package: package:
@ -31,7 +32,7 @@ package:
* unittestExt = Extensions of data files needed for the unittest. * unittestExt = Extensions of data files needed for the unittest.
* skipExt = Extensions that must not be used for the unittest. * skipExt = Extensions that must not be used for the unittest.
*/ */
void run(F ...)(string testName, void function(bool, F) testFunction, void run(D)(string testName, D testFunction,
string[] unittestExt, string[] skipExt = []) string[] unittestExt, string[] skipExt = [])
{ {
immutable string dataDir = __FILE_FULL_PATH__.dirName ~ "/../../../test/data"; immutable string dataDir = __FILE_FULL_PATH__.dirName ~ "/../../../test/data";
@ -54,16 +55,25 @@ void run(F ...)(string testName, void function(bool, F) testFunction,
if(extensions.canFind(ext)){continue outer;} if(extensions.canFind(ext)){continue outer;}
} }
results ~= execute!F(testName, testFunction, filenames, verbose); results ~= execute(testName, testFunction, filenames, verbose);
} }
} }
else else
{ {
results ~= execute!F(testName, testFunction, cast(string[])[], verbose); results ~= execute(testName, testFunction, cast(string[])[], verbose);
} }
display(results, verbose); display(results, verbose);
} }
/**
* Prints an exception if verbosity is turned on.
* Params: e = Exception to print.
* verbose = Whether verbose mode is enabled.
*/
void printException(YAMLException e, bool verbose) @trusted
{
if(verbose) { writeln(typeid(e).toString(), "\n", e); }
}
private: private:
@ -85,7 +95,7 @@ alias Tuple!(string, "name", string[], "filenames", TestStatus, "kind", string,
* *
* Returns: Test input base filenames and their extensions. * Returns: Test input base filenames and their extensions.
*/ */
string[][string] findTestFilenames(const string dir) string[][string] findTestFilenames(const string dir) @trusted
{ {
//Groups of extensions indexed by base names. //Groups of extensions indexed by base names.
string[][string] names; string[][string] names;
@ -130,8 +140,8 @@ body
* *
* Returns: Information about the results of the unittest. * Returns: Information about the results of the unittest.
*/ */
Result execute(F ...)(const string testName, void function(bool, F) testFunction, Result execute(D)(const string testName, D testFunction,
string[] filenames, const bool verbose) string[] filenames, const bool verbose) @trusted
{ {
if(verbose) if(verbose)
{ {
@ -144,6 +154,7 @@ Result execute(F ...)(const string testName, void function(bool, F) testFunction
try try
{ {
//Convert filenames to parameters tuple and call the test function. //Convert filenames to parameters tuple and call the test function.
alias F = Parameters!D[1..$];
F parameters; F parameters;
stringsToTuple!(F.length - 1, F)(parameters, filenames); stringsToTuple!(F.length - 1, F)(parameters, filenames);
testFunction(verbose, parameters); testFunction(verbose, parameters);
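run() and execute() now take the test callable as a template parameter D and recover the file-name parameters with std.traits.Parameters, instead of threading a separate F... type list through every call. A minimal sketch of that extraction; testLoader's signature matches the one used later in this commit:

    import std.traits : Parameters;

    void testLoader(bool verbose, string dataFilename, string canonicalFilename) {}

    alias D = typeof(&testLoader);
    alias F = Parameters!D[1 .. $];   // drop the leading bool verbose parameter
    static assert(F.length == 2 && is(F[0] == string) && is(F[1] == string));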
@ -167,7 +178,7 @@ Result execute(F ...)(const string testName, void function(bool, F) testFunction
* Params: results = Unittest results. * Params: results = Unittest results.
* verbose = Print verbose output? * verbose = Print verbose output?
*/ */
void display(Result[] results, const bool verbose) void display(Result[] results, const bool verbose) @safe
{ {
if(results.length > 0 && !verbose){write("\n");} if(results.length > 0 && !verbose){write("\n");}

View file

@ -19,7 +19,7 @@ import dyaml.token;
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// dataFilename = YAML file to parse. /// dataFilename = YAML file to parse.
/// canonicalFilename = Another file to parse, in canonical YAML format. /// canonicalFilename = Another file to parse, in canonical YAML format.
void testParser(bool verbose, string dataFilename, string canonicalFilename) void testParser(bool verbose, string dataFilename, string canonicalFilename) @safe
{ {
auto dataEvents = Loader(dataFilename).parse(); auto dataEvents = Loader(dataFilename).parse();
auto canonicalEvents = Loader(canonicalFilename).parse(); auto canonicalEvents = Loader(canonicalFilename).parse();
@ -38,7 +38,7 @@ void testParser(bool verbose, string dataFilename, string canonicalFilename)
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// dataFilename = YAML file to load. /// dataFilename = YAML file to load.
/// canonicalFilename = Another file to load, in canonical YAML format. /// canonicalFilename = Another file to load, in canonical YAML format.
void testLoader(bool verbose, string dataFilename, string canonicalFilename) void testLoader(bool verbose, string dataFilename, string canonicalFilename) @safe
{ {
auto data = Loader(dataFilename).loadAll(); auto data = Loader(dataFilename).loadAll();
auto canonical = Loader(canonicalFilename).loadAll(); auto canonical = Loader(canonicalFilename).loadAll();
@ -62,7 +62,7 @@ void testLoader(bool verbose, string dataFilename, string canonicalFilename)
} }
unittest @safe unittest
{ {
writeln("D:YAML comparison unittest"); writeln("D:YAML comparison unittest");
run("testParser", &testParser, ["data", "canonical"]); run("testParser", &testParser, ["data", "canonical"]);
View file
@ -23,7 +23,7 @@ import dyaml.test.common;
Node[][string] expected; Node[][string] expected;
///Initialize expected. ///Initialize expected.
static this() static this() @safe
{ {
expected["aliases-cdumper-bug"] = constructAliasesCDumperBug(); expected["aliases-cdumper-bug"] = constructAliasesCDumperBug();
expected["construct-binary"] = constructBinary(); expected["construct-binary"] = constructBinary();
@ -64,15 +64,15 @@ Node.Pair pair(A, B)(A a, B b)
///Test cases: ///Test cases:
Node[] constructAliasesCDumperBug() Node[] constructAliasesCDumperBug() @safe
{ {
return [Node(["today", "today"])]; return [Node(["today", "today"])];
} }
Node[] constructBinary() Node[] constructBinary() @safe
{ {
auto canonical = cast(ubyte[])"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;"; auto canonical = "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;".representation;
auto generic = cast(ubyte[])"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;"; auto generic = "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;".representation;
auto description = "The binary value above is a tiny arrow encoded as a gif image."; auto description = "The binary value above is a tiny arrow encoded as a gif image.";
return [Node([pair("canonical", canonical), return [Node([pair("canonical", canonical),
@ -80,7 +80,7 @@ Node[] constructBinary()
pair("description", description)])]; pair("description", description)])];
} }
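For illustration, a hedged standalone sketch of why the cast was replaced: representation (from std.string) exposes the same bytes as a @safe, immutable view, whereas the cast form is rejected in @safe code:

import std.string : representation;

@safe unittest
{
    string gif = "GIF89a";                          // stands in for the full binary literal
    immutable(ubyte)[] bytes = gif.representation;  // @safe view of the string's bytes
    // auto old = cast(ubyte[]) gif;                // the old form: a cast, not allowed in @safe code
    assert(bytes[0] == 'G');
}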
Node[] constructBool() Node[] constructBool() @safe
{ {
const(bool) a = true; const(bool) a = true;
immutable(bool) b = true; immutable(bool) b = true;
@ -97,13 +97,13 @@ Node[] constructBool()
pair("but", [pair("y", "is a string"), pair("n", "is a string")])])]; pair("but", [pair("y", "is a string"), pair("n", "is a string")])])];
} }
Node[] constructCustom() Node[] constructCustom() @safe
{ {
return [Node([Node(new TestClass(1, 2, 3)), return [Node([Node(new TestClass(1, 2, 3)),
Node(TestStruct(10))])]; Node(TestStruct(10))])];
} }
Node[] constructFloat() Node[] constructFloat() @safe
{ {
return [Node([pair("canonical", cast(real)685230.15), return [Node([pair("canonical", cast(real)685230.15),
pair("exponential", cast(real)685230.15), pair("exponential", cast(real)685230.15),
@ -113,7 +113,7 @@ Node[] constructFloat()
pair("not a number", real.nan)])]; pair("not a number", real.nan)])];
} }
Node[] constructInt() Node[] constructInt() @safe
{ {
return [Node([pair("canonical", 685230L), return [Node([pair("canonical", 685230L),
pair("decimal", 685230L), pair("decimal", 685230L),
@ -123,7 +123,7 @@ Node[] constructInt()
pair("sexagesimal", 685230L)])]; pair("sexagesimal", 685230L)])];
} }
Node[] constructMap() Node[] constructMap() @safe
{ {
return [Node([pair("Block style", return [Node([pair("Block style",
[pair("Clark", "Evans"), [pair("Clark", "Evans"),
@ -135,7 +135,7 @@ Node[] constructMap()
pair("Oren", "Ben-Kiki")])])]; pair("Oren", "Ben-Kiki")])])];
} }
Node[] constructMerge() Node[] constructMerge() @safe
{ {
return [Node([Node([pair("x", 1L), pair("y", 2L)]), return [Node([Node([pair("x", 1L), pair("y", 2L)]),
Node([pair("x", 0L), pair("y", 2L)]), Node([pair("x", 0L), pair("y", 2L)]),
@ -147,7 +147,7 @@ Node[] constructMerge()
Node([pair("x", 1L), pair("label", "center/big"), pair("r", 10L), pair("y", 2L)])])]; Node([pair("x", 1L), pair("label", "center/big"), pair("r", 10L), pair("y", 2L)])])];
} }
Node[] constructNull() Node[] constructNull() @safe
{ {
return [Node(YAMLNull()), return [Node(YAMLNull()),
Node([pair("empty", YAMLNull()), Node([pair("empty", YAMLNull()),
@ -162,7 +162,7 @@ Node[] constructNull()
Node(YAMLNull())])])]; Node(YAMLNull())])])];
} }
Node[] constructOMap() Node[] constructOMap() @safe
{ {
return [Node([pair("Bestiary", return [Node([pair("Bestiary",
[pair("aardvark", "African pig-like ant eater. Ugly."), [pair("aardvark", "African pig-like ant eater. Ugly."),
@ -173,7 +173,7 @@ Node[] constructOMap()
pair("three", 3L)])])]; pair("three", 3L)])])];
} }
Node[] constructPairs() Node[] constructPairs() @safe
{ {
return [Node([pair("Block tasks", return [Node([pair("Block tasks",
Node([pair("meeting", "with team."), Node([pair("meeting", "with team."),
@ -185,7 +185,7 @@ Node[] constructPairs()
pair("meeting", "with boss")], "tag:yaml.org,2002:pairs"))])]; pair("meeting", "with boss")], "tag:yaml.org,2002:pairs"))])];
} }
Node[] constructSeq() Node[] constructSeq() @safe
{ {
return [Node([pair("Block style", return [Node([pair("Block style",
[Node("Mercury"), Node("Venus"), Node("Earth"), Node("Mars"), [Node("Mercury"), Node("Venus"), Node("Earth"), Node("Mars"),
@ -197,7 +197,7 @@ Node[] constructSeq()
Node("Pluto")])])]; Node("Pluto")])])];
} }
Node[] constructSet() Node[] constructSet() @safe
{ {
return [Node([pair("baseball players", return [Node([pair("baseball players",
[Node("Mark McGwire"), Node("Sammy Sosa"), Node("Ken Griffey")]), [Node("Mark McGwire"), Node("Sammy Sosa"), Node("Ken Griffey")]),
@ -205,22 +205,22 @@ Node[] constructSet()
[Node("Boston Red Sox"), Node("Detroit Tigers"), Node("New York Yankees")])])]; [Node("Boston Red Sox"), Node("Detroit Tigers"), Node("New York Yankees")])])];
} }
Node[] constructStrASCII() Node[] constructStrASCII() @safe
{ {
return [Node("ascii string")]; return [Node("ascii string")];
} }
Node[] constructStr() Node[] constructStr() @safe
{ {
return [Node([pair("string", "abcd")])]; return [Node([pair("string", "abcd")])];
} }
Node[] constructStrUTF8() Node[] constructStrUTF8() @safe
{ {
return [Node("\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430")]; return [Node("\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430")];
} }
Node[] constructTimestamp() Node[] constructTimestamp() @safe
{ {
alias DT = DateTime; alias DT = DateTime;
alias ST = SysTime; alias ST = SysTime;
@ -231,7 +231,7 @@ Node[] constructTimestamp()
pair("date (00:00:00Z)", ST(DT(2002, 12, 14), UTC()))])]; pair("date (00:00:00Z)", ST(DT(2002, 12, 14), UTC()))])];
} }
Node[] constructValue() Node[] constructValue() @safe
{ {
return[Node([pair("link with", return[Node([pair("link with",
[Node("library1.dll"), Node("library2.dll")])]), [Node("library1.dll"), Node("library2.dll")])]),
@ -240,7 +240,7 @@ Node[] constructValue()
Node([pair("=", "library2.dll"), pair("version", cast(real)2.3)])])])]; Node([pair("=", "library2.dll"), pair("version", cast(real)2.3)])])])];
} }
Node[] duplicateMergeKey() Node[] duplicateMergeKey() @safe
{ {
return [Node([pair("foo", "bar"), return [Node([pair("foo", "bar"),
pair("x", 1L), pair("x", 1L),
@ -249,7 +249,7 @@ Node[] duplicateMergeKey()
pair("t", 4L)])]; pair("t", 4L)])];
} }
Node[] floatRepresenterBug() Node[] floatRepresenterBug() @safe
{ {
return [Node([pair(cast(real)1.0, 1L), return [Node([pair(cast(real)1.0, 1L),
pair(real.infinity, 10L), pair(real.infinity, 10L),
@ -257,12 +257,12 @@ Node[] floatRepresenterBug()
pair(real.nan, 100L)])]; pair(real.nan, 100L)])];
} }
Node[] invalidSingleQuoteBug() Node[] invalidSingleQuoteBug() @safe
{ {
return [Node([Node("foo \'bar\'"), Node("foo\n\'bar\'")])]; return [Node([Node("foo \'bar\'"), Node("foo\n\'bar\'")])];
} }
Node[] moreFloats() Node[] moreFloats() @safe
{ {
return [Node([Node(cast(real)0.0), return [Node([Node(cast(real)0.0),
Node(cast(real)1.0), Node(cast(real)1.0),
@ -273,17 +273,17 @@ Node[] moreFloats()
Node(real.nan)])]; Node(real.nan)])];
} }
Node[] negativeFloatBug() Node[] negativeFloatBug() @safe
{ {
return [Node(cast(real)-1.0)]; return [Node(cast(real)-1.0)];
} }
Node[] singleDotFloatBug() Node[] singleDotFloatBug() @safe
{ {
return [Node(".")]; return [Node(".")];
} }
Node[] timestampBugs() Node[] timestampBugs() @safe
{ {
alias DT = DateTime; alias DT = DateTime;
alias ST = SysTime; alias ST = SysTime;
@ -296,22 +296,22 @@ Node[] timestampBugs()
Node(ST(DT(2005, 7, 8, 17, 35, 4), 5176000.dur!"hnsecs", UTC()))])]; Node(ST(DT(2005, 7, 8, 17, 35, 4), 5176000.dur!"hnsecs", UTC()))])];
} }
Node[] utf16be() Node[] utf16be() @safe
{ {
return [Node("UTF-16-BE")]; return [Node("UTF-16-BE")];
} }
Node[] utf16le() Node[] utf16le() @safe
{ {
return [Node("UTF-16-LE")]; return [Node("UTF-16-LE")];
} }
Node[] utf8() Node[] utf8() @safe
{ {
return [Node("UTF-8")]; return [Node("UTF-8")];
} }
Node[] utf8implicit() Node[] utf8implicit() @safe
{ {
return [Node("implicit UTF-8")]; return [Node("implicit UTF-8")];
} }
@ -321,7 +321,7 @@ class TestClass
{ {
int x, y, z; int x, y, z;
this(int x, int y, int z) this(int x, int y, int z) @safe
{ {
this.x = x; this.x = x;
this.y = y; this.y = y;
@ -330,7 +330,7 @@ class TestClass
//Any D:YAML type must have a custom opCmp operator. //Any D:YAML type must have a custom opCmp operator.
//This is used for ordering in mappings. //This is used for ordering in mappings.
override int opCmp(Object o) override int opCmp(Object o) @safe
{ {
TestClass s = cast(TestClass)o; TestClass s = cast(TestClass)o;
if(s is null){return -1;} if(s is null){return -1;}
@ -340,7 +340,7 @@ class TestClass
return 0; return 0;
} }
override string toString() override string toString() @safe
{ {
return format("TestClass(", x, ", ", y, ", ", z, ")"); return format("TestClass(", x, ", ", y, ", ", z, ")");
} }
@ -353,19 +353,19 @@ struct TestStruct
//Any D:YAML type must have a custom opCmp operator. //Any D:YAML type must have a custom opCmp operator.
//This is used for ordering in mappings. //This is used for ordering in mappings.
const int opCmp(ref const TestStruct s) const int opCmp(ref const TestStruct s) @safe
{ {
return value - s.value; return value - s.value;
} }
} }
///Constructor function for TestClass. ///Constructor function for TestClass.
TestClass constructClass(ref Node node) TestClass constructClass(ref Node node) @safe
{ {
return new TestClass(node["x"].as!int, node["y"].as!int, node["z"].as!int); return new TestClass(node["x"].as!int, node["y"].as!int, node["z"].as!int);
} }
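For illustration, a hedged sketch of how such a constructor function would be registered (assuming a Constructor.addConstructorMapping overload analogous to the scalar and sequence ones; the tag string is a placeholder, not taken from this diff):

auto constructor = new Constructor;
// the tag below is assumed for the example; the test data defines the real one
constructor.addConstructorMapping("!tag-for-testclass", &constructClass);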
Node representClass(ref Node node, Representer representer) Node representClass(ref Node node, Representer representer) @safe
{ {
auto value = node.as!TestClass; auto value = node.as!TestClass;
auto pairs = [Node.Pair("x", value.x), auto pairs = [Node.Pair("x", value.x),
@ -377,13 +377,13 @@ Node representClass(ref Node node, Representer representer)
} }
///Constructor function for TestStruct. ///Constructor function for TestStruct.
TestStruct constructStruct(ref Node node) TestStruct constructStruct(ref Node node) @safe
{ {
return TestStruct(to!int(node.as!string)); return TestStruct(to!int(node.as!string));
} }
///Representer function for TestStruct. ///Representer function for TestStruct.
Node representStruct(ref Node node, Representer representer) Node representStruct(ref Node node, Representer representer) @safe
{ {
string[] keys, values; string[] keys, values;
auto value = node.as!TestStruct; auto value = node.as!TestStruct;
@ -398,7 +398,7 @@ Node representStruct(ref Node node, Representer representer)
* codeDummy = Dummy .code filename, used to determine that the * codeDummy = Dummy .code filename, used to determine that the
* .data file with the same name should be used in this test. * .data file with the same name should be used in this test.
*/ */
void testConstructor(bool verbose, string dataFilename, string codeDummy) void testConstructor(bool verbose, string dataFilename, string codeDummy) @safe
{ {
string base = dataFilename.baseName.stripExtension; string base = dataFilename.baseName.stripExtension;
enforce((base in expected) !is null, enforce((base in expected) !is null,
@ -436,7 +436,7 @@ void testConstructor(bool verbose, string dataFilename, string codeDummy)
} }
unittest @safe unittest
{ {
writeln("D:YAML Constructor unittest"); writeln("D:YAML Constructor unittest");
run("testConstructor", &testConstructor, ["data", "code"]); run("testConstructor", &testConstructor, ["data", "code"]);
View file
@ -28,7 +28,7 @@ import dyaml.token;
/// events2 = Second event array to compare. /// events2 = Second event array to compare.
/// ///
/// Returns: true if the events are equivalent, false otherwise. /// Returns: true if the events are equivalent, false otherwise.
bool compareEvents(Event[] events1, Event[] events2) bool compareEvents(Event[] events1, Event[] events2) @system
{ {
if(events1.length != events2.length){return false;} if(events1.length != events2.length){return false;}
@ -79,7 +79,7 @@ bool compareEvents(Event[] events1, Event[] events2)
/// dataFilename = YAML file to parse. /// dataFilename = YAML file to parse.
/// canonicalFilename = Canonical YAML file used as dummy to determine /// canonicalFilename = Canonical YAML file used as dummy to determine
/// which data files to load. /// which data files to load.
void testEmitterOnData(bool verbose, string dataFilename, string canonicalFilename) void testEmitterOnData(bool verbose, string dataFilename, string canonicalFilename) @system
{ {
//Must exist due to Anchor, Tags reference counts. //Must exist due to Anchor, Tags reference counts.
auto loader = Loader(dataFilename); auto loader = Loader(dataFilename);
@ -108,7 +108,7 @@ void testEmitterOnData(bool verbose, string dataFilename, string canonicalFilena
/// ///
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// canonicalFilename = Canonical YAML file to parse. /// canonicalFilename = Canonical YAML file to parse.
void testEmitterOnCanonical(bool verbose, string canonicalFilename) void testEmitterOnCanonical(bool verbose, string canonicalFilename) @system
{ {
//Must exist due to Anchor, Tags reference counts. //Must exist due to Anchor, Tags reference counts.
auto loader = Loader(canonicalFilename); auto loader = Loader(canonicalFilename);
@ -141,7 +141,7 @@ void testEmitterOnCanonical(bool verbose, string canonicalFilename)
/// dataFilename = YAML file to parse. /// dataFilename = YAML file to parse.
/// canonicalFilename = Canonical YAML file used as dummy to determine /// canonicalFilename = Canonical YAML file used as dummy to determine
/// which data files to load. /// which data files to load.
void testEmitterStyles(bool verbose, string dataFilename, string canonicalFilename) void testEmitterStyles(bool verbose, string dataFilename, string canonicalFilename) @system
{ {
foreach(filename; [dataFilename, canonicalFilename]) foreach(filename; [dataFilename, canonicalFilename])
{ {
@ -194,7 +194,7 @@ void testEmitterStyles(bool verbose, string dataFilename, string canonicalFilena
} }
} }
unittest @system unittest
{ {
writeln("D:YAML Emitter unittest"); writeln("D:YAML Emitter unittest");
run("testEmitterOnData", &testEmitterOnData, ["data", "canonical"]); run("testEmitterOnData", &testEmitterOnData, ["data", "canonical"]);
View file
@ -19,7 +19,7 @@ import dyaml.test.common;
/// ///
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// errorFilename = File name to read from. /// errorFilename = File name to read from.
void testLoaderError(bool verbose, string errorFilename) void testLoaderError(bool verbose, string errorFilename) @safe
{ {
auto buffer = std.file.read(errorFilename); auto buffer = std.file.read(errorFilename);
@ -27,7 +27,7 @@ void testLoaderError(bool verbose, string errorFilename)
try { nodes = Loader(buffer).loadAll(); } try { nodes = Loader(buffer).loadAll(); }
catch(YAMLException e) catch(YAMLException e)
{ {
if(verbose) { writeln(typeid(e).toString(), "\n", e); } printException(e, verbose);
return; return;
} }
assert(false, "Expected an exception"); assert(false, "Expected an exception");
@ -37,7 +37,7 @@ void testLoaderError(bool verbose, string errorFilename)
/// ///
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// errorFilename = File name to read from. /// errorFilename = File name to read from.
void testLoaderErrorString(bool verbose, string errorFilename) void testLoaderErrorString(bool verbose, string errorFilename) @safe
{ {
// Load file to a buffer, then pass that to the YAML loader. // Load file to a buffer, then pass that to the YAML loader.
auto buffer = std.file.read(errorFilename); auto buffer = std.file.read(errorFilename);
@ -48,7 +48,7 @@ void testLoaderErrorString(bool verbose, string errorFilename)
} }
catch(YAMLException e) catch(YAMLException e)
{ {
if(verbose) { writeln(typeid(e).toString(), "\n", e); } printException(e, verbose);
return; return;
} }
assert(false, "Expected an exception"); assert(false, "Expected an exception");
@ -58,12 +58,12 @@ void testLoaderErrorString(bool verbose, string errorFilename)
/// ///
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// errorFilename = File name to read from. /// errorFilename = File name to read from.
void testLoaderErrorFilename(bool verbose, string errorFilename) void testLoaderErrorFilename(bool verbose, string errorFilename) @safe
{ {
try { auto nodes = Loader(errorFilename).loadAll(); } try { auto nodes = Loader(errorFilename).loadAll(); }
catch(YAMLException e) catch(YAMLException e)
{ {
if(verbose) { writeln(typeid(e).toString(), "\n", e); } printException(e, verbose);
return; return;
} }
assert(false, "testLoaderErrorSingle(" ~ verbose.to!string ~ assert(false, "testLoaderErrorSingle(" ~ verbose.to!string ~
@ -74,19 +74,18 @@ void testLoaderErrorFilename(bool verbose, string errorFilename)
/// ///
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// errorFilename = File name to read from. /// errorFilename = File name to read from.
void testLoaderErrorSingle(bool verbose, string errorFilename) void testLoaderErrorSingle(bool verbose, string errorFilename) @safe
{ {
try { auto nodes = Loader(errorFilename).load(); } try { auto nodes = Loader(errorFilename).load(); }
catch(YAMLException e) catch(YAMLException e)
{ {
if(verbose) { writeln(typeid(e).toString(), "\n", e); } printException(e, verbose);
return; return;
} }
assert(false, "Expected an exception"); assert(false, "Expected an exception");
} }
@safe unittest
unittest
{ {
writeln("D:YAML Errors unittest"); writeln("D:YAML Errors unittest");
run("testLoaderError", &testLoaderError, ["loader-error"]); run("testLoaderError", &testLoaderError, ["loader-error"]);
View file
@ -25,10 +25,10 @@ alias std.system.endian endian;
/// Params: wrong = Get the incorrect BOM for this system. /// Params: wrong = Get the incorrect BOM for this system.
/// ///
/// Returns: UTF-16 byte order mark. /// Returns: UTF-16 byte order mark.
wchar bom16(bool wrong = false) pure wchar bom16(bool wrong = false) pure @safe
{ {
wchar little = *(cast(wchar*)ByteOrderMarks[BOM.UTF16LE]); wchar little = '\uFEFF';
wchar big = *(cast(wchar*)ByteOrderMarks[BOM.UTF16BE]); wchar big = '\uFFFE';
if(!wrong){return endian == Endian.littleEndian ? little : big;} if(!wrong){return endian == Endian.littleEndian ? little : big;}
return endian == Endian.littleEndian ? big : little; return endian == Endian.littleEndian ? big : little;
} }
@ -38,10 +38,10 @@ wchar bom16(bool wrong = false) pure
/// Params: wrong = Get the incorrect BOM for this system. /// Params: wrong = Get the incorrect BOM for this system.
/// ///
/// Returns: UTF-32 byte order mark. /// Returns: UTF-32 byte order mark.
dchar bom32(bool wrong = false) pure dchar bom32(bool wrong = false) pure @safe
{ {
dchar little = *(cast(dchar*)ByteOrderMarks[BOM.UTF32LE]); dchar little = '\uFEFF';
dchar big = *(cast(dchar*)ByteOrderMarks[BOM.UTF32BE]); dchar big = '\uFFFE';
if(!wrong){return endian == Endian.littleEndian ? little : big;} if(!wrong){return endian == Endian.littleEndian ? little : big;}
return endian == Endian.littleEndian ? big : little; return endian == Endian.littleEndian ? big : little;
} }
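For illustration, a small hedged check of the two literals used above: U+FEFF is the byte order mark, and U+FFFE is the value a reader with mismatched byte order would decode it as:

@safe unittest
{
    assert(cast(ushort) '\uFEFF' == 0xFEFF);   // BOM read with the matching byte order
    assert(cast(ushort) '\uFFFE' == 0xFFFE);   // the same bytes read with the order swapped
}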
@ -50,7 +50,7 @@ dchar bom32(bool wrong = false) pure
/// ///
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// unicodeFilename = File name to read from. /// unicodeFilename = File name to read from.
void testUnicodeInput(bool verbose, string unicodeFilename) void testUnicodeInput(bool verbose, string unicodeFilename) @safe
{ {
string data = readText(unicodeFilename); string data = readText(unicodeFilename);
string expected = data.split().join(" "); string expected = data.split().join(" ");
@ -70,7 +70,7 @@ void testUnicodeInput(bool verbose, string unicodeFilename)
/// ///
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// unicodeFilename = File name to read from. /// unicodeFilename = File name to read from.
void testUnicodeInputErrors(bool verbose, string unicodeFilename) void testUnicodeInputErrors(bool verbose, string unicodeFilename) @safe
{ {
string data = readText(unicodeFilename); string data = readText(unicodeFilename);
foreach(buffer; [cast(void[])(data.to!(wchar[])), foreach(buffer; [cast(void[])(data.to!(wchar[])),
@ -81,7 +81,7 @@ void testUnicodeInputErrors(bool verbose, string unicodeFilename)
try { Loader(buffer).load(); } try { Loader(buffer).load(); }
catch(YAMLException e) catch(YAMLException e)
{ {
if(verbose) { writeln(typeid(e).toString(), "\n", e); } printException(e, verbose);
continue; continue;
} }
assert(false, "Expected an exception"); assert(false, "Expected an exception");
@ -89,7 +89,7 @@ void testUnicodeInputErrors(bool verbose, string unicodeFilename)
} }
unittest @safe unittest
{ {
writeln("D:YAML I/O unittest"); writeln("D:YAML I/O unittest");
run("testUnicodeInput", &testUnicodeInput, ["unicode"]); run("testUnicodeInput", &testUnicodeInput, ["unicode"]);
View file
@ -18,7 +18,7 @@ import dyaml.reader;
// //
// Params: verbose = Print verbose output? // Params: verbose = Print verbose output?
// data = Stream to read. // data = Stream to read.
void runReader(const bool verbose, void[] fileData) void runReader(const bool verbose, ubyte[] fileData) @safe
{ {
try try
{ {
@ -27,7 +27,7 @@ void runReader(const bool verbose, void[] fileData)
} }
catch(ReaderException e) catch(ReaderException e)
{ {
if(verbose) { writeln(typeid(e).toString(), "\n", e); } printException(e, verbose);
return; return;
} }
assert(false, "Expected an exception"); assert(false, "Expected an exception");
@ -38,13 +38,13 @@ void runReader(const bool verbose, void[] fileData)
/// ///
/// Params: verbose = Print verbose output? /// Params: verbose = Print verbose output?
/// errorFilename = File name to read from. /// errorFilename = File name to read from.
void testStreamError(bool verbose, string errorFilename) void testStreamError(bool verbose, string errorFilename) @trusted
{ {
import std.file; import std.file;
runReader(verbose, std.file.read(errorFilename)); runReader(verbose, cast(ubyte[])std.file.read(errorFilename));
} }
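For illustration, a hedged standalone sketch (the function names are invented) of the pattern used here: std.file.read returns void[], and the cast to ubyte[] is the step that keeps the wrapper @trusted rather than @safe:

import std.file : read;

void consume(ubyte[] data) @safe
{
    // stands in for runReader
}

void loadAndConsume(string filename) @trusted
{
    consume(cast(ubyte[]) read(filename));   // casting away void[] is the only unsafe step
}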
unittest @safe unittest
{ {
writeln("D:YAML Reader unittest"); writeln("D:YAML Reader unittest");
run("testStreamError", &testStreamError, ["stream-error"]); run("testStreamError", &testStreamError, ["stream-error"]);
View file
@ -24,7 +24,7 @@ import dyaml.test.constructor;
/// codeFilename = File name to determine test case from. /// codeFilename = File name to determine test case from.
/// Nothing is read from this file, it only exists /// Nothing is read from this file, it only exists
/// to specify that we need a matching unittest. /// to specify that we need a matching unittest.
void testRepresenterTypes(bool verbose, string codeFilename) void testRepresenterTypes(bool verbose, string codeFilename) @trusted
{ {
string baseName = codeFilename.baseName.stripExtension; string baseName = codeFilename.baseName.stripExtension;
enforce((baseName in dyaml.test.constructor.expected) !is null, enforce((baseName in dyaml.test.constructor.expected) !is null,
@ -77,7 +77,7 @@ void testRepresenterTypes(bool verbose, string codeFilename)
} }
} }
unittest @safe unittest
{ {
writeln("D:YAML Representer unittest"); writeln("D:YAML Representer unittest");
run("testRepresenterTypes", &testRepresenterTypes, ["code"]); run("testRepresenterTypes", &testRepresenterTypes, ["code"]);
View file
@ -23,7 +23,7 @@ import dyaml.test.common;
* dataFilename = File with unittest data. * dataFilename = File with unittest data.
* detectFilename = Dummy filename used to specify which data filenames to use. * detectFilename = Dummy filename used to specify which data filenames to use.
*/ */
void testImplicitResolver(bool verbose, string dataFilename, string detectFilename) void testImplicitResolver(bool verbose, string dataFilename, string detectFilename) @safe
{ {
string correctTag; string correctTag;
Node node; Node node;
@ -48,7 +48,7 @@ void testImplicitResolver(bool verbose, string dataFilename, string detectFilena
} }
unittest @safe unittest
{ {
writeln("D:YAML Resolver unittest"); writeln("D:YAML Resolver unittest");
run("testImplicitResolver", &testImplicitResolver, ["data", "detect"]); run("testImplicitResolver", &testImplicitResolver, ["data", "detect"]);
View file
@ -24,7 +24,7 @@ import dyaml.token;
* dataFilename = File to scan. * dataFilename = File to scan.
* tokensFilename = File containing expected tokens. * tokensFilename = File containing expected tokens.
*/ */
void testTokens(bool verbose, string dataFilename, string tokensFilename) void testTokens(bool verbose, string dataFilename, string tokensFilename) @safe
{ {
//representations of YAML tokens in tokens file. //representations of YAML tokens in tokens file.
auto replace = [TokenID.Directive : "%" , auto replace = [TokenID.Directive : "%" ,
@ -72,7 +72,7 @@ void testTokens(bool verbose, string dataFilename, string tokensFilename)
* dataFilename = File to scan. * dataFilename = File to scan.
* canonicalFilename = Another file to scan, in canonical YAML format. * canonicalFilename = Another file to scan, in canonical YAML format.
*/ */
void testScanner(bool verbose, string dataFilename, string canonicalFilename) void testScanner(bool verbose, string dataFilename, string canonicalFilename) @safe
{ {
foreach(filename; [dataFilename, canonicalFilename]) foreach(filename; [dataFilename, canonicalFilename])
{ {
@ -86,7 +86,7 @@ void testScanner(bool verbose, string dataFilename, string canonicalFilename)
} }
} }
unittest @safe unittest
{ {
writeln("D:YAML tokens unittest"); writeln("D:YAML tokens unittest");
run("testTokens", &testTokens, ["data", "tokens"]); run("testTokens", &testTokens, ["data", "tokens"]);
View file
@ -97,8 +97,6 @@ struct Token
static assert(Token.sizeof <= 32, "Token has unexpected size"); static assert(Token.sizeof <= 32, "Token has unexpected size");
@safe pure nothrow @nogc:
/// Construct a directive token. /// Construct a directive token.
/// ///
/// Params: start = Start position of the token. /// Params: start = Start position of the token.
@ -106,7 +104,7 @@ static assert(Token.sizeof <= 32, "Token has unexpected size");
/// value = Value of the token. /// value = Value of the token.
/// directive = Directive type (YAML or TAG in YAML 1.1). /// directive = Directive type (YAML or TAG in YAML 1.1).
Token directiveToken(const Mark start, const Mark end, char[] value, Token directiveToken(const Mark start, const Mark end, char[] value,
DirectiveType directive, const uint nameEnd) DirectiveType directive, const uint nameEnd) @safe pure nothrow @nogc
{ {
return Token(value, start, end, TokenID.Directive, ScalarStyle.init, Encoding.init, return Token(value, start, end, TokenID.Directive, ScalarStyle.init, Encoding.init,
directive, nameEnd); directive, nameEnd);
@ -127,7 +125,7 @@ Token simpleToken(TokenID id)(const Mark start, const Mark end)
/// Params: start = Start position of the token. /// Params: start = Start position of the token.
/// end = End position of the token. /// end = End position of the token.
/// encoding = Encoding of the stream. /// encoding = Encoding of the stream.
Token streamStartToken(const Mark start, const Mark end, const Encoding encoding) Token streamStartToken(const Mark start, const Mark end, const Encoding encoding) @safe pure nothrow @nogc
{ {
return Token(null, start, end, TokenID.StreamStart, ScalarStyle.Invalid, encoding); return Token(null, start, end, TokenID.StreamStart, ScalarStyle.Invalid, encoding);
} }
@ -168,7 +166,7 @@ alias simpleValueToken!(TokenID.Anchor) anchorToken;
/// end = End position of the token. /// end = End position of the token.
/// value = Value of the token. /// value = Value of the token.
/// style = Style of the token. /// style = Style of the token.
Token scalarToken(const Mark start, const Mark end, char[] value, const ScalarStyle style) Token scalarToken(const Mark start, const Mark end, char[] value, const ScalarStyle style) @safe pure nothrow @nogc
{ {
return Token(value, start, end, TokenID.Scalar, style); return Token(value, start, end, TokenID.Scalar, style);
} }
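For illustration, a hedged sketch of the two attribute styles this hunk switches between; the struct and member names are invented:

struct Blanket
{
@safe pure nothrow @nogc:
    int twice(int x) { return 2 * x; }   // covered by the label above
    int thrice(int x) { return 3 * x; }  // also covered, whether or not that is wanted
}

struct PerFunction
{
    // each declaration states its own guarantees, so one member can drop
    // an attribute (here @nogc, because it allocates) without affecting the others
    int twice(int x) @safe pure nothrow @nogc { return 2 * x; }
    int[] buffer(size_t n) @safe pure nothrow { return new int[](n); }
}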
View file
@ -47,7 +47,7 @@ auto decodeUTF(ubyte[] input, UTFEncoding encoding) @safe pure nothrow
// result = A Result struct to put decoded result and any error messages to. // result = A Result struct to put decoded result and any error messages to.
// //
// On error, result.errorMessage will be set. // On error, result.errorMessage will be set.
static void decode(C)(C[] input, ref Result result) @safe pure nothrow static void decode(C)(C[] input, ref Result result)
{ {
// End of part of input that contains complete characters that can be decoded. // End of part of input that contains complete characters that can be decoded.
const size_t end = endOfLastUTFSequence(input); const size_t end = endOfLastUTFSequence(input);
@ -107,7 +107,6 @@ auto decodeUTF(ubyte[] input, UTFEncoding encoding) @safe pure nothrow
// Determine the end of last UTF-8 or UTF-16 sequence in a raw buffer. // Determine the end of last UTF-8 or UTF-16 sequence in a raw buffer.
size_t endOfLastUTFSequence(C)(const C[] buffer) size_t endOfLastUTFSequence(C)(const C[] buffer)
@safe pure nothrow @nogc
{ {
static if(is(C == char)) static if(is(C == char))
{ {