clean up tests - print more useful messages on failure, bring closer to D style
parent 22267baaa7, commit 88eaf879ec
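For orientation: after this change, each test module declares its test functions as static nested functions inside a @safe unittest block and registers them with dyaml.test.common.run, and failures report their values through message-carrying asserts such as assertNodesEqual instead of verbose-mode printing. A minimal sketch of that shape, assuming a hypothetical module name and testExample function (run and assertNodesEqual are the helpers changed in this diff):

module dyaml.test.example;

version(unittest)
{

import dyaml : Loader, Node;
import dyaml.test.common : assertNodesEqual, run;

@safe unittest
{
    // Hypothetical test: load a data file and its canonical twin, then
    // compare the resulting nodes; on mismatch the assert message shows
    // both values via Node.debugString.
    static void testExample(string dataFilename, string canonicalFilename) @safe
    {
        auto data = Loader.fromFile(dataFilename).load();
        auto canonical = Loader.fromFile(canonicalFilename).load();
        assertNodesEqual(data, canonical);
    }
    // run() pairs up files from test/data by extension and calls
    // testExample once per ("data", "canonical") match; the old
    // testName string argument is gone.
    run(&testExample, ["data", "canonical"]);
}

} // version(unittest)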
@@ -9,122 +9,173 @@ module dyaml.test.common;
|
|||
version(unittest)
|
||||
{
|
||||
|
||||
public import std.conv;
|
||||
public import std.stdio;
|
||||
public import dyaml;
|
||||
import dyaml.node;
|
||||
import dyaml.event;
|
||||
|
||||
import core.exception;
|
||||
import std.algorithm;
|
||||
import std.array;
|
||||
import std.conv;
|
||||
import std.file;
|
||||
import std.range;
|
||||
import std.path;
|
||||
import std.traits;
|
||||
import std.typecons;
|
||||
|
||||
package:
|
||||
|
||||
debug(verbose)
|
||||
{
|
||||
enum verbose = true;
|
||||
enum quiet = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
enum verbose = false;
|
||||
debug(noisy) enum quiet = false;
|
||||
else enum quiet = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run an unittest.
|
||||
*
|
||||
* Params: testName = Name of the unittest.
|
||||
* testFunction = Unittest function.
|
||||
* unittestExt = Extensions of data files needed for the unittest.
|
||||
* skipExt = Extensions that must not be used for the unittest.
|
||||
Run a test.
|
||||
|
||||
Params:
|
||||
testFunction = Unittest function.
|
||||
unittestExt = Extensions of data files needed for the unittest.
|
||||
skipExt = Extensions that must not be used for the unittest.
|
||||
*/
|
||||
void run(D)(string testName, D testFunction,
|
||||
string[] unittestExt, string[] skipExt = [])
|
||||
void run(D)(D testFunction, string[] unittestExt, string[] skipExt = [])
|
||||
{
|
||||
immutable string dataDir = __FILE_FULL_PATH__.dirName ~ "/../../../test/data";
|
||||
auto testFilenames = findTestFilenames(dataDir);
|
||||
|
||||
Result[] results;
|
||||
if(unittestExt.length > 0)
|
||||
if (unittestExt.length > 0)
|
||||
{
|
||||
outer: foreach(base, extensions; testFilenames)
|
||||
outer: foreach (base, extensions; testFilenames)
|
||||
{
|
||||
string[] filenames;
|
||||
foreach(ext; unittestExt)
|
||||
foreach (ext; unittestExt)
|
||||
{
|
||||
if(!extensions.canFind(ext)){continue outer;}
|
||||
if (!extensions.canFind(ext))
|
||||
{
|
||||
continue outer;
|
||||
}
|
||||
filenames ~= base ~ '.' ~ ext;
|
||||
}
|
||||
foreach(ext; skipExt)
|
||||
foreach (ext; skipExt)
|
||||
{
|
||||
if(extensions.canFind(ext)){continue outer;}
|
||||
if (extensions.canFind(ext))
|
||||
{
|
||||
continue outer;
|
||||
}
|
||||
}
|
||||
|
||||
results ~= execute(testName, testFunction, filenames);
|
||||
execute(testFunction, filenames);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
results ~= execute(testName, testFunction, string[].init);
|
||||
execute(testFunction, string[].init);
|
||||
}
|
||||
display(results);
|
||||
}
|
||||
|
||||
// TODO: remove when a @safe ubyte[] file read can be done.
|
||||
/**
|
||||
Reads a file as an array of bytes.
|
||||
|
||||
Params:
|
||||
filename = Full path to file to read.
|
||||
|
||||
Returns: The file's data.
|
||||
*/
|
||||
ubyte[] readData(string filename) @trusted
|
||||
{
|
||||
import std.file : read;
|
||||
return cast(ubyte[])read(filename);
|
||||
}
|
||||
void assertNodesEqual(const scope Node gotNode, const scope Node expectedNode) @safe
|
||||
{
|
||||
import std.format : format;
|
||||
assert(gotNode == expectedNode, format!"got %s, expected %s"(gotNode.debugString, expectedNode.debugString));
|
||||
}
|
||||
|
||||
/**
|
||||
* Prints an exception if verbosity is turned on.
|
||||
* Params: e = Exception to print.
|
||||
*/
|
||||
void printException(YAMLException e) @trusted
|
||||
{
|
||||
static if(verbose) { writeln(typeid(e).toString(), "\n", e); }
|
||||
}
|
||||
Determine if events in events1 are equivalent to events in events2.
|
||||
|
||||
void printProgress(T...)(T params) @safe
|
||||
Params:
|
||||
events1 = A range of events to compare with.
|
||||
events2 = A second range of events to compare.
|
||||
|
||||
Returns: true if the events are equivalent, false otherwise.
|
||||
*/
|
||||
bool compareEvents(T, U)(T events1, U events2)
|
||||
if (isInputRange!T && isInputRange!U && is(ElementType!T == Event) && is(ElementType!U == Event))
|
||||
{
|
||||
static if(!quiet)
|
||||
foreach (e1, e2; zip(events1, events2))
|
||||
{
|
||||
writeln(params);
|
||||
//Different event types.
|
||||
if (e1.id != e2.id)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
//Different anchor (if applicable).
|
||||
if (e1.id.among!(EventID.sequenceStart, EventID.mappingStart, EventID.alias_, EventID.scalar)
|
||||
&& e1.anchor != e2.anchor)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
//Different collection tag (if applicable).
|
||||
if (e1.id.among!(EventID.sequenceStart, EventID.mappingStart) && e1.tag != e2.tag)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (e1.id == EventID.scalar)
|
||||
{
|
||||
//Different scalar tag (if applicable).
|
||||
if (!(e1.implicit || e2.implicit) && e1.tag != e2.tag)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
//Different scalar value.
|
||||
if (e1.value != e2.value)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
/**
|
||||
Throw an Error if events in events1 aren't equivalent to events in events2.
|
||||
|
||||
Params:
|
||||
events1 = First event array to compare.
|
||||
events2 = Second event array to compare.
|
||||
*/
|
||||
void assertEventsEqual(T, U)(T events1, U events2)
|
||||
if (isInputRange!T && isInputRange!U && is(ElementType!T == Event) && is(ElementType!U == Event))
|
||||
{
|
||||
auto events1Copy = events1.array;
|
||||
auto events2Copy = events2.array;
|
||||
assert(compareEvents(events1Copy, events2Copy), text("Got '", events1Copy, "', expected '", events2Copy, "'"));
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
///Unittest status.
|
||||
enum TestStatus
|
||||
{
|
||||
success, //Unittest passed.
|
||||
failure, //Unittest failed.
|
||||
error //There's an error in the unittest.
|
||||
}
|
||||
|
||||
///Unittest result.
|
||||
alias Result = Tuple!(string, "name", string[], "filenames", TestStatus, "kind", string, "info");
|
||||
|
||||
/**
|
||||
* Find unittest input filenames.
|
||||
*
|
||||
* Params: dir = Directory to look in.
|
||||
*
|
||||
* Returns: Test input base filenames and their extensions.
|
||||
*/
|
||||
Find unittest input filenames.
|
||||
|
||||
Params: dir = Directory to look in.
|
||||
|
||||
Returns: Test input base filenames and their extensions.
|
||||
*/
|
||||
//@trusted due to dirEntries
|
||||
string[][string] findTestFilenames(const string dir) @trusted
|
||||
{
|
||||
//Groups of extensions indexed by base names.
|
||||
string[][string] names;
|
||||
foreach(string name; dirEntries(dir, SpanMode.shallow))
|
||||
foreach (string name; dirEntries(dir, SpanMode.shallow))
|
||||
{
|
||||
if(isFile(name))
|
||||
if (isFile(name))
|
||||
{
|
||||
string base = name.stripExtension();
|
||||
string ext = name.extension();
|
||||
if(ext is null){ext = "";}
|
||||
if(ext[0] == '.'){ext = ext[1 .. $];}
|
||||
string ext = name.extension();
|
||||
if (ext is null)
|
||||
{
|
||||
ext = "";
|
||||
}
|
||||
if (ext[0] == '.')
|
||||
{
|
||||
ext = ext[1 .. $];
|
||||
}
|
||||
|
||||
//If the base name doesn't exist yet, add it; otherwise add new extension.
|
||||
names[base] = ((base in names) is null) ? [ext] : names[base] ~ ext;
|
||||
|
@@ -134,98 +185,39 @@ string[][string] findTestFilenames(const string dir) @trusted
|
|||
}
|
||||
|
||||
/**
|
||||
* Recursively copy an array of strings to a tuple to use for unittest function input.
|
||||
*
|
||||
* Params: index = Current index in the array/tuple.
|
||||
* tuple = Tuple to copy to.
|
||||
* strings = Strings to copy.
|
||||
*/
|
||||
Recursively copy an array of strings to a tuple to use for unittest function input.
|
||||
|
||||
Params:
|
||||
index = Current index in the array/tuple.
|
||||
tuple = Tuple to copy to.
|
||||
strings = Strings to copy.
|
||||
*/
|
||||
void stringsToTuple(uint index, F ...)(ref F tuple, const string[] strings)
|
||||
in{assert(F.length == strings.length);}
|
||||
in(F.length == strings.length)
|
||||
do
|
||||
{
|
||||
tuple[index] = strings[index];
|
||||
static if(index > 0){stringsToTuple!(index - 1, F)(tuple, strings);}
|
||||
static if (index > 0)
|
||||
{
|
||||
stringsToTuple!(index - 1, F)(tuple, strings);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute an unittest on specified files.
|
||||
*
|
||||
* Params: testName = Name of the unittest.
|
||||
* testFunction = Unittest function.
|
||||
* filenames = Names of input files to test with.
|
||||
*
|
||||
* Returns: Information about the results of the unittest.
|
||||
Execute an unittest on specified files.
|
||||
|
||||
Params:
|
||||
testName = Name of the unittest.
|
||||
testFunction = Unittest function.
|
||||
filenames = Names of input files to test with.
|
||||
*/
|
||||
Result execute(D)(const string testName, D testFunction,
|
||||
string[] filenames) @trusted
|
||||
void execute(D)(D testFunction, string[] filenames)
|
||||
{
|
||||
static if(verbose)
|
||||
{
|
||||
writeln("===========================================================================");
|
||||
writeln(testName ~ "(" ~ filenames.join(", ") ~ ")...");
|
||||
}
|
||||
|
||||
auto kind = TestStatus.success;
|
||||
string info = "";
|
||||
try
|
||||
{
|
||||
//Convert filenames to parameters tuple and call the test function.
|
||||
alias F = Parameters!D[0..$];
|
||||
F parameters;
|
||||
stringsToTuple!(F.length - 1, F)(parameters, filenames);
|
||||
testFunction(parameters);
|
||||
static if (!quiet){write(".");}
|
||||
}
|
||||
catch(Throwable e)
|
||||
{
|
||||
info = to!string(typeid(e)) ~ "\n" ~ to!string(e);
|
||||
kind = (typeid(e) is typeid(AssertError)) ? TestStatus.failure : TestStatus.error;
|
||||
write((verbose ? to!string(e) : to!string(kind)) ~ " ");
|
||||
}
|
||||
|
||||
stdout.flush();
|
||||
|
||||
return Result(testName, filenames, kind, info);
|
||||
}
|
||||
|
||||
/**
|
||||
* Display unittest results.
|
||||
*
|
||||
* Params: results = Unittest results.
|
||||
*/
|
||||
void display(Result[] results) @safe
|
||||
{
|
||||
if(results.length > 0 && !verbose && !quiet){write("\n");}
|
||||
|
||||
size_t failures, errors;
|
||||
|
||||
static if(verbose)
|
||||
{
|
||||
writeln("===========================================================================");
|
||||
}
|
||||
//Results of each test.
|
||||
foreach(result; results)
|
||||
{
|
||||
static if(verbose)
|
||||
{
|
||||
writeln(result.name, "(" ~ result.filenames.join(", ") ~ "): ",
|
||||
to!string(result.kind));
|
||||
}
|
||||
|
||||
if(result.kind == TestStatus.success){continue;}
|
||||
|
||||
if(result.kind == TestStatus.failure){++failures;}
|
||||
else if(result.kind == TestStatus.error){++errors;}
|
||||
writeln(result.info);
|
||||
writeln("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
|
||||
}
|
||||
|
||||
//Totals.
|
||||
printProgress("===========================================================================");
|
||||
printProgress("TESTS: ", results.length);
|
||||
if(failures > 0){writeln("FAILURES: ", failures);}
|
||||
if(errors > 0) {writeln("ERRORS: ", errors);}
|
||||
//Convert filenames to parameters tuple and call the test function.
|
||||
alias F = Parameters!D[0..$];
|
||||
F parameters;
|
||||
stringsToTuple!(F.length - 1, F)(parameters, filenames);
|
||||
testFunction(parameters);
|
||||
}
|
||||
|
||||
} // version(unittest)
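The argument plumbing inside execute above is worth calling out: Parameters!D yields the test function's parameter types, a value of that type tuple is declared, and stringsToTuple copies the filename strings into it before the call. A standalone sketch of the same mechanism, assuming a made-up greet function and argument list:

import std.traits : Parameters;

void greet(string name, string greeting)
{
    import std.stdio : writefln;
    writefln("%s, %s!", greeting, name);
}

// Calls func with the strings in args as its parameters, mirroring how
// execute() turns test data filenames into arguments.
void callWithStrings(D)(D func, const string[] args)
{
    alias F = Parameters!D;     // e.g. (string, string) for greet
    assert(F.length == args.length);
    F params;                   // declares one variable per parameter type
    static foreach (i; 0 .. F.length)
    {
        params[i] = args[i];    // copy each string into its slot
    }
    func(params);               // the tuple expands into the argument list
}

void main()
{
    callWithStrings(&greet, ["D community", "Hello"]);
}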
|
||||
|
|
|
@@ -6,62 +6,46 @@
|
|||
|
||||
module dyaml.test.compare;
|
||||
|
||||
|
||||
version(unittest)
|
||||
{
|
||||
|
||||
import dyaml.test.common;
|
||||
import dyaml.test.emitter;
|
||||
import dyaml.token;
|
||||
|
||||
|
||||
/// Test parser by comparing output from parsing two equivalent YAML files.
|
||||
///
|
||||
/// Params: dataFilename = YAML file to parse.
|
||||
/// canonicalFilename = Another file to parse, in canonical YAML format.
|
||||
void testParser(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
auto dataEvents = Loader.fromFile(dataFilename).parse();
|
||||
auto canonicalEvents = Loader.fromFile(canonicalFilename).parse();
|
||||
|
||||
compareEvents(dataEvents, canonicalEvents);
|
||||
}
|
||||
|
||||
|
||||
/// Test loader by comparing output from loading two equivalent YAML files.
|
||||
///
|
||||
/// Params: dataFilename = YAML file to load.
|
||||
/// canonicalFilename = Another file to load, in canonical YAML format.
|
||||
void testLoader(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
import std.array : array;
|
||||
auto data = Loader.fromFile(dataFilename).array;
|
||||
auto canonical = Loader.fromFile(canonicalFilename).array;
|
||||
|
||||
assert(data.length == canonical.length, "Unequal node count");
|
||||
foreach(n; 0 .. data.length)
|
||||
{
|
||||
if(data[n] != canonical[n])
|
||||
{
|
||||
static if(verbose)
|
||||
{
|
||||
writeln("Normal value:");
|
||||
writeln(data[n].debugString);
|
||||
writeln("\n");
|
||||
writeln("Canonical value:");
|
||||
writeln(canonical[n].debugString);
|
||||
}
|
||||
assert(false, "testLoader(" ~ dataFilename ~ ", " ~ canonicalFilename ~ ") failed");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@safe unittest
|
||||
{
|
||||
printProgress("D:YAML comparison unittest");
|
||||
run("testParser", &testParser, ["data", "canonical"]);
|
||||
run("testLoader", &testLoader, ["data", "canonical"], ["test_loader_skip"]);
|
||||
}
|
||||
import dyaml : Loader;
|
||||
import dyaml.test.common : assertNodesEqual, compareEvents, run;
|
||||
|
||||
} // version(unittest)
|
||||
/**
|
||||
Test parser by comparing output from parsing two equivalent YAML files.
|
||||
|
||||
Params:
|
||||
dataFilename = YAML file to parse.
|
||||
canonicalFilename = Another file to parse, in canonical YAML format.
|
||||
*/
|
||||
static void testParser(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
auto dataEvents = Loader.fromFile(dataFilename).parse();
|
||||
auto canonicalEvents = Loader.fromFile(canonicalFilename).parse();
|
||||
|
||||
//BUG: the return value isn't checked! This test currently fails...
|
||||
compareEvents(dataEvents, canonicalEvents);
|
||||
}
|
||||
|
||||
/**
|
||||
Test loader by comparing output from loading two equivalent YAML files.
|
||||
|
||||
Params:
|
||||
dataFilename = YAML file to load.
|
||||
canonicalFilename = Another file to load, in canonical YAML format.
|
||||
*/
|
||||
static void testLoader(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
import std.array : array;
|
||||
auto data = Loader.fromFile(dataFilename).array;
|
||||
auto canonical = Loader.fromFile(canonicalFilename).array;
|
||||
|
||||
assert(data.length == canonical.length, "Unequal node count");
|
||||
foreach (n; 0 .. data.length)
|
||||
{
|
||||
assertNodesEqual(data[n], canonical[n]);
|
||||
}
|
||||
}
|
||||
run(&testParser, ["data", "canonical"]);
|
||||
run(&testLoader, ["data", "canonical"], ["test_loader_skip"]);
|
||||
}
|
||||
|
|
|
@@ -10,14 +10,14 @@ module dyaml.test.constructor;
|
|||
version(unittest)
|
||||
{
|
||||
|
||||
import std.conv;
|
||||
import std.datetime;
|
||||
import std.exception;
|
||||
import std.path;
|
||||
import std.string;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.test.common;
|
||||
|
||||
import dyaml : Loader, Node, YAMLNull;
|
||||
|
||||
///Expected results of loading test inputs.
|
||||
Node[][string] expected;
|
||||
|
@@ -25,35 +25,35 @@ Node[][string] expected;
|
|||
///Initialize expected.
|
||||
static this() @safe
|
||||
{
|
||||
expected["aliases-cdumper-bug"] = constructAliasesCDumperBug();
|
||||
expected["construct-binary"] = constructBinary();
|
||||
expected["construct-bool"] = constructBool();
|
||||
expected["construct-custom"] = constructCustom();
|
||||
expected["construct-float"] = constructFloat();
|
||||
expected["construct-int"] = constructInt();
|
||||
expected["construct-map"] = constructMap();
|
||||
expected["construct-merge"] = constructMerge();
|
||||
expected["construct-null"] = constructNull();
|
||||
expected["construct-omap"] = constructOMap();
|
||||
expected["construct-pairs"] = constructPairs();
|
||||
expected["construct-seq"] = constructSeq();
|
||||
expected["construct-set"] = constructSet();
|
||||
expected["construct-str-ascii"] = constructStrASCII();
|
||||
expected["construct-str"] = constructStr();
|
||||
expected["construct-str-utf8"] = constructStrUTF8();
|
||||
expected["construct-timestamp"] = constructTimestamp();
|
||||
expected["construct-value"] = constructValue();
|
||||
expected["duplicate-merge-key"] = duplicateMergeKey();
|
||||
expected["float-representer-2.3-bug"] = floatRepresenterBug();
|
||||
expected["invalid-single-quote-bug"] = invalidSingleQuoteBug();
|
||||
expected["more-floats"] = moreFloats();
|
||||
expected["negative-float-bug"] = negativeFloatBug();
|
||||
expected["aliases-cdumper-bug"] = constructAliasesCDumperBug();
|
||||
expected["construct-binary"] = constructBinary();
|
||||
expected["construct-bool"] = constructBool();
|
||||
expected["construct-custom"] = constructCustom();
|
||||
expected["construct-float"] = constructFloat();
|
||||
expected["construct-int"] = constructInt();
|
||||
expected["construct-map"] = constructMap();
|
||||
expected["construct-merge"] = constructMerge();
|
||||
expected["construct-null"] = constructNull();
|
||||
expected["construct-omap"] = constructOMap();
|
||||
expected["construct-pairs"] = constructPairs();
|
||||
expected["construct-seq"] = constructSeq();
|
||||
expected["construct-set"] = constructSet();
|
||||
expected["construct-str-ascii"] = constructStrASCII();
|
||||
expected["construct-str"] = constructStr();
|
||||
expected["construct-str-utf8"] = constructStrUTF8();
|
||||
expected["construct-timestamp"] = constructTimestamp();
|
||||
expected["construct-value"] = constructValue();
|
||||
expected["duplicate-merge-key"] = duplicateMergeKey();
|
||||
expected["float-representer-2.3-bug"] = floatRepresenterBug();
|
||||
expected["invalid-single-quote-bug"] = invalidSingleQuoteBug();
|
||||
expected["more-floats"] = moreFloats();
|
||||
expected["negative-float-bug"] = negativeFloatBug();
|
||||
expected["single-dot-is-not-float-bug"] = singleDotFloatBug();
|
||||
expected["timestamp-bugs"] = timestampBugs();
|
||||
expected["utf16be"] = utf16be();
|
||||
expected["utf16le"] = utf16le();
|
||||
expected["utf8"] = utf8();
|
||||
expected["utf8-implicit"] = utf8implicit();
|
||||
expected["timestamp-bugs"] = timestampBugs();
|
||||
expected["utf16be"] = utf16be();
|
||||
expected["utf16le"] = utf16le();
|
||||
expected["utf8"] = utf8();
|
||||
expected["utf8-implicit"] = utf8implicit();
|
||||
}
|
||||
|
||||
///Construct a pair of nodes with specified values.
|
||||
|
@@ -78,8 +78,8 @@ Node[] constructAliasesCDumperBug() @safe
|
|||
|
||||
Node[] constructBinary() @safe
|
||||
{
|
||||
auto canonical = "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;".representation.dup;
|
||||
auto generic = "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;".representation.dup;
|
||||
auto canonical = "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;".representation.dup;
|
||||
auto generic = "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;".representation.dup;
|
||||
auto description = "The binary value above is a tiny arrow encoded as a gif image.";
|
||||
|
||||
return [
|
||||
|
@@ -921,49 +921,37 @@ struct TestStruct
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor unittest.
|
||||
*
|
||||
* Params: dataFilename = File name to read from.
|
||||
* codeDummy = Dummy .code filename, used to determine that
|
||||
* .data file with the same name should be used in this test.
|
||||
*/
|
||||
void testConstructor(string dataFilename, string codeDummy) @safe
|
||||
{
|
||||
string base = dataFilename.baseName.stripExtension;
|
||||
enforce((base in expected) !is null,
|
||||
new Exception("Unimplemented constructor test: " ~ base));
|
||||
|
||||
auto loader = Loader.fromFile(dataFilename);
|
||||
|
||||
Node[] exp = expected[base];
|
||||
|
||||
//Compare with expected results document by document.
|
||||
size_t i;
|
||||
foreach(node; loader)
|
||||
{
|
||||
if(node != exp[i])
|
||||
{
|
||||
static if(verbose)
|
||||
{
|
||||
writeln("Expected value:");
|
||||
writeln(exp[i].debugString);
|
||||
writeln("\n");
|
||||
writeln("Actual value:");
|
||||
writeln(node.debugString);
|
||||
}
|
||||
assert(false);
|
||||
}
|
||||
++i;
|
||||
}
|
||||
assert(i == exp.length);
|
||||
}
|
||||
} // version(unittest)
|
||||
|
||||
|
||||
@safe unittest
|
||||
{
|
||||
printProgress("D:YAML Constructor unittest");
|
||||
run("testConstructor", &testConstructor, ["data", "code"]);
|
||||
}
|
||||
import dyaml.test.common : assertNodesEqual, run;
|
||||
/**
|
||||
Constructor unittest.
|
||||
|
||||
} // version(unittest)
|
||||
Params:
|
||||
dataFilename = File name to read from.
|
||||
codeDummy = Dummy .code filename, used to determine that
|
||||
.data file with the same name should be used in this test.
|
||||
*/
|
||||
static void testConstructor(string dataFilename, string codeDummy) @safe
|
||||
{
|
||||
string base = dataFilename.baseName.stripExtension;
|
||||
assert((base in expected) !is null, "Unimplemented constructor test: " ~ base);
|
||||
|
||||
auto loader = Loader.fromFile(dataFilename);
|
||||
|
||||
Node[] exp = expected[base];
|
||||
|
||||
//Compare with expected results document by document.
|
||||
size_t i;
|
||||
foreach (node; loader)
|
||||
{
|
||||
assertNodesEqual(node, exp[i]);
|
||||
++i;
|
||||
}
|
||||
assert(i == exp.length);
|
||||
}
|
||||
run(&testConstructor, ["data", "code"]);
|
||||
}
|
||||
|
|
|
@@ -6,194 +6,127 @@
|
|||
|
||||
module dyaml.test.emitter;
|
||||
|
||||
|
||||
version(unittest)
|
||||
{
|
||||
|
||||
import std.algorithm;
|
||||
import std.file;
|
||||
import std.range;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.emitter;
|
||||
import dyaml.event;
|
||||
import dyaml.test.common;
|
||||
import dyaml.token;
|
||||
|
||||
// Try to emit an event range.
|
||||
void emitTestCommon(T)(ref Appender!string emitStream, T events, bool canonical = false) @safe
|
||||
if (isInputRange!T && is(ElementType!T == Event))
|
||||
{
|
||||
auto emitter = Emitter!(typeof(emitStream), char)(emitStream, canonical, 2, 80, LineBreak.unix);
|
||||
foreach(ref event; events)
|
||||
{
|
||||
emitter.emit(event);
|
||||
}
|
||||
}
|
||||
|
||||
/// Determine if events in events1 are equivalent to events in events2.
|
||||
///
|
||||
/// Params: events1 = First event array to compare.
|
||||
/// events2 = Second event array to compare.
|
||||
///
|
||||
/// Returns: true if the events are equivalent, false otherwise.
|
||||
bool compareEvents(T, U)(T events1, U events2)
|
||||
if (isInputRange!T && isInputRange!U && is(ElementType!T == Event) && is(ElementType!U == Event))
|
||||
{
|
||||
foreach (e1, e2; zip(events1, events2))
|
||||
{
|
||||
//Different event types.
|
||||
if(e1.id != e2.id){return false;}
|
||||
//Different anchor (if applicable).
|
||||
if(e1.id.among!(EventID.sequenceStart,
|
||||
EventID.mappingStart,
|
||||
EventID.alias_,
|
||||
EventID.scalar)
|
||||
&& e1.anchor != e2.anchor)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
//Different collection tag (if applicable).
|
||||
if(e1.id.among!(EventID.sequenceStart, EventID.mappingStart) && e1.tag != e2.tag)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if(e1.id == EventID.scalar)
|
||||
{
|
||||
//Different scalar tag (if applicable).
|
||||
if(!(e1.implicit || e2.implicit)
|
||||
&& e1.tag != e2.tag)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
//Different scalar value.
|
||||
if(e1.value != e2.value)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/// Test emitter by getting events from parsing a file, emitting them, parsing
|
||||
/// the emitted result and comparing events from parsing the emitted result with
|
||||
/// originally parsed events.
|
||||
///
|
||||
/// Params: dataFilename = YAML file to parse.
|
||||
/// canonicalFilename = Canonical YAML file used as dummy to determine
|
||||
/// which data files to load.
|
||||
void testEmitterOnData(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
//Must exist due to Anchor, Tags reference counts.
|
||||
auto loader = Loader.fromFile(dataFilename);
|
||||
auto events = loader.parse();
|
||||
auto emitStream = Appender!string();
|
||||
emitTestCommon(emitStream, events);
|
||||
|
||||
static if(verbose)
|
||||
{
|
||||
writeln(dataFilename);
|
||||
writeln("ORIGINAL:\n", readText(dataFilename));
|
||||
writeln("OUTPUT:\n", emitStream.data);
|
||||
}
|
||||
|
||||
auto loader2 = Loader.fromString(emitStream.data);
|
||||
loader2.name = "TEST";
|
||||
auto newEvents = loader2.parse();
|
||||
assert(compareEvents(events, newEvents));
|
||||
}
|
||||
|
||||
/// Test emitter by getting events from parsing a canonical YAML file, emitting
|
||||
/// them both in canonical and normal format, parsing the emitted results and
|
||||
/// comparing events from parsing the emitted result with originally parsed events.
|
||||
///
|
||||
/// Params: canonicalFilename = Canonical YAML file to parse.
|
||||
void testEmitterOnCanonical(string canonicalFilename) @safe
|
||||
{
|
||||
//Must exist due to Anchor, Tags reference counts.
|
||||
auto loader = Loader.fromFile(canonicalFilename);
|
||||
auto events = loader.parse();
|
||||
foreach(canonical; [false, true])
|
||||
{
|
||||
auto emitStream = Appender!string();
|
||||
emitTestCommon(emitStream, events, canonical);
|
||||
static if(verbose)
|
||||
{
|
||||
writeln("OUTPUT (canonical=", canonical, "):\n",
|
||||
emitStream.data);
|
||||
}
|
||||
auto loader2 = Loader.fromString(emitStream.data);
|
||||
loader2.name = "TEST";
|
||||
auto newEvents = loader2.parse();
|
||||
assert(compareEvents(events, newEvents));
|
||||
}
|
||||
}
|
||||
|
||||
/// Test emitter by getting events from parsing a file, emitting them with all
|
||||
/// possible scalar and collection styles, parsing the emitted results and
|
||||
/// comparing events from parsing the emitted result with originally parsed events.
|
||||
///
|
||||
/// Params: dataFilename = YAML file to parse.
|
||||
/// canonicalFilename = Canonical YAML file used as dummy to determine
|
||||
/// which data files to load.
|
||||
void testEmitterStyles(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
foreach(filename; [dataFilename, canonicalFilename])
|
||||
{
|
||||
//must exist due to Anchor, Tags reference counts
|
||||
auto loader = Loader.fromFile(canonicalFilename);
|
||||
auto events = loader.parse();
|
||||
foreach(flowStyle; [CollectionStyle.block, CollectionStyle.flow])
|
||||
{
|
||||
foreach(style; [ScalarStyle.literal, ScalarStyle.folded,
|
||||
ScalarStyle.doubleQuoted, ScalarStyle.singleQuoted,
|
||||
ScalarStyle.plain])
|
||||
{
|
||||
Event[] styledEvents;
|
||||
foreach(event; events)
|
||||
{
|
||||
if(event.id == EventID.scalar)
|
||||
{
|
||||
event = scalarEvent(Mark(), Mark(), event.anchor, event.tag,
|
||||
event.implicit,
|
||||
event.value, style);
|
||||
}
|
||||
else if(event.id == EventID.sequenceStart)
|
||||
{
|
||||
event = sequenceStartEvent(Mark(), Mark(), event.anchor,
|
||||
event.tag, event.implicit, flowStyle);
|
||||
}
|
||||
else if(event.id == EventID.mappingStart)
|
||||
{
|
||||
event = mappingStartEvent(Mark(), Mark(), event.anchor,
|
||||
event.tag, event.implicit, flowStyle);
|
||||
}
|
||||
styledEvents ~= event;
|
||||
}
|
||||
auto emitStream = Appender!string();
|
||||
emitTestCommon(emitStream, styledEvents);
|
||||
static if(verbose)
|
||||
{
|
||||
writeln("OUTPUT (", filename, ", ", to!string(flowStyle), ", ",
|
||||
to!string(style), ")");
|
||||
writeln(emitStream.data);
|
||||
}
|
||||
auto loader2 = Loader.fromString(emitStream.data);
|
||||
loader2.name = "TEST";
|
||||
auto newEvents = loader2.parse();
|
||||
assert(compareEvents(events, newEvents));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@safe unittest
|
||||
{
|
||||
printProgress("D:YAML Emitter unittest");
|
||||
run("testEmitterOnData", &testEmitterOnData, ["data", "canonical"]);
|
||||
run("testEmitterOnCanonical", &testEmitterOnCanonical, ["canonical"]);
|
||||
run("testEmitterStyles", &testEmitterStyles, ["data", "canonical"]);
|
||||
}
|
||||
import std.array : Appender;
|
||||
import std.range : ElementType, isInputRange;
|
||||
|
||||
} // version(unittest)
|
||||
import dyaml : CollectionStyle, LineBreak, Loader, Mark, ScalarStyle;
|
||||
import dyaml.emitter : Emitter;
|
||||
import dyaml.event : Event, EventID, mappingStartEvent, scalarEvent, sequenceStartEvent;
|
||||
import dyaml.test.common : assertEventsEqual, run;
|
||||
|
||||
// Try to emit an event range.
|
||||
static void emitTestCommon(T)(ref Appender!string emitStream, T events, bool canonical = false) @safe
|
||||
if (isInputRange!T && is(ElementType!T == Event))
|
||||
{
|
||||
auto emitter = Emitter!(typeof(emitStream), char)(emitStream, canonical, 2, 80, LineBreak.unix);
|
||||
foreach (ref event; events)
|
||||
{
|
||||
emitter.emit(event);
|
||||
}
|
||||
}
|
||||
/**
|
||||
Test emitter by getting events from parsing a file, emitting them, parsing
|
||||
the emitted result and comparing events from parsing the emitted result with
|
||||
originally parsed events.
|
||||
|
||||
Params:
|
||||
dataFilename = YAML file to parse.
|
||||
canonicalFilename = Canonical YAML file used as dummy to determine
|
||||
which data files to load.
|
||||
*/
|
||||
static void testEmitterOnData(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
//Must exist due to Anchor, Tags reference counts.
|
||||
auto loader = Loader.fromFile(dataFilename);
|
||||
auto events = loader.parse();
|
||||
auto emitStream = Appender!string();
|
||||
emitTestCommon(emitStream, events);
|
||||
|
||||
auto loader2 = Loader.fromString(emitStream.data);
|
||||
loader2.name = "TEST";
|
||||
auto newEvents = loader2.parse();
|
||||
assertEventsEqual(events, newEvents);
|
||||
}
|
||||
/**
|
||||
Test emitter by getting events from parsing a canonical YAML file, emitting
|
||||
them both in canonical and normal format, parsing the emitted results and
|
||||
comparing events from parsing the emitted result with originally parsed events.
|
||||
|
||||
Params: canonicalFilename = Canonical YAML file to parse.
|
||||
*/
|
||||
static void testEmitterOnCanonical(string canonicalFilename) @safe
|
||||
{
|
||||
//Must exist due to Anchor, Tags reference counts.
|
||||
auto loader = Loader.fromFile(canonicalFilename);
|
||||
auto events = loader.parse();
|
||||
foreach (canonical; [false, true])
|
||||
{
|
||||
auto emitStream = Appender!string();
|
||||
emitTestCommon(emitStream, events, canonical);
|
||||
|
||||
auto loader2 = Loader.fromString(emitStream.data);
|
||||
loader2.name = "TEST";
|
||||
auto newEvents = loader2.parse();
|
||||
assertEventsEqual(events, newEvents);
|
||||
}
|
||||
}
|
||||
/**
|
||||
Test emitter by getting events from parsing a file, emitting them with all
|
||||
possible scalar and collection styles, parsing the emitted results and
|
||||
comparing events from parsing the emitted result with originally parsed events.
|
||||
|
||||
Params:
|
||||
dataFilename = YAML file to parse.
|
||||
canonicalFilename = Canonical YAML file used as dummy to determine
|
||||
which data files to load.
|
||||
*/
|
||||
static void testEmitterStyles(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
foreach (filename; [dataFilename, canonicalFilename])
|
||||
{
|
||||
//must exist due to Anchor, Tags reference counts
|
||||
auto loader = Loader.fromFile(canonicalFilename);
|
||||
auto events = loader.parse();
|
||||
foreach (flowStyle; [CollectionStyle.block, CollectionStyle.flow])
|
||||
{
|
||||
foreach (style; [ScalarStyle.literal, ScalarStyle.folded,
|
||||
ScalarStyle.doubleQuoted, ScalarStyle.singleQuoted,
|
||||
ScalarStyle.plain])
|
||||
{
|
||||
Event[] styledEvents;
|
||||
foreach (event; events)
|
||||
{
|
||||
if (event.id == EventID.scalar)
|
||||
{
|
||||
event = scalarEvent(Mark(), Mark(), event.anchor, event.tag,
|
||||
event.implicit,
|
||||
event.value, style);
|
||||
}
|
||||
else if (event.id == EventID.sequenceStart)
|
||||
{
|
||||
event = sequenceStartEvent(Mark(), Mark(), event.anchor,
|
||||
event.tag, event.implicit, flowStyle);
|
||||
}
|
||||
else if (event.id == EventID.mappingStart)
|
||||
{
|
||||
event = mappingStartEvent(Mark(), Mark(), event.anchor,
|
||||
event.tag, event.implicit, flowStyle);
|
||||
}
|
||||
styledEvents ~= event;
|
||||
}
|
||||
auto emitStream = Appender!string();
|
||||
emitTestCommon(emitStream, styledEvents);
|
||||
auto loader2 = Loader.fromString(emitStream.data);
|
||||
loader2.name = "TEST";
|
||||
auto newEvents = loader2.parse();
|
||||
assertEventsEqual(events, newEvents);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
run(&testEmitterOnData, ["data", "canonical"]);
|
||||
run(&testEmitterOnCanonical, ["canonical"]);
|
||||
run(&testEmitterStyles, ["data", "canonical"]);
|
||||
}
|
||||
|
|
|
@@ -6,86 +6,59 @@
|
|||
|
||||
module dyaml.test.errors;
|
||||
|
||||
|
||||
version(unittest)
|
||||
{
|
||||
|
||||
import std.file;
|
||||
|
||||
import dyaml.test.common;
|
||||
|
||||
|
||||
/// Loader error unittest from file stream.
|
||||
///
|
||||
/// Params: errorFilename = File name to read from.
|
||||
void testLoaderError(string errorFilename) @safe
|
||||
{
|
||||
import std.array : array;
|
||||
Node[] nodes;
|
||||
try { nodes = Loader.fromFile(errorFilename).array; }
|
||||
catch(YAMLException e)
|
||||
{
|
||||
printException(e);
|
||||
return;
|
||||
}
|
||||
assert(false, "Expected an exception");
|
||||
}
|
||||
|
||||
/// Loader error unittest from string.
|
||||
///
|
||||
/// Params: errorFilename = File name to read from.
|
||||
void testLoaderErrorString(string errorFilename) @safe
|
||||
{
|
||||
import std.array : array;
|
||||
try
|
||||
{
|
||||
auto nodes = Loader.fromFile(errorFilename).array;
|
||||
}
|
||||
catch(YAMLException e)
|
||||
{
|
||||
printException(e);
|
||||
return;
|
||||
}
|
||||
assert(false, "Expected an exception");
|
||||
}
|
||||
|
||||
/// Loader error unittest from filename.
|
||||
///
|
||||
/// Params: errorFilename = File name to read from.
|
||||
void testLoaderErrorFilename(string errorFilename) @safe
|
||||
{
|
||||
import std.array : array;
|
||||
try { auto nodes = Loader.fromFile(errorFilename).array; }
|
||||
catch(YAMLException e)
|
||||
{
|
||||
printException(e);
|
||||
return;
|
||||
}
|
||||
assert(false, "testLoaderErrorSingle(" ~ ", " ~ errorFilename ~
|
||||
") Expected an exception");
|
||||
}
|
||||
|
||||
/// Loader error unittest loading a single document from a file.
|
||||
///
|
||||
/// Params: errorFilename = File name to read from.
|
||||
void testLoaderErrorSingle(string errorFilename) @safe
|
||||
{
|
||||
try { auto nodes = Loader.fromFile(errorFilename).load(); }
|
||||
catch(YAMLException e)
|
||||
{
|
||||
printException(e);
|
||||
return;
|
||||
}
|
||||
assert(false, "Expected an exception");
|
||||
}
|
||||
|
||||
@safe unittest
|
||||
{
|
||||
printProgress("D:YAML Errors unittest");
|
||||
run("testLoaderError", &testLoaderError, ["loader-error"]);
|
||||
run("testLoaderErrorString", &testLoaderErrorString, ["loader-error"]);
|
||||
run("testLoaderErrorFilename", &testLoaderErrorFilename, ["loader-error"]);
|
||||
run("testLoaderErrorSingle", &testLoaderErrorSingle, ["single-loader-error"]);
|
||||
}
|
||||
import std.array : array;
|
||||
import std.exception : assertThrown;
|
||||
|
||||
} // version(unittest)
|
||||
import dyaml : Loader;
|
||||
import dyaml.test.common : run;
|
||||
|
||||
/**
|
||||
Loader error unittest from file stream.
|
||||
|
||||
Params: errorFilename = File name to read from.
|
||||
*/
|
||||
static void testLoaderError(string errorFilename) @safe
|
||||
{
|
||||
assertThrown(Loader.fromFile(errorFilename).array,
|
||||
__FUNCTION__ ~ "(" ~ errorFilename ~ ") Expected an exception");
|
||||
}
|
||||
|
||||
/**
|
||||
Loader error unittest from string.
|
||||
|
||||
Params: errorFilename = File name to read from.
|
||||
*/
|
||||
static void testLoaderErrorString(string errorFilename) @safe
|
||||
{
|
||||
assertThrown(Loader.fromFile(errorFilename).array,
|
||||
__FUNCTION__ ~ "(" ~ errorFilename ~ ") Expected an exception");
|
||||
}
|
||||
|
||||
/**
|
||||
Loader error unittest from filename.
|
||||
|
||||
Params: errorFilename = File name to read from.
|
||||
*/
|
||||
static void testLoaderErrorFilename(string errorFilename) @safe
|
||||
{
|
||||
assertThrown(Loader.fromFile(errorFilename).array,
|
||||
__FUNCTION__ ~ "(" ~ errorFilename ~ ") Expected an exception");
|
||||
}
|
||||
|
||||
/**
|
||||
Loader error unittest loading a single document from a file.
|
||||
|
||||
Params: errorFilename = File name to read from.
|
||||
*/
|
||||
static void testLoaderErrorSingle(string errorFilename) @safe
|
||||
{
|
||||
assertThrown(Loader.fromFile(errorFilename).load(),
|
||||
__FUNCTION__ ~ "(" ~ errorFilename ~ ") Expected an exception");
|
||||
}
|
||||
run(&testLoaderError, ["loader-error"]);
|
||||
run(&testLoaderErrorString, ["loader-error"]);
|
||||
run(&testLoaderErrorFilename, ["loader-error"]);
|
||||
run(&testLoaderErrorSingle, ["single-loader-error"]);
|
||||
}
|
||||
|
|
|
@@ -6,88 +6,87 @@
|
|||
|
||||
module dyaml.test.inputoutput;
|
||||
|
||||
|
||||
version(unittest)
|
||||
{
|
||||
|
||||
import std.array;
|
||||
import std.file;
|
||||
import std.system;
|
||||
|
||||
import dyaml.test.common;
|
||||
|
||||
/// Get an UTF-16 byte order mark.
|
||||
///
|
||||
/// Params: wrong = Get the incorrect BOM for this system.
|
||||
///
|
||||
/// Returns: UTF-16 byte order mark.
|
||||
wchar bom16(bool wrong = false) pure @safe
|
||||
{
|
||||
wchar little = '\uFEFF';
|
||||
wchar big = '\uFFFE';
|
||||
if(!wrong){return endian == Endian.littleEndian ? little : big;}
|
||||
return endian == Endian.littleEndian ? big : little;
|
||||
}
|
||||
|
||||
/// Get an UTF-32 byte order mark.
|
||||
///
|
||||
/// Params: wrong = Get the incorrect BOM for this system.
|
||||
///
|
||||
/// Returns: UTF-32 byte order mark.
|
||||
dchar bom32(bool wrong = false) pure @safe
|
||||
{
|
||||
dchar little = '\uFEFF';
|
||||
dchar big = '\uFFFE';
|
||||
if(!wrong){return endian == Endian.littleEndian ? little : big;}
|
||||
return endian == Endian.littleEndian ? big : little;
|
||||
}
|
||||
|
||||
/// Unicode input unittest. Tests various encodings.
|
||||
///
|
||||
/// Params: unicodeFilename = File name to read from.
|
||||
void testUnicodeInput(string unicodeFilename) @safe
|
||||
{
|
||||
string data = readText(unicodeFilename);
|
||||
string expected = data.split().join(" ");
|
||||
|
||||
Node output = Loader.fromString(data).load();
|
||||
assert(output.as!string == expected);
|
||||
|
||||
foreach(buffer; [cast(ubyte[])(bom16() ~ data.to!(wchar[])),
|
||||
cast(ubyte[])(bom32() ~ data.to!(dchar[]))])
|
||||
{
|
||||
output = Loader.fromBuffer(buffer).load();
|
||||
assert(output.as!string == expected);
|
||||
}
|
||||
}
|
||||
|
||||
/// Unicode input error unittest. Tests various encodings with incorrect BOMs.
|
||||
///
|
||||
/// Params: unicodeFilename = File name to read from.
|
||||
void testUnicodeInputErrors(string unicodeFilename) @safe
|
||||
{
|
||||
string data = readText(unicodeFilename);
|
||||
foreach(buffer; [cast(ubyte[])(data.to!(wchar[])),
|
||||
cast(ubyte[])(data.to!(dchar[])),
|
||||
cast(ubyte[])(bom16(true) ~ data.to!(wchar[])),
|
||||
cast(ubyte[])(bom32(true) ~ data.to!(dchar[]))])
|
||||
{
|
||||
try { Loader.fromBuffer(buffer).load(); }
|
||||
catch(YAMLException e)
|
||||
{
|
||||
printException(e);
|
||||
continue;
|
||||
}
|
||||
assert(false, "Expected an exception");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@safe unittest
|
||||
{
|
||||
printProgress("D:YAML I/O unittest");
|
||||
run("testUnicodeInput", &testUnicodeInput, ["unicode"]);
|
||||
run("testUnicodeInputErrors", &testUnicodeInputErrors, ["unicode"]);
|
||||
}
|
||||
import std.array : join, split;
|
||||
import std.conv : to;
|
||||
import std.exception : assertThrown;
|
||||
import std.file : readText;
|
||||
import std.system : endian, Endian;
|
||||
|
||||
} // version(unittest)
|
||||
import dyaml : Loader, Node, YAMLException;
|
||||
import dyaml.test.common : run;
|
||||
|
||||
/**
|
||||
Get an UTF-16 byte order mark.
|
||||
|
||||
Params: wrong = Get the incorrect BOM for this system.
|
||||
|
||||
Returns: UTF-16 byte order mark.
|
||||
*/
|
||||
static wchar bom16(bool wrong = false) pure @safe
|
||||
{
|
||||
wchar little = '\uFEFF';
|
||||
wchar big = '\uFFFE';
|
||||
if (!wrong)
|
||||
{
|
||||
return endian == Endian.littleEndian ? little : big;
|
||||
}
|
||||
return endian == Endian.littleEndian ? big : little;
|
||||
}
|
||||
/**
|
||||
Get an UTF-32 byte order mark.
|
||||
|
||||
Params: wrong = Get the incorrect BOM for this system.
|
||||
|
||||
Returns: UTF-32 byte order mark.
|
||||
*/
|
||||
static dchar bom32(bool wrong = false) pure @safe
|
||||
{
|
||||
dchar little = '\uFEFF';
|
||||
dchar big = '\uFFFE';
|
||||
if (!wrong)
|
||||
{
|
||||
return endian == Endian.littleEndian ? little : big;
|
||||
}
|
||||
return endian == Endian.littleEndian ? big : little;
|
||||
}
|
||||
/**
|
||||
Unicode input unittest. Tests various encodings.
|
||||
|
||||
Params: unicodeFilename = File name to read from.
|
||||
*/
|
||||
static void testUnicodeInput(string unicodeFilename) @safe
|
||||
{
|
||||
string data = readText(unicodeFilename);
|
||||
string expected = data.split().join(" ");
|
||||
|
||||
Node output = Loader.fromString(data).load();
|
||||
assert(output.as!string == expected);
|
||||
|
||||
foreach (buffer; [cast(ubyte[]) (bom16() ~ data.to!(wchar[])),
|
||||
cast(ubyte[]) (bom32() ~ data.to!(dchar[]))])
|
||||
{
|
||||
output = Loader.fromBuffer(buffer).load();
|
||||
assert(output.as!string == expected);
|
||||
}
|
||||
}
|
||||
/**
|
||||
Unicode input error unittest. Tests various encodings with incorrect BOMs.
|
||||
|
||||
Params: unicodeFilename = File name to read from.
|
||||
*/
|
||||
static void testUnicodeInputErrors(string unicodeFilename) @safe
|
||||
{
|
||||
string data = readText(unicodeFilename);
|
||||
foreach (buffer; [cast(ubyte[]) (data.to!(wchar[])),
|
||||
cast(ubyte[]) (data.to!(dchar[])),
|
||||
cast(ubyte[]) (bom16(true) ~ data.to!(wchar[])),
|
||||
cast(ubyte[]) (bom32(true) ~ data.to!(dchar[]))])
|
||||
{
|
||||
assertThrown(Loader.fromBuffer(buffer).load());
|
||||
}
|
||||
}
|
||||
run(&testUnicodeInput, ["unicode"]);
|
||||
run(&testUnicodeInputErrors, ["unicode"]);
|
||||
}
|
||||
|
|
|
@@ -6,52 +6,32 @@
|
|||
|
||||
module dyaml.test.reader;
|
||||
|
||||
|
||||
version(unittest)
|
||||
@safe unittest
|
||||
{
|
||||
import std.exception :assertThrown;
|
||||
|
||||
import dyaml.test.common;
|
||||
import dyaml.reader;
|
||||
import dyaml.test.common : readData, run;
|
||||
import dyaml.reader : Reader, ReaderException;
|
||||
|
||||
/**
|
||||
Try reading entire file through Reader, expecting an error (the file is invalid).
|
||||
|
||||
// Try reading entire file through Reader, expecting an error (the file is invalid).
|
||||
//
|
||||
// Params: data = Stream to read.
|
||||
void runReader(ubyte[] fileData) @safe
|
||||
{
|
||||
try
|
||||
Params: data = Stream to read.
|
||||
*/
|
||||
static void runReader(ubyte[] fileData) @safe
|
||||
{
|
||||
auto reader = new Reader(fileData);
|
||||
while(reader.peek() != '\0') { reader.forward(); }
|
||||
}
|
||||
catch(ReaderException e)
|
||||
|
||||
/**
|
||||
Stream error unittest. Tries to read invalid input files, expecting errors.
|
||||
|
||||
Params: errorFilename = File name to read from.
|
||||
*/
|
||||
static void testStreamError(string errorFilename) @safe
|
||||
{
|
||||
printException(e);
|
||||
return;
|
||||
assertThrown!ReaderException(runReader(readData(errorFilename)));
|
||||
}
|
||||
assert(false, "Expected an exception");
|
||||
run(&testStreamError, ["stream-error"]);
|
||||
}
|
||||
|
||||
|
||||
/// Stream error unittest. Tries to read invalid input files, expecting errors.
|
||||
///
|
||||
/// Params: errorFilename = File name to read from.
|
||||
void testStreamError(string errorFilename) @safe
|
||||
{
|
||||
runReader(readData(errorFilename));
|
||||
}
|
||||
|
||||
// TODO: remove when a @safe ubyte[] file read can be done.
|
||||
ubyte[] readData(string filename) @trusted
|
||||
{
|
||||
import std.file;
|
||||
return cast(ubyte[])std.file.read(filename);
|
||||
}
|
||||
|
||||
@safe unittest
|
||||
{
|
||||
printProgress("D:YAML Reader unittest");
|
||||
run("testStreamError", &testStreamError, ["stream-error"]);
|
||||
}
|
||||
|
||||
} // version(unittest)
|
||||
|
|
|
@@ -6,74 +6,49 @@
|
|||
|
||||
module dyaml.test.representer;
|
||||
|
||||
|
||||
version(unittest)
|
||||
{
|
||||
|
||||
import std.array;
|
||||
import std.exception;
|
||||
import std.meta;
|
||||
import std.path;
|
||||
import std.typecons;
|
||||
import std.utf;
|
||||
|
||||
import dyaml.test.common;
|
||||
import dyaml.test.constructor;
|
||||
|
||||
|
||||
/// Representer unittest.
|
||||
///
|
||||
/// Params: codeFilename = File name to determine test case from.
|
||||
/// Nothing is read from this file, it only exists
|
||||
/// to specify that we need a matching unittest.
|
||||
void testRepresenterTypes(string codeFilename) @safe
|
||||
{
|
||||
string baseName = codeFilename.baseName.stripExtension;
|
||||
enforce((baseName in dyaml.test.constructor.expected) !is null,
|
||||
new Exception("Unimplemented representer test: " ~ baseName));
|
||||
|
||||
Node[] expectedNodes = expected[baseName];
|
||||
foreach(encoding; AliasSeq!(char, wchar, dchar))
|
||||
{
|
||||
immutable(encoding)[] output;
|
||||
Node[] readNodes;
|
||||
|
||||
scope(failure)
|
||||
{
|
||||
static if(verbose)
|
||||
{
|
||||
writeln("Expected nodes:");
|
||||
foreach(ref n; expectedNodes){writeln(n.debugString, "\n---\n");}
|
||||
writeln("Read nodes:");
|
||||
foreach(ref n; readNodes){writeln(n.debugString, "\n---\n");}
|
||||
() @trusted {
|
||||
writeln("OUTPUT:\n", cast(string)output);
|
||||
}();
|
||||
}
|
||||
}
|
||||
|
||||
auto emitStream = new Appender!(immutable(encoding)[]);
|
||||
auto dumper = dumper();
|
||||
dumper.dump!encoding(emitStream, expectedNodes);
|
||||
|
||||
output = emitStream.data;
|
||||
|
||||
auto loader = Loader.fromString(emitStream.data.toUTF8);
|
||||
loader.name = "TEST";
|
||||
readNodes = loader.array;
|
||||
|
||||
assert(expectedNodes.length == readNodes.length);
|
||||
foreach(n; 0 .. expectedNodes.length)
|
||||
{
|
||||
assert(expectedNodes[n] == readNodes[n]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@safe unittest
|
||||
{
|
||||
printProgress("D:YAML Representer unittest");
|
||||
run("testRepresenterTypes", &testRepresenterTypes, ["code"]);
|
||||
}
|
||||
import std.array : Appender, array;
|
||||
import std.meta : AliasSeq;
|
||||
import std.path : baseName, stripExtension;
|
||||
import std.utf : toUTF8;
|
||||
|
||||
} // version(unittest)
|
||||
import dyaml : dumper, Loader, Node;
|
||||
import dyaml.test.common : assertNodesEqual, run;
|
||||
import dyaml.test.constructor : expected;
|
||||
|
||||
/**
|
||||
Representer unittest. Dumps nodes, then loads them again.
|
||||
|
||||
Params:
|
||||
baseName = Nodes in dyaml.test.constructor.expected for roundtripping.
|
||||
*/
|
||||
static void testRepresenterTypes(string baseName) @safe
|
||||
{
|
||||
assert((baseName in expected) !is null, "Unimplemented representer test: " ~ baseName);
|
||||
|
||||
Node[] expectedNodes = expected[baseName];
|
||||
foreach (encoding; AliasSeq!(char, wchar, dchar))
|
||||
{
|
||||
auto emitStream = new Appender!(immutable(encoding)[]);
|
||||
auto dumper = dumper();
|
||||
dumper.dump!encoding(emitStream, expectedNodes);
|
||||
|
||||
immutable output = emitStream.data;
|
||||
|
||||
auto loader = Loader.fromString(emitStream.data.toUTF8);
|
||||
loader.name = "TEST";
|
||||
const readNodes = loader.array;
|
||||
|
||||
assert(expectedNodes.length == readNodes.length);
|
||||
foreach (n; 0 .. expectedNodes.length)
|
||||
{
|
||||
assertNodesEqual(expectedNodes[n], readNodes[n]);
|
||||
}
|
||||
}
|
||||
}
|
||||
foreach (key, _; expected)
|
||||
{
|
||||
testRepresenterTypes(key);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -6,52 +6,34 @@
|
|||
|
||||
module dyaml.test.resolver;
|
||||
|
||||
|
||||
version(unittest)
|
||||
{
|
||||
|
||||
import std.file;
|
||||
import std.string;
|
||||
|
||||
import dyaml.test.common;
|
||||
|
||||
|
||||
/**
|
||||
* Implicit tag resolution unittest.
|
||||
*
|
||||
* Params: dataFilename = File with unittest data.
|
||||
* detectFilename = Dummy filename used to specify which data filenames to use.
|
||||
*/
|
||||
void testImplicitResolver(string dataFilename, string detectFilename) @safe
|
||||
{
|
||||
string correctTag;
|
||||
Node node;
|
||||
|
||||
scope(failure)
|
||||
{
|
||||
if(true)
|
||||
{
|
||||
writeln("Correct tag: ", correctTag);
|
||||
writeln("Node: ", node.debugString);
|
||||
}
|
||||
}
|
||||
|
||||
correctTag = readText(detectFilename).strip();
|
||||
|
||||
node = Loader.fromFile(dataFilename).load();
|
||||
assert(node.nodeID == NodeID.sequence);
|
||||
foreach(ref Node scalar; node)
|
||||
{
|
||||
assert(scalar.nodeID == NodeID.scalar);
|
||||
assert(scalar.tag == correctTag);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@safe unittest
|
||||
{
|
||||
printProgress("D:YAML Resolver unittest");
|
||||
run("testImplicitResolver", &testImplicitResolver, ["data", "detect"]);
|
||||
}
|
||||
import std.conv : text;
|
||||
import std.file : readText;
|
||||
import std.string : strip;
|
||||
|
||||
} // version(unittest)
|
||||
import dyaml : Loader, Node, NodeID;
|
||||
import dyaml.test.common : run;
|
||||
|
||||
|
||||
/**
|
||||
Implicit tag resolution unittest.
|
||||
|
||||
Params:
|
||||
dataFilename = File with unittest data.
|
||||
detectFilename = Dummy filename used to specify which data filenames to use.
|
||||
*/
|
||||
static void testImplicitResolver(string dataFilename, string detectFilename) @safe
|
||||
{
|
||||
const correctTag = readText(detectFilename).strip();
|
||||
|
||||
auto node = Loader.fromFile(dataFilename).load();
|
||||
assert(node.nodeID == NodeID.sequence, text("Expected sequence when reading '", dataFilename, "', got ", node.nodeID));
|
||||
foreach (Node scalar; node)
|
||||
{
|
||||
assert(scalar.nodeID == NodeID.scalar, text("Expected sequence of scalars when reading '", dataFilename, "', got sequence of ", scalar.nodeID));
|
||||
assert(scalar.tag == correctTag, text("Expected tag '", correctTag, "' when reading '", dataFilename, "', got '", scalar.tag, "'"));
|
||||
}
|
||||
}
|
||||
run(&testImplicitResolver, ["data", "detect"]);
|
||||
}
|
||||
|
|
|
@@ -6,107 +6,88 @@
|
|||
|
||||
module dyaml.test.tokens;
|
||||
|
||||
|
||||
version(unittest)
|
||||
{
|
||||
|
||||
import std.array;
|
||||
import std.file;
|
||||
|
||||
import dyaml.test.common;
|
||||
import dyaml.reader;
|
||||
import dyaml.scanner;
|
||||
import dyaml.token;
|
||||
|
||||
// Read and scan a YAML doc, returning the tokens.
|
||||
const(Token)[] scanTestCommon(string filename) @safe
|
||||
{
|
||||
ubyte[] yamlData;
|
||||
() @trusted { yamlData = cast(ubyte[])std.file.read(filename); }();
|
||||
auto scanner = Scanner(new Reader(yamlData));
|
||||
const(Token)[] result;
|
||||
foreach (token; scanner)
|
||||
{
|
||||
result ~= token;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Test tokens output by scanner.
|
||||
*
|
||||
* Params: dataFilename = File to scan.
|
||||
* tokensFilename = File containing expected tokens.
|
||||
*/
|
||||
void testTokens(string dataFilename, string tokensFilename) @safe
|
||||
{
|
||||
//representations of YAML tokens in tokens file.
|
||||
auto replace = [
|
||||
TokenID.directive: "%",
|
||||
TokenID.documentStart: "---",
|
||||
TokenID.documentEnd: "...",
|
||||
TokenID.alias_: "*",
|
||||
TokenID.anchor: "&",
|
||||
TokenID.tag: "!",
|
||||
TokenID.scalar: "_",
|
||||
TokenID.blockSequenceStart: "[[",
|
||||
TokenID.blockMappingStart: "{{",
|
||||
TokenID.blockEnd: "]}",
|
||||
TokenID.flowSequenceStart: "[",
|
||||
TokenID.flowSequenceEnd: "]",
|
||||
TokenID.flowMappingStart: "{",
|
||||
TokenID.flowMappingEnd: "}",
|
||||
TokenID.blockEntry: ",",
|
||||
TokenID.flowEntry: ",",
|
||||
TokenID.key: "?",
|
||||
TokenID.value: ":"
|
||||
];
|
||||
|
||||
string[] tokens1;
|
||||
string[] tokens2 = readText(tokensFilename).split();
|
||||
scope(exit)
|
||||
{
|
||||
static if(verbose){writeln("tokens1: ", tokens1, "\ntokens2: ", tokens2);}
|
||||
}
|
||||
|
||||
foreach(token; scanTestCommon(dataFilename))
|
||||
{
|
||||
if(token.id != TokenID.streamStart && token.id != TokenID.streamEnd)
|
||||
{
|
||||
tokens1 ~= replace[token.id];
|
||||
}
|
||||
}
|
||||
|
||||
assert(tokens1 == tokens2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test scanner by scanning a file, expecting no errors.
|
||||
*
|
||||
* Params: dataFilename = File to scan.
|
||||
* canonicalFilename = Another file to scan, in canonical YAML format.
|
||||
*/
|
||||
void testScanner(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
foreach(filename; [dataFilename, canonicalFilename])
|
||||
{
|
||||
string[] tokens;
|
||||
scope(exit)
|
||||
{
|
||||
static if(verbose){writeln(tokens);}
|
||||
}
|
||||
foreach(ref token; scanTestCommon(filename))
|
||||
{
|
||||
tokens ~= to!string(token.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@safe unittest
|
||||
{
|
||||
printProgress("D:YAML tokens unittest");
|
||||
run("testTokens", &testTokens, ["data", "tokens"]);
|
||||
run("testScanner", &testScanner, ["data", "canonical"]);
|
||||
}
|
||||
import std.array : split;
|
||||
import std.conv : text;
|
||||
import std.file : readText;
|
||||
|
||||
} // version(unittest)
|
||||
import dyaml.test.common : run;
|
||||
import dyaml.reader : Reader;
|
||||
import dyaml.scanner : Scanner;
|
||||
import dyaml.token : TokenID;
|
||||
|
||||
// Read and scan a YAML doc, returning a range of tokens.
|
||||
static auto scanTestCommon(string filename) @safe
|
||||
{
|
||||
ubyte[] yamlData = cast(ubyte[])readText(filename).dup;
|
||||
return Scanner(new Reader(yamlData));
|
||||
}
|
||||
|
||||
/**
|
||||
Test tokens output by scanner.
|
||||
|
||||
Params:
|
||||
dataFilename = File to scan.
|
||||
tokensFilename = File containing expected tokens.
|
||||
*/
|
||||
static void testTokens(string dataFilename, string tokensFilename) @safe
|
||||
{
|
||||
//representations of YAML tokens in tokens file.
|
||||
auto replace = [
|
||||
TokenID.directive: "%",
|
||||
TokenID.documentStart: "---",
|
||||
TokenID.documentEnd: "...",
|
||||
TokenID.alias_: "*",
|
||||
TokenID.anchor: "&",
|
||||
TokenID.tag: "!",
|
||||
TokenID.scalar: "_",
|
||||
TokenID.blockSequenceStart: "[[",
|
||||
TokenID.blockMappingStart: "{{",
|
||||
TokenID.blockEnd: "]}",
|
||||
TokenID.flowSequenceStart: "[",
|
||||
TokenID.flowSequenceEnd: "]",
|
||||
TokenID.flowMappingStart: "{",
|
||||
TokenID.flowMappingEnd: "}",
|
||||
TokenID.blockEntry: ",",
|
||||
TokenID.flowEntry: ",",
|
||||
TokenID.key: "?",
|
||||
TokenID.value: ":"
|
||||
];
|
||||
|
||||
string[] tokens;
|
||||
string[] expectedTokens = readText(tokensFilename).split();
|
||||
|
||||
foreach (token; scanTestCommon(dataFilename))
|
||||
{
|
||||
if (token.id != TokenID.streamStart && token.id != TokenID.streamEnd)
|
||||
{
|
||||
tokens ~= replace[token.id];
|
||||
}
|
||||
}
|
||||
|
||||
assert(tokens == expectedTokens,
|
||||
text("In token test for '", tokensFilename, "', expected '", expectedTokens, "', got '", tokens, "'"));
|
||||
}
|
||||
|
||||
/**
|
||||
Test scanner by scanning a file, expecting no errors.
|
||||
|
||||
Params:
|
||||
dataFilename = File to scan.
|
||||
canonicalFilename = Another file to scan, in canonical YAML format.
|
||||
*/
|
||||
static void testScanner(string dataFilename, string canonicalFilename) @safe
|
||||
{
|
||||
foreach (filename; [dataFilename, canonicalFilename])
|
||||
{
|
||||
string[] tokens;
|
||||
foreach (token; scanTestCommon(filename))
|
||||
{
|
||||
tokens ~= token.id.text;
|
||||
}
|
||||
}
|
||||
}
|
||||
run(&testTokens, ["data", "tokens"]);
|
||||
run(&testScanner, ["data", "canonical"]);
|
||||
}
|
||||
|
|