Unittest build now works with 'dub test'
parent c47714c950
commit b254e35762
12 changed files with 50 additions and 87 deletions
208 source/dyaml/testcommon.d Normal file
@@ -0,0 +1,208 @@
// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testcommon;

version(unittest)
{

public import std.conv;
public import std.stdio;
public import std.stream;
public import dyaml.all;

import core.exception;
import std.algorithm;
import std.array;
import std.conv;
import std.file;
import std.path;
import std.typecons;

package:

alias std.stream.File File;

/**
 * Run an unittest.
 *
 * Params: testName     = Name of the unittest.
 *         testFunction = Unittest function.
 *         unittestExt  = Extensions of data files needed for the unittest.
 *         skipExt      = Extensions that must not be used for the unittest.
 */
void run(F ...)(string testName, void function(bool, F) testFunction,
                string[] unittestExt, string[] skipExt = [])
{
    immutable string dataDir = "test/data";
    auto testFilenames = findTestFilenames(dataDir);
    bool verbose = false;

    Result[] results;
    if(unittestExt.length > 0)
    {
        outer: foreach(base, extensions; testFilenames)
        {
            string[] filenames;
            foreach(ext; unittestExt)
            {
                if(!extensions.canFind(ext)){continue outer;}
                filenames ~= base ~ '.' ~ ext;
            }
            foreach(ext; skipExt)
            {
                if(extensions.canFind(ext)){continue outer;}
            }

            results ~= execute!F(testName, testFunction, filenames, verbose);
        }
    }
    else
    {
        results ~= execute!F(testName, testFunction, cast(string[])[], verbose);
    }
    display(results, verbose);
}


private:

///Unittest status.
enum TestStatus
{
    Success, //Unittest passed.
    Failure, //Unittest failed.
    Error    //There's an error in the unittest.
}

///Unittest result.
alias Tuple!(string, "name", string[], "filenames", TestStatus, "kind", string, "info") Result;

/**
 * Find unittest input filenames.
 *
 * Params: dir = Directory to look in.
 *
 * Returns: Test input base filenames and their extensions.
 */
string[][string] findTestFilenames(const string dir)
{
    //Groups of extensions indexed by base names.
    string[][string] names;
    foreach(string name; dirEntries(dir, SpanMode.shallow))
    {
        if(isFile(name))
        {
            string base = name.stripExtension();
            string ext  = name.extension();
            if(ext is null){ext = "";}
            if(ext[0] == '.'){ext = ext[1 .. $];}

            //If the base name doesn't exist yet, add it; otherwise add new extension.
            names[base] = ((base in names) is null) ? [ext] : names[base] ~ ext;
        }
    }
    return names;
}

/**
 * Recursively copy an array of strings to a tuple to use for unittest function input.
 *
 * Params: index   = Current index in the array/tuple.
 *         tuple   = Tuple to copy to.
 *         strings = Strings to copy.
 */
void stringsToTuple(uint index, F ...)(ref F tuple, const string[] strings)
in{assert(F.length == strings.length);}
body
{
    tuple[index] = strings[index];
    static if(index > 0){stringsToTuple!(index - 1, F)(tuple, strings);}
}

/**
 * Execute an unittest on specified files.
 *
 * Params: testName     = Name of the unittest.
 *         testFunction = Unittest function.
 *         filenames    = Names of input files to test with.
 *         verbose      = Print verbose output?
 *
 * Returns: Information about the results of the unittest.
 */
Result execute(F ...)(const string testName, void function(bool, F) testFunction,
                      string[] filenames, const bool verbose)
{
    if(verbose)
    {
        writeln("===========================================================================");
        writeln(testName ~ "(" ~ filenames.join(", ") ~ ")...");
    }

    auto kind = TestStatus.Success;
    string info = "";
    try
    {
        //Convert filenames to parameters tuple and call the test function.
        F parameters;
        stringsToTuple!(F.length - 1, F)(parameters, filenames);
        testFunction(verbose, parameters);
        if(!verbose){write(".");}
    }
    catch(Throwable e)
    {
        info = to!string(typeid(e)) ~ "\n" ~ to!string(e);
        kind = (typeid(e) is typeid(AssertError)) ? TestStatus.Failure : TestStatus.Error;
        write((verbose ? to!string(e) : to!string(kind)) ~ " ");
    }

    stdout.flush();

    return Result(testName, filenames, kind, info);
}

/**
 * Display unittest results.
 *
 * Params: results = Unittest results.
 *         verbose = Print verbose output?
 */
void display(Result[] results, const bool verbose)
{
    if(results.length > 0 && !verbose){write("\n");}

    size_t failures = 0;
    size_t errors = 0;

    if(verbose)
    {
        writeln("===========================================================================");
    }
    //Results of each test.
    foreach(result; results)
    {
        if(verbose)
        {
            writeln(result.name, "(" ~ result.filenames.join(", ") ~ "): ",
                    to!string(result.kind));
        }

        if(result.kind == TestStatus.Success){continue;}

        if(result.kind == TestStatus.Failure){++failures;}
        else if(result.kind == TestStatus.Error){++errors;}
        writeln(result.info);
        writeln("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
    }

    //Totals.
    writeln("===========================================================================");
    writeln("TESTS: ", results.length);
    if(failures > 0){writeln("FAILURES: ", failures);}
    if(errors > 0)  {writeln("ERRORS: ", errors);}
}

} // version(unittest)
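For orientation, a minimal sketch of how a test module plugs into the run() harness above; the module name, the testExample function, and the "example" data-file extension are hypothetical, but the pattern mirrors the real test modules added in the rest of this commit.

// Illustrative only, not part of the commit: a test module built on dyaml.testcommon.
// run() scans test/data, groups files by base name, and calls the test function once
// per group, passing one filename for each listed extension ("example" here).
module dyaml.testexample;

version(unittest)
{

import dyaml.testcommon;

void testExample(bool verbose, string exampleFilename)
{
    // Loader and writeln are available through dyaml.testcommon's public imports.
    auto root = Loader(exampleFilename).load();
    if(verbose) { writeln(root.debugString); }
}

unittest
{
    writeln("D:YAML example unittest");
    run("testExample", &testExample, ["example"]);
}

} // version(unittest)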
72 source/dyaml/testcompare.d Normal file
@@ -0,0 +1,72 @@
// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testcompare;


version(unittest)
{

import dyaml.testcommon;
import dyaml.token;


/// Test parser by comparing output from parsing two equivalent YAML files.
///
/// Params: verbose           = Print verbose output?
///         dataFilename      = YAML file to parse.
///         canonicalFilename = Another file to parse, in canonical YAML format.
void testParser(bool verbose, string dataFilename, string canonicalFilename)
{
    auto dataEvents = Loader(dataFilename).parse();
    auto canonicalEvents = Loader(canonicalFilename).parse();

    assert(dataEvents.length == canonicalEvents.length);

    foreach(e; 0 .. dataEvents.length)
    {
        assert(dataEvents[e].id == canonicalEvents[e].id);
    }
}


/// Test loader by comparing output from loading two equivalent YAML files.
///
/// Params: verbose           = Print verbose output?
///         dataFilename      = YAML file to load.
///         canonicalFilename = Another file to load, in canonical YAML format.
void testLoader(bool verbose, string dataFilename, string canonicalFilename)
{
    auto data = Loader(dataFilename).loadAll();
    auto canonical = Loader(canonicalFilename).loadAll();

    assert(data.length == canonical.length, "Unequal node count");
    foreach(n; 0 .. data.length)
    {
        if(data[n] != canonical[n])
        {
            if(verbose)
            {
                writeln("Normal value:");
                writeln(data[n].debugString);
                writeln("\n");
                writeln("Canonical value:");
                writeln(canonical[n].debugString);
            }
            assert(false, "testLoader(" ~ dataFilename ~ ", " ~ canonicalFilename ~ ") failed");
        }
    }
}


unittest
{
    writeln("D:YAML comparison unittest");
    run("testParser", &testParser, ["data", "canonical"]);
    run("testLoader", &testLoader, ["data", "canonical"], ["test_loader_skip"]);
}

} // version(unittest)
440 source/dyaml/testconstructor.d Normal file
@@ -0,0 +1,440 @@
// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testconstructor;


version(unittest)
{

import std.datetime;
import std.exception;
import std.path;
import std.string;
import std.typecons;

import dyaml.tag;
import dyaml.testcommon;


///Expected results of loading test inputs.
Node[][string] expected;

///Initialize expected.
static this()
{
    expected["aliases-cdumper-bug"] = constructAliasesCDumperBug();
    expected["construct-binary"] = constructBinary();
    expected["construct-bool"] = constructBool();
    expected["construct-custom"] = constructCustom();
    expected["construct-float"] = constructFloat();
    expected["construct-int"] = constructInt();
    expected["construct-map"] = constructMap();
    expected["construct-merge"] = constructMerge();
    expected["construct-null"] = constructNull();
    expected["construct-omap"] = constructOMap();
    expected["construct-pairs"] = constructPairs();
    expected["construct-seq"] = constructSeq();
    expected["construct-set"] = constructSet();
    expected["construct-str-ascii"] = constructStrASCII();
    expected["construct-str"] = constructStr();
    expected["construct-str-utf8"] = constructStrUTF8();
    expected["construct-timestamp"] = constructTimestamp();
    expected["construct-value"] = constructValue();
    expected["duplicate-merge-key"] = duplicateMergeKey();
    expected["float-representer-2.3-bug"] = floatRepresenterBug();
    expected["invalid-single-quote-bug"] = invalidSingleQuoteBug();
    expected["more-floats"] = moreFloats();
    expected["negative-float-bug"] = negativeFloatBug();
    expected["single-dot-is-not-float-bug"] = singleDotFloatBug();
    expected["timestamp-bugs"] = timestampBugs();
    expected["utf16be"] = utf16be();
    expected["utf16le"] = utf16le();
    expected["utf8"] = utf8();
    expected["utf8-implicit"] = utf8implicit();
}

///Construct a pair of nodes with specified values.
Node.Pair pair(A, B)(A a, B b)
{
    return Node.Pair(a,b);
}

///Test cases:

Node[] constructAliasesCDumperBug()
{
    return [Node(["today", "today"])];
}

Node[] constructBinary()
{
    auto canonical = cast(ubyte[])"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;";
    auto generic = cast(ubyte[])"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;";
    auto description = "The binary value above is a tiny arrow encoded as a gif image.";

    return [Node([pair("canonical", canonical),
                  pair("generic", generic),
                  pair("description", description)])];
}

Node[] constructBool()
{
    return [Node([pair("canonical", true),
                  pair("answer", false),
                  pair("logical", true),
                  pair("option", true),
                  pair("but", [pair("y", "is a string"), pair("n", "is a string")])])];
}

Node[] constructCustom()
{
    return [Node([Node(new TestClass(1, 2, 3)),
                  Node(TestStruct(10))])];
}

Node[] constructFloat()
{
    return [Node([pair("canonical", cast(real)685230.15),
                  pair("exponential", cast(real)685230.15),
                  pair("fixed", cast(real)685230.15),
                  pair("sexagesimal", cast(real)685230.15),
                  pair("negative infinity", -real.infinity),
                  pair("not a number", real.nan)])];
}

Node[] constructInt()
{
    return [Node([pair("canonical", 685230L),
                  pair("decimal", 685230L),
                  pair("octal", 685230L),
                  pair("hexadecimal", 685230L),
                  pair("binary", 685230L),
                  pair("sexagesimal", 685230L)])];
}

Node[] constructMap()
{
    return [Node([pair("Block style",
                       [pair("Clark", "Evans"),
                        pair("Brian", "Ingerson"),
                        pair("Oren", "Ben-Kiki")]),
                  pair("Flow style",
                       [pair("Clark", "Evans"),
                        pair("Brian", "Ingerson"),
                        pair("Oren", "Ben-Kiki")])])];
}

Node[] constructMerge()
{
    return [Node([Node([pair("x", 1L), pair("y", 2L)]),
                  Node([pair("x", 0L), pair("y", 2L)]),
                  Node([pair("r", 10L)]),
                  Node([pair("r", 1L)]),
                  Node([pair("x", 1L), pair("y", 2L), pair("r", 10L), pair("label", "center/big")]),
                  Node([pair("r", 10L), pair("label", "center/big"), pair("x", 1L), pair("y", 2L)]),
                  Node([pair("label", "center/big"), pair("x", 1L), pair("y", 2L), pair("r", 10L)]),
                  Node([pair("x", 1L), pair("label", "center/big"), pair("r", 10L), pair("y", 2L)])])];
}

Node[] constructNull()
{
    return [Node(YAMLNull()),
            Node([pair("empty", YAMLNull()),
                  pair("canonical", YAMLNull()),
                  pair("english", YAMLNull()),
                  pair(YAMLNull(), "null key")]),
            Node([pair("sparse",
                       [Node(YAMLNull()),
                        Node("2nd entry"),
                        Node(YAMLNull()),
                        Node("4th entry"),
                        Node(YAMLNull())])])];
}

Node[] constructOMap()
{
    return [Node([pair("Bestiary",
                       [pair("aardvark", "African pig-like ant eater. Ugly."),
                        pair("anteater", "South-American ant eater. Two species."),
                        pair("anaconda", "South-American constrictor snake. Scaly.")]),
                  pair("Numbers",[pair("one", 1L),
                                  pair("two", 2L),
                                  pair("three", 3L)])])];
}

Node[] constructPairs()
{
    return [Node([pair("Block tasks",
                       Node([pair("meeting", "with team."),
                             pair("meeting", "with boss."),
                             pair("break", "lunch."),
                             pair("meeting", "with client.")], "tag:yaml.org,2002:pairs")),
                  pair("Flow tasks",
                       Node([pair("meeting", "with team"),
                             pair("meeting", "with boss")], "tag:yaml.org,2002:pairs"))])];
}

Node[] constructSeq()
{
    return [Node([pair("Block style",
                       [Node("Mercury"), Node("Venus"), Node("Earth"), Node("Mars"),
                        Node("Jupiter"), Node("Saturn"), Node("Uranus"), Node("Neptune"),
                        Node("Pluto")]),
                  pair("Flow style",
                       [Node("Mercury"), Node("Venus"), Node("Earth"), Node("Mars"),
                        Node("Jupiter"), Node("Saturn"), Node("Uranus"), Node("Neptune"),
                        Node("Pluto")])])];
}

Node[] constructSet()
{
    return [Node([pair("baseball players",
                       [Node("Mark McGwire"), Node("Sammy Sosa"), Node("Ken Griffey")]),
                  pair("baseball teams",
                       [Node("Boston Red Sox"), Node("Detroit Tigers"), Node("New York Yankees")])])];
}

Node[] constructStrASCII()
{
    return [Node("ascii string")];
}

Node[] constructStr()
{
    return [Node([pair("string", "abcd")])];
}

Node[] constructStrUTF8()
{
    return [Node("\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430")];
}

Node[] constructTimestamp()
{
    alias DT = DateTime;
    alias ST = SysTime;
    alias hnsecs = FracSec.from!"hnsecs";
    return [Node([pair("canonical", ST(DT(2001, 12, 15, 2, 59, 43), hnsecs(1000000), UTC())),
                  pair("valid iso8601", ST(DT(2001, 12, 15, 2, 59, 43), hnsecs(1000000), UTC())),
                  pair("space separated", ST(DT(2001, 12, 15, 2, 59, 43), hnsecs(1000000), UTC())),
                  pair("no time zone (Z)", ST(DT(2001, 12, 15, 2, 59, 43), hnsecs(1000000), UTC())),
                  pair("date (00:00:00Z)", ST(DT(2002, 12, 14), UTC()))])];
}

Node[] constructValue()
{
    return[Node([pair("link with",
                      [Node("library1.dll"), Node("library2.dll")])]),
           Node([pair("link with",
                      [Node([pair("=", "library1.dll"), pair("version", cast(real)1.2)]),
                       Node([pair("=", "library2.dll"), pair("version", cast(real)2.3)])])])];
}

Node[] duplicateMergeKey()
{
    return [Node([pair("foo", "bar"),
                  pair("x", 1L),
                  pair("y", 2L),
                  pair("z", 3L),
                  pair("t", 4L)])];
}

Node[] floatRepresenterBug()
{
    return [Node([pair(cast(real)1.0, 1L),
                  pair(real.infinity, 10L),
                  pair(-real.infinity, -10L),
                  pair(real.nan, 100L)])];
}

Node[] invalidSingleQuoteBug()
{
    return [Node([Node("foo \'bar\'"), Node("foo\n\'bar\'")])];
}

Node[] moreFloats()
{
    return [Node([Node(cast(real)0.0),
                  Node(cast(real)1.0),
                  Node(cast(real)-1.0),
                  Node(real.infinity),
                  Node(-real.infinity),
                  Node(real.nan),
                  Node(real.nan)])];
}

Node[] negativeFloatBug()
{
    return [Node(cast(real)-1.0)];
}

Node[] singleDotFloatBug()
{
    return [Node(".")];
}

Node[] timestampBugs()
{
    alias DT = DateTime;
    alias ST = SysTime;
    alias hnsecs = FracSec.from!"hnsecs";
    alias STZ = immutable SimpleTimeZone;
    return [Node([Node(ST(DT(2001, 12, 15, 3, 29, 43), hnsecs(1000000), UTC())),
                  Node(ST(DT(2001, 12, 14, 16, 29, 43), hnsecs(1000000), UTC())),
                  Node(ST(DT(2001, 12, 14, 21, 59, 43), hnsecs(10100), UTC())),
                  Node(ST(DT(2001, 12, 14, 21, 59, 43), new STZ(60.dur!"minutes"))),
                  Node(ST(DT(2001, 12, 14, 21, 59, 43), new STZ(-90.dur!"minutes"))),
                  Node(ST(DT(2005, 7, 8, 17, 35, 4), hnsecs(5176000), UTC()))])];
}

Node[] utf16be()
{
    return [Node("UTF-16-BE")];
}

Node[] utf16le()
{
    return [Node("UTF-16-LE")];
}

Node[] utf8()
{
    return [Node("UTF-8")];
}

Node[] utf8implicit()
{
    return [Node("implicit UTF-8")];
}

///Testing custom YAML class type.
class TestClass
{
    int x, y, z;

    this(int x, int y, int z)
    {
        this.x = x;
        this.y = y;
        this.z = z;
    }

    //Any D:YAML type must have a custom opCmp operator.
    //This is used for ordering in mappings.
    override int opCmp(Object o)
    {
        TestClass s = cast(TestClass)o;
        if(s is null){return -1;}
        if(x != s.x){return x - s.x;}
        if(y != s.y){return y - s.y;}
        if(z != s.z){return z - s.z;}
        return 0;
    }

    override string toString()
    {
        return format("TestClass(", x, ", ", y, ", ", z, ")");
    }
}

///Testing custom YAML struct type.
struct TestStruct
{
    int value;

    //Any D:YAML type must have a custom opCmp operator.
    //This is used for ordering in mappings.
    const int opCmp(ref const TestStruct s)
    {
        return value - s.value;
    }
}

///Constructor function for TestClass.
TestClass constructClass(ref Node node)
{
    return new TestClass(node["x"].as!int, node["y"].as!int, node["z"].as!int);
}

Node representClass(ref Node node, Representer representer)
{
    auto value = node.as!TestClass;
    auto pairs = [Node.Pair("x", value.x),
                  Node.Pair("y", value.y),
                  Node.Pair("z", value.z)];
    auto result = representer.representMapping("!tag1", pairs);

    return result;
}

///Constructor function for TestStruct.
TestStruct constructStruct(ref Node node)
{
    return TestStruct(to!int(node.as!string));
}

///Representer function for TestStruct.
Node representStruct(ref Node node, Representer representer)
{
    string[] keys, values;
    auto value = node.as!TestStruct;
    return representer.representScalar("!tag2", to!string(value.value));
}

/**
 * Constructor unittest.
 *
 * Params: verbose      = Print verbose output?
 *         dataFilename = File name to read from.
 *         codeDummy    = Dummy .code filename, used to determine that
 *                        .data file with the same name should be used in this test.
 */
void testConstructor(bool verbose, string dataFilename, string codeDummy)
{
    string base = dataFilename.baseName.stripExtension;
    enforce((base in expected) !is null,
            new Exception("Unimplemented constructor test: " ~ base));

    auto constructor = new Constructor;
    constructor.addConstructorMapping("!tag1", &constructClass);
    constructor.addConstructorScalar("!tag2", &constructStruct);

    auto loader = Loader(dataFilename);
    loader.constructor = constructor;
    loader.resolver = new Resolver;

    Node[] exp = expected[base];

    //Compare with expected results document by document.
    size_t i = 0;
    foreach(node; loader)
    {
        if(!node.equals!(No.useTag)(exp[i]))
        {
            if(verbose)
            {
                writeln("Expected value:");
                writeln(exp[i].debugString);
                writeln("\n");
                writeln("Actual value:");
                writeln(node.debugString);
            }
            assert(false);
        }
        ++i;
    }
    assert(i == exp.length);
}


unittest
{
    writeln("D:YAML Constructor unittest");
    run("testConstructor", &testConstructor, ["data", "code"]);
}

} // version(unittest)
204 source/dyaml/testemitter.d Normal file
@@ -0,0 +1,204 @@
// Copyright Ferdinand Majerech 2011-2014.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testemitter;


version(unittest)
{

import std.algorithm;
import std.file;
import std.range;
import std.typecons;

import dyaml.dumper;
import dyaml.event;
import dyaml.testcommon;
import dyaml.token;


/// Determine if events in events1 are equivalent to events in events2.
///
/// Params: events1 = First event array to compare.
///         events2 = Second event array to compare.
///
/// Returns: true if the events are equivalent, false otherwise.
bool compareEvents(Event[] events1, Event[] events2)
{
    if(events1.length != events2.length){return false;}

    for(uint e = 0; e < events1.length; ++e)
    {
        auto e1 = events1[e];
        auto e2 = events2[e];

        //Different event types.
        if(e1.id != e2.id){return false;}
        //Different anchor (if applicable).
        if([EventID.SequenceStart,
            EventID.MappingStart,
            EventID.Alias,
            EventID.Scalar].canFind(e1.id)
           && e1.anchor != e2.anchor)
        {
            return false;
        }
        //Different collection tag (if applicable).
        if([EventID.SequenceStart, EventID.MappingStart].canFind(e1.id) && e1.tag != e2.tag)
        {
            return false;
        }
        if(e1.id == EventID.Scalar)
        {
            //Different scalar tag (if applicable).
            if(![e1.implicit, e1.implicit_2, e2.implicit, e2.implicit_2].canFind(true)
               && e1.tag != e2.tag)
            {
                return false;
            }
            //Different scalar value.
            if(e1.value != e2.value)
            {
                return false;
            }
        }
    }
    return true;
}

/// Test emitter by getting events from parsing a file, emitting them, parsing
/// the emitted result and comparing events from parsing the emitted result with
/// originally parsed events.
///
/// Params: verbose           = Print verbose output?
///         dataFilename      = YAML file to parse.
///         canonicalFilename = Canonical YAML file used as dummy to determine
///                             which data files to load.
void testEmitterOnData(bool verbose, string dataFilename, string canonicalFilename)
{
    //Must exist due to Anchor, Tags reference counts.
    auto loader = Loader(dataFilename);
    auto events = cast(Event[])loader.parse();
    auto emitStream = new MemoryStream;
    Dumper(emitStream).emit(events);

    if(verbose)
    {
        writeln(dataFilename);
        writeln("ORIGINAL:\n", readText(dataFilename));
        writeln("OUTPUT:\n", cast(string)emitStream.data);
    }

    auto loader2 = Loader(emitStream.data.dup);
    loader2.name = "TEST";
    loader2.constructor = new Constructor;
    loader2.resolver = new Resolver;
    auto newEvents = cast(Event[])loader2.parse();
    assert(compareEvents(events, newEvents));
}

/// Test emitter by getting events from parsing a canonical YAML file, emitting
/// them both in canonical and normal format, parsing the emitted results and
/// comparing events from parsing the emitted result with originally parsed events.
///
/// Params: verbose           = Print verbose output?
///         canonicalFilename = Canonical YAML file to parse.
void testEmitterOnCanonical(bool verbose, string canonicalFilename)
{
    //Must exist due to Anchor, Tags reference counts.
    auto loader = Loader(canonicalFilename);
    auto events = cast(Event[])loader.parse();
    foreach(canonical; [false, true])
    {
        auto emitStream = new MemoryStream;
        auto dumper = Dumper(emitStream);
        dumper.canonical = canonical;
        dumper.emit(events);
        if(verbose)
        {
            writeln("OUTPUT (canonical=", canonical, "):\n",
                    cast(string)emitStream.data);
        }
        auto loader2 = Loader(emitStream.data.dup);
        loader2.name = "TEST";
        loader2.constructor = new Constructor;
        loader2.resolver = new Resolver;
        auto newEvents = cast(Event[])loader2.parse();
        assert(compareEvents(events, newEvents));
    }
}

/// Test emitter by getting events from parsing a file, emitting them with all
/// possible scalar and collection styles, parsing the emitted results and
/// comparing events from parsing the emitted result with originally parsed events.
///
/// Params: verbose           = Print verbose output?
///         dataFilename      = YAML file to parse.
///         canonicalFilename = Canonical YAML file used as dummy to determine
///                             which data files to load.
void testEmitterStyles(bool verbose, string dataFilename, string canonicalFilename)
{
    foreach(filename; [dataFilename, canonicalFilename])
    {
        //must exist due to Anchor, Tags reference counts
        auto loader = Loader(canonicalFilename);
        auto events = cast(Event[])loader.parse();
        foreach(flowStyle; [CollectionStyle.Block, CollectionStyle.Flow])
        {
            foreach(style; [ScalarStyle.Literal, ScalarStyle.Folded,
                            ScalarStyle.DoubleQuoted, ScalarStyle.SingleQuoted,
                            ScalarStyle.Plain])
            {
                Event[] styledEvents;
                foreach(event; events)
                {
                    if(event.id == EventID.Scalar)
                    {
                        event = scalarEvent(Mark(), Mark(), event.anchor, event.tag,
                                            tuple(event.implicit, event.implicit_2),
                                            event.value, style);
                    }
                    else if(event.id == EventID.SequenceStart)
                    {
                        event = sequenceStartEvent(Mark(), Mark(), event.anchor,
                                                   event.tag, event.implicit, flowStyle);
                    }
                    else if(event.id == EventID.MappingStart)
                    {
                        event = mappingStartEvent(Mark(), Mark(), event.anchor,
                                                  event.tag, event.implicit, flowStyle);
                    }
                    styledEvents ~= event;
                }
                auto emitStream = new MemoryStream;
                Dumper(emitStream).emit(styledEvents);
                if(verbose)
                {
                    writeln("OUTPUT (", filename, ", ", to!string(flowStyle), ", ",
                            to!string(style), ")");
                    writeln(emitStream.data);
                }
                auto loader2 = Loader(emitStream.data.dup);
                loader2.name = "TEST";
                loader2.constructor = new Constructor;
                loader2.resolver = new Resolver;
                auto newEvents = cast(Event[])loader2.parse();
                assert(compareEvents(events, newEvents));
            }
        }
    }
}

unittest
{
    writeln("D:YAML Emitter unittest");
    run("testEmitterOnData", &testEmitterOnData, ["data", "canonical"]);
    run("testEmitterOnCanonical", &testEmitterOnCanonical, ["canonical"]);
    run("testEmitterStyles", &testEmitterStyles, ["data", "canonical"]);
}

} // version(unittest)
98 source/dyaml/testerrors.d Normal file
@@ -0,0 +1,98 @@
// Copyright Ferdinand Majerech 2011-2014
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testerrors;


version(unittest)
{

import std.file;

import dyaml.testcommon;


/// Loader error unittest from file stream.
///
/// Params: verbose       = Print verbose output?
///         errorFilename = File name to read from.
void testLoaderError(bool verbose, string errorFilename)
{
    auto buffer = std.file.read(errorFilename);

    Node[] nodes;
    try { nodes = Loader(buffer).loadAll(); }
    catch(YAMLException e)
    {
        if(verbose) { writeln(typeid(e).toString(), "\n", e); }
        return;
    }
    assert(false, "Expected an exception");
}

/// Loader error unittest from string.
///
/// Params: verbose       = Print verbose output?
///         errorFilename = File name to read from.
void testLoaderErrorString(bool verbose, string errorFilename)
{
    // Load file to a buffer, then pass that to the YAML loader.
    auto buffer = std.file.read(errorFilename);

    try
    {
        auto nodes = Loader(buffer).loadAll();
    }
    catch(YAMLException e)
    {
        if(verbose) { writeln(typeid(e).toString(), "\n", e); }
        return;
    }
    assert(false, "Expected an exception");
}

/// Loader error unittest from filename.
///
/// Params: verbose       = Print verbose output?
///         errorFilename = File name to read from.
void testLoaderErrorFilename(bool verbose, string errorFilename)
{
    try { auto nodes = Loader(errorFilename).loadAll(); }
    catch(YAMLException e)
    {
        if(verbose) { writeln(typeid(e).toString(), "\n", e); }
        return;
    }
    assert(false, "testLoaderErrorSingle(" ~ verbose.to!string ~
                  ", " ~ errorFilename ~ ") Expected an exception");
}

/// Loader error unittest loading a single document from a file.
///
/// Params: verbose       = Print verbose output?
///         errorFilename = File name to read from.
void testLoaderErrorSingle(bool verbose, string errorFilename)
{
    try { auto nodes = Loader(errorFilename).load(); }
    catch(YAMLException e)
    {
        if(verbose) { writeln(typeid(e).toString(), "\n", e); }
        return;
    }
    assert(false, "Expected an exception");
}


unittest
{
    writeln("D:YAML Errors unittest");
    run("testLoaderError", &testLoaderError, ["loader-error"]);
    run("testLoaderErrorString", &testLoaderErrorString, ["loader-error"]);
    run("testLoaderErrorFilename", &testLoaderErrorFilename, ["loader-error"]);
    run("testLoaderErrorSingle", &testLoaderErrorSingle, ["single-loader-error"]);
}

} // version(unittest)
98 source/dyaml/testinputoutput.d Normal file
@@ -0,0 +1,98 @@
// Copyright Ferdinand Majerech 2011-2014.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testinputoutput;


version(unittest)
{

import std.array;
import std.file;
import std.system;

import dyaml.testcommon;


alias std.system.endian endian;

/// Get an UTF-16 byte order mark.
///
/// Params: wrong = Get the incorrect BOM for this system.
///
/// Returns: UTF-16 byte order mark.
wchar bom16(bool wrong = false) pure
{
    wchar little = *(cast(wchar*)ByteOrderMarks[BOM.UTF16LE]);
    wchar big = *(cast(wchar*)ByteOrderMarks[BOM.UTF16BE]);
    if(!wrong){return endian == Endian.littleEndian ? little : big;}
    return endian == Endian.littleEndian ? big : little;
}

/// Get an UTF-32 byte order mark.
///
/// Params: wrong = Get the incorrect BOM for this system.
///
/// Returns: UTF-32 byte order mark.
dchar bom32(bool wrong = false) pure
{
    dchar little = *(cast(dchar*)ByteOrderMarks[BOM.UTF32LE]);
    dchar big = *(cast(dchar*)ByteOrderMarks[BOM.UTF32BE]);
    if(!wrong){return endian == Endian.littleEndian ? little : big;}
    return endian == Endian.littleEndian ? big : little;
}

/// Unicode input unittest. Tests various encodings.
///
/// Params: verbose         = Print verbose output?
///         unicodeFilename = File name to read from.
void testUnicodeInput(bool verbose, string unicodeFilename)
{
    string data = readText(unicodeFilename);
    string expected = data.split().join(" ");

    Node output = Loader(cast(void[])data.to!(char[])).load();
    assert(output.as!string == expected);

    foreach(buffer; [cast(void[])(bom16() ~ data.to!(wchar[])),
                     cast(void[])(bom32() ~ data.to!(dchar[]))])
    {
        output = Loader(buffer).load();
        assert(output.as!string == expected);
    }
}

/// Unicode input error unittest. Tests various encodings with incorrect BOMs.
///
/// Params: verbose         = Print verbose output?
///         unicodeFilename = File name to read from.
void testUnicodeInputErrors(bool verbose, string unicodeFilename)
{
    string data = readText(unicodeFilename);
    foreach(buffer; [cast(void[])(data.to!(wchar[])),
                     cast(void[])(data.to!(dchar[])),
                     cast(void[])(bom16(true) ~ data.to!(wchar[])),
                     cast(void[])(bom32(true) ~ data.to!(dchar[]))])
    {
        try { Loader(buffer).load(); }
        catch(YAMLException e)
        {
            if(verbose) { writeln(typeid(e).toString(), "\n", e); }
            continue;
        }
        assert(false, "Expected an exception");
    }
}


unittest
{
    writeln("D:YAML I/O unittest");
    run("testUnicodeInput", &testUnicodeInput, ["unicode"]);
    run("testUnicodeInputErrors", &testUnicodeInputErrors, ["unicode"]);
}

} // version(unittest)
53 source/dyaml/testreader.d Normal file
@@ -0,0 +1,53 @@
// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testreader;


version(unittest)
{

import dyaml.testcommon;
import dyaml.reader;


// Try reading entire file through Reader, expecting an error (the file is invalid).
//
// Params: verbose = Print verbose output?
//         data    = Stream to read.
void runReader(const bool verbose, void[] fileData)
{
    try
    {
        auto reader = new Reader(cast(ubyte[])fileData);
        while(reader.peek() != '\0') { reader.forward(); }
    }
    catch(ReaderException e)
    {
        if(verbose) { writeln(typeid(e).toString(), "\n", e); }
        return;
    }
    assert(false, "Expected an exception");
}


/// Stream error unittest. Tries to read invalid input files, expecting errors.
///
/// Params: verbose       = Print verbose output?
///         errorFilename = File name to read from.
void testStreamError(bool verbose, string errorFilename)
{
    import std.file;
    runReader(verbose, std.file.read(errorFilename));
}

unittest
{
    writeln("D:YAML Reader unittest");
    run("testStreamError", &testStreamError, ["stream-error"]);
}

} // version(unittest)
84 source/dyaml/testrepresenter.d Normal file
@@ -0,0 +1,84 @@
// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testrepresenter;


version(unittest)
{

import std.path;
import std.exception;
import std.typecons;

import dyaml.testcommon;
import dyaml.testconstructor;


/// Representer unittest.
///
/// Params: verbose      = Print verbose output?
///         codeFilename = File name to determine test case from.
///                        Nothing is read from this file, it only exists
///                        to specify that we need a matching unittest.
void testRepresenterTypes(bool verbose, string codeFilename)
{
    string baseName = codeFilename.baseName.stripExtension;
    enforce((baseName in dyaml.testconstructor.expected) !is null,
            new Exception("Unimplemented representer test: " ~ baseName));

    Node[] expectedNodes = expected[baseName];
    foreach(encoding; [Encoding.UTF_8, Encoding.UTF_16, Encoding.UTF_32])
    {
        string output;
        Node[] readNodes;

        scope(failure)
        {
            if(verbose)
            {
                writeln("Expected nodes:");
                foreach(ref n; expectedNodes){writeln(n.debugString, "\n---\n");}
                writeln("Read nodes:");
                foreach(ref n; readNodes){writeln(n.debugString, "\n---\n");}
                writeln("OUTPUT:\n", output);
            }
        }

        auto emitStream = new MemoryStream;
        auto representer = new Representer;
        representer.addRepresenter!TestClass(&representClass);
        representer.addRepresenter!TestStruct(&representStruct);
        auto dumper = Dumper(emitStream);
        dumper.representer = representer;
        dumper.encoding = encoding;
        dumper.dump(expectedNodes);

        output = cast(string)emitStream.data;
        auto constructor = new Constructor;
        constructor.addConstructorMapping("!tag1", &constructClass);
        constructor.addConstructorScalar("!tag2", &constructStruct);

        auto loader = Loader(emitStream.data.dup);
        loader.name = "TEST";
        loader.constructor = constructor;
        readNodes = loader.loadAll();

        assert(expectedNodes.length == readNodes.length);
        foreach(n; 0 .. expectedNodes.length)
        {
            assert(expectedNodes[n].equals!(No.useTag)(readNodes[n]));
        }
    }
}

unittest
{
    writeln("D:YAML Representer unittest");
    run("testRepresenterTypes", &testRepresenterTypes, ["code"]);
}

} // version(unittest)
57 source/dyaml/testresolver.d Normal file
@@ -0,0 +1,57 @@
// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testresolver;


version(unittest)
{

import std.file;
import std.string;

import dyaml.testcommon;


/**
 * Implicit tag resolution unittest.
 *
 * Params: verbose        = Print verbose output?
 *         dataFilename   = File with unittest data.
 *         detectFilename = Dummy filename used to specify which data filenames to use.
 */
void testImplicitResolver(bool verbose, string dataFilename, string detectFilename)
{
    string correctTag;
    Node node;

    scope(failure)
    {
        if(true)
        {
            writeln("Correct tag: ", correctTag);
            writeln("Node: ", node.debugString);
        }
    }

    correctTag = readText(detectFilename).strip();
    node = Loader(dataFilename).load();
    assert(node.isSequence);
    foreach(ref Node scalar; node)
    {
        assert(scalar.isScalar);
        assert(scalar.tag == correctTag);
    }
}


unittest
{
    writeln("D:YAML Resolver unittest");
    run("testImplicitResolver", &testImplicitResolver, ["data", "detect"]);
}

} // version(unittest)
96 source/dyaml/testtokens.d Normal file
@@ -0,0 +1,96 @@
// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testtokens;


version(unittest)
{

import std.array;
import std.file;

import dyaml.testcommon;
import dyaml.token;


/**
 * Test tokens output by scanner.
 *
 * Params: verbose        = Print verbose output?
 *         dataFilename   = File to scan.
 *         tokensFilename = File containing expected tokens.
 */
void testTokens(bool verbose, string dataFilename, string tokensFilename)
{
    //representations of YAML tokens in tokens file.
    auto replace = [TokenID.Directive          : "%",
                    TokenID.DocumentStart      : "---",
                    TokenID.DocumentEnd        : "...",
                    TokenID.Alias              : "*",
                    TokenID.Anchor             : "&",
                    TokenID.Tag                : "!",
                    TokenID.Scalar             : "_",
                    TokenID.BlockSequenceStart : "[[",
                    TokenID.BlockMappingStart  : "{{",
                    TokenID.BlockEnd           : "]}",
                    TokenID.FlowSequenceStart  : "[",
                    TokenID.FlowSequenceEnd    : "]",
                    TokenID.FlowMappingStart   : "{",
                    TokenID.FlowMappingEnd     : "}",
                    TokenID.BlockEntry         : ",",
                    TokenID.FlowEntry          : ",",
                    TokenID.Key                : "?",
                    TokenID.Value              : ":"];

    string[] tokens1;
    string[] tokens2 = readText(tokensFilename).split();
    scope(exit)
    {
        if(verbose){writeln("tokens1: ", tokens1, "\ntokens2: ", tokens2);}
    }

    auto loader = Loader(dataFilename);
    foreach(token; loader.scan())
    {
        if(token.id != TokenID.StreamStart && token.id != TokenID.StreamEnd)
        {
            tokens1 ~= replace[token.id];
        }
    }

    assert(tokens1 == tokens2);
}

/**
 * Test scanner by scanning a file, expecting no errors.
 *
 * Params: verbose           = Print verbose output?
 *         dataFilename      = File to scan.
 *         canonicalFilename = Another file to scan, in canonical YAML format.
 */
void testScanner(bool verbose, string dataFilename, string canonicalFilename)
{
    foreach(filename; [dataFilename, canonicalFilename])
    {
        string[] tokens;
        scope(exit)
        {
            if(verbose){writeln(tokens);}
        }
        auto loader = Loader(filename);
        foreach(ref token; loader.scan()){tokens ~= to!string(token.id);}
    }
}

unittest
{
    writeln("D:YAML tokens unittest");
    run("testTokens", &testTokens, ["data", "tokens"]);
    run("testScanner", &testScanner, ["data", "canonical"]);
}

} // version(unittest)