Most of the emitter work is done. A few more changes are left before 0.2,
and the tutorials and documentation still need some updating.
This commit is contained in:
parent
967fe8c48b
commit
934df763ad
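The new dyaml/dumper.d added in this commit documents the dumping API. Based solely on the examples in that documentation, basic usage looks roughly like the following minimal sketch (the file name is illustrative):

--------------------
import dyaml.dumper;
import dyaml.node;

void main()
{
    //Dump a single document to a file, as shown in the Dumper documentation below.
    auto node = Node([1, 2, 3, 4, 5]);
    Dumper("file.txt").dump(node);
}
--------------------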
0
autoddoc.py
Executable file → Normal file
17
cdc.d
|
@ -291,16 +291,16 @@ static void compile(string[] paths, string[] options = null)
|
|||
enforceEx!CompileException(exists(src),
|
||||
"Source file/folder \"" ~ src ~ "\" does not exist.");
|
||||
//Directory of source or lib files
|
||||
if(isdir(src))
|
||||
if(isDir(src))
|
||||
{
|
||||
sources ~= scan(src, ".d");
|
||||
ddocs ~= scan(src, ".ddoc");
|
||||
libs ~= scan(src, lib_ext);
|
||||
}
|
||||
//File
|
||||
else if(isfile(src))
|
||||
else if(isFile(src))
|
||||
{
|
||||
string ext = "." ~ src.getExt();
|
||||
string ext = src.extension();
|
||||
if(ext == ".d"){sources ~= src;}
|
||||
else if(ext == lib_ext){libs ~= src;}
|
||||
}
|
||||
|
@ -413,7 +413,7 @@ static void compile(string[] paths, string[] options = null)
|
|||
{
|
||||
foreach(src; sources)
|
||||
{
|
||||
if(src.getExt != "d"){continue;}
|
||||
if(src.extension != ".d"){continue;}
|
||||
|
||||
string html = src[0 .. $ - 2] ~ ".html";
|
||||
string dest = replace(replace(html, "/", "."), "\\", ".");
|
||||
|
@ -441,7 +441,7 @@ static void compile(string[] paths, string[] options = null)
|
|||
foreach(ext; obj_ext)
|
||||
{
|
||||
//Delete object files with same name as output file that dmd sometimes leaves.
|
||||
try{remove(addExt(co.out_file, ext));}
|
||||
try{remove(co.out_file.setExtension(ext));}
|
||||
catch(FileException e){continue;}
|
||||
}
|
||||
}
|
||||
|
@ -597,7 +597,10 @@ struct CompileOptions
|
|||
}
|
||||
|
||||
///Thrown at errors in execution of other processes (e.g. compiler commands).
|
||||
class CompileException : Exception {this(in string message){super(message);}};
|
||||
class CompileException : Exception
|
||||
{
|
||||
this(in string message, in string file, in size_t line){super(message, file, line);}
|
||||
};
|
||||
|
||||
/**
|
||||
* Wrapper around execute to write compile options to a file to get around max arg lenghts on Windows.
|
||||
|
@ -665,7 +668,7 @@ string[] scan(in string directory, string extensions ...)
|
|||
string[] result;
|
||||
foreach(string name; dirEntries(directory, SpanMode.depth))
|
||||
{
|
||||
if(isfile(name) && endsWith(name, extensions)){result ~= name;}
|
||||
if(isFile(name) && endsWith(name, extensions)){result ~= name;}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
|
30
dyaml/anchor.d
Normal file
|
@ -0,0 +1,30 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///YAML anchor.
|
||||
module dyaml.anchor;
|
||||
|
||||
import dyaml.sharedobject;
|
||||
|
||||
|
||||
///YAML anchor (reference) struct. Encapsulates an anchor to save memory.
|
||||
struct Anchor
|
||||
{
|
||||
public:
|
||||
mixin SharedObject!(string, Anchor);
|
||||
|
||||
///Construct an anchor from a string representation.
|
||||
this(string anchor)
|
||||
{
|
||||
if(anchor is null || anchor == "")
|
||||
{
|
||||
index_ = uint.max;
|
||||
return;
|
||||
}
|
||||
|
||||
add(anchor);
|
||||
}
|
||||
}
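The Composer changes further below access anchors through get() and isNull(); assuming those members come from the SharedObject mixin, intended usage is roughly as follows (a hypothetical sketch, not part of the commit):

--------------------
auto a = Anchor("top");
assert(!a.isNull() && a.get == "top");

auto none = Anchor(null);  //empty anchor; index_ is set to uint.max
assert(none.isNull());
--------------------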
|
|
@ -15,6 +15,7 @@ import std.conv;
|
|||
import std.exception;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.constructor;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
|
@ -50,7 +51,7 @@ final class Composer
|
|||
///Constructor constructing YAML values.
|
||||
Constructor constructor_;
|
||||
///Nodes associated with anchors. Used by YAML aliases.
|
||||
Node[string] anchors_;
|
||||
Node[Anchor] anchors_;
|
||||
|
||||
public:
|
||||
/**
|
||||
|
@ -140,7 +141,7 @@ final class Composer
|
|||
parser_.getEvent();
|
||||
|
||||
//Clear anchors.
|
||||
Node[string] empty;
|
||||
Node[Anchor] empty;
|
||||
anchors_ = empty;
|
||||
return node;
|
||||
}
|
||||
|
@ -150,33 +151,37 @@ final class Composer
|
|||
{
|
||||
if(parser_.checkEvent(EventID.Alias))
|
||||
{
|
||||
Event event = parser_.getEvent();
|
||||
immutable event = parser_.getEvent();
|
||||
const anchor = event.anchor;
|
||||
enforce((anchor in anchors_) !is null,
|
||||
new ComposerException("Found undefined alias: " ~ anchor,
|
||||
new ComposerException("Found undefined alias: " ~ anchor.get,
|
||||
event.startMark));
|
||||
|
||||
//If the node referenced by the anchor is uninitialized,
|
||||
//it's not finished, i.e. we're currently composing it
|
||||
//and trying to use it recursively here.
|
||||
enforce(anchors_[anchor] != Node(),
|
||||
new ComposerException("Found recursive alias: " ~ anchor,
|
||||
new ComposerException("Found recursive alias: " ~ anchor.get,
|
||||
event.startMark));
|
||||
|
||||
return anchors_[anchor];
|
||||
}
|
||||
|
||||
Event event = parser_.peekEvent();
|
||||
immutable event = parser_.peekEvent();
|
||||
const anchor = event.anchor;
|
||||
if(anchor !is null && (anchor in anchors_) !is null)
|
||||
if(!anchor.isNull() && (anchor in anchors_) !is null)
|
||||
{
|
||||
throw new ComposerException("Found duplicate anchor: " ~ anchor,
|
||||
throw new ComposerException("Found duplicate anchor: " ~ anchor.get,
|
||||
event.startMark);
|
||||
}
|
||||
|
||||
Node result;
|
||||
//Associate the anchor, if any, with an uninitialized node.
|
||||
//used to detect duplicate and recursive anchors.
|
||||
if(anchor !is null){anchors_[anchor] = Node();}
|
||||
if(!anchor.isNull())
|
||||
{
|
||||
anchors_[anchor] = Node();
|
||||
}
|
||||
|
||||
if(parser_.checkEvent(EventID.Scalar))
|
||||
{
|
||||
|
@ -192,14 +197,17 @@ final class Composer
|
|||
}
|
||||
else{assert(false, "This code should never be reached");}
|
||||
|
||||
if(anchor !is null){anchors_[anchor] = result;}
|
||||
if(!anchor.isNull())
|
||||
{
|
||||
anchors_[anchor] = result;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
///Compose a scalar node.
|
||||
Node composeScalarNode()
|
||||
{
|
||||
Event event = parser_.getEvent();
|
||||
immutable event = parser_.getEvent();
|
||||
const tag = resolver_.resolve(NodeID.Scalar, event.tag, event.value,
|
||||
event.implicit);
|
||||
|
||||
|
@ -212,7 +220,7 @@ final class Composer
|
|||
///Compose a sequence node.
|
||||
Node composeSequenceNode()
|
||||
{
|
||||
Event startEvent = parser_.getEvent();
|
||||
immutable startEvent = parser_.getEvent();
|
||||
const tag = resolver_.resolve(NodeID.Sequence, startEvent.tag, null,
|
||||
startEvent.implicit);
|
||||
|
||||
|
@ -265,7 +273,7 @@ final class Composer
|
|||
enforce(node.isType!(Node.Pair[]),
|
||||
new ConstructorException("While constructing a mapping, " ~
|
||||
"expected a mapping for merging, but found"
|
||||
~ node.typeString() ~
|
||||
~ node.type.toString ~
|
||||
"NOTE: line/column shows topmost parent "
|
||||
"to which the content is being merged",
|
||||
startMark, endMark));
|
||||
|
@ -278,7 +286,7 @@ final class Composer
|
|||
throw new ConstructorException("While constructing a mapping, " ~
|
||||
"expected a mapping or a list of mappings for "
|
||||
"merging, but found: "
|
||||
~ root.typeString() ~
|
||||
~ root.type.toString ~
|
||||
"NOTE: line/column shows topmost parent "
|
||||
"to which the content is being merged",
|
||||
startMark, endMark);
|
||||
|
@ -290,7 +298,7 @@ final class Composer
|
|||
///Compose a mapping node.
|
||||
Node composeMappingNode()
|
||||
{
|
||||
Event startEvent = parser_.getEvent();
|
||||
immutable startEvent = parser_.getEvent();
|
||||
const tag = resolver_.resolve(NodeID.Mapping, startEvent.tag, null,
|
||||
startEvent.implicit);
|
||||
|
||||
|
|
|
@ -125,6 +125,8 @@ final class Constructor
|
|||
* an array of Nodes (from sequence) or an array of Node.Pair (from mapping).
|
||||
* The value returned by this function will be stored in the resulting node.
|
||||
*
|
||||
* Only one constructor function can be set for one tag.
|
||||
*
|
||||
* Params: tag = Tag for the function to handle.
|
||||
* ctor = Constructor function.
|
||||
*/
|
||||
|
@ -182,7 +184,7 @@ final class Constructor
|
|||
{
|
||||
enforce((tag in fromScalar_) !is null,
|
||||
new ConstructorException("Could not determine a constructor from "
|
||||
"scalar for tag " ~ tag.toString(), start, end));
|
||||
"scalar for tag " ~ tag.get(), start, end));
|
||||
return Node.rawNode(fromScalar_[tag](start, end, value), start, tag);
|
||||
}
|
||||
|
||||
|
@ -200,7 +202,7 @@ final class Constructor
|
|||
{
|
||||
enforce((tag in fromSequence_) !is null,
|
||||
new ConstructorException("Could not determine a constructor from "
|
||||
"sequence for tag " ~ tag.toString(), start, end));
|
||||
"sequence for tag " ~ tag.get(), start, end));
|
||||
return Node.rawNode(fromSequence_[tag](start, end, value), start, tag);
|
||||
}
|
||||
|
||||
|
@ -218,7 +220,7 @@ final class Constructor
|
|||
{
|
||||
enforce((tag in fromMapping_) !is null,
|
||||
new ConstructorException("Could not determine a constructor from "
|
||||
"mapping for tag " ~ tag.toString(), start, end));
|
||||
"mapping for tag " ~ tag.get(), start, end));
|
||||
return Node.rawNode(fromMapping_[tag](start, end, value), start, tag);
|
||||
}
|
||||
}
|
||||
|
@ -540,16 +542,16 @@ Node.Pair[] constructOrderedMap(Mark start, Mark end, Node[] nodes)
|
|||
{
|
||||
auto pairs = getPairs("ordered map", start, end, nodes);
|
||||
|
||||
//In future, the map here should be replaced with something with deterministic
|
||||
//TODO: the map here should be replaced with something with deterministic
|
||||
//memory allocation if possible.
|
||||
//Detect duplicates.
|
||||
Node[Node] map;
|
||||
bool[Node] map;
|
||||
foreach(ref pair; pairs)
|
||||
{
|
||||
enforce((pair.key in map) is null,
|
||||
new ConstructorException("Found a duplicate entry in an ordered map",
|
||||
start, end));
|
||||
map[pair.key] = pair.value;
|
||||
map[pair.key] = true;
|
||||
}
|
||||
clear(map);
|
||||
return pairs;
|
||||
|
@ -609,6 +611,7 @@ Node[] constructSet(Mark start, Mark end, Node.Pair[] pairs)
|
|||
//memory allocation if possible.
|
||||
//Detect duplicates.
|
||||
ubyte[Node] map;
|
||||
scope(exit){clear(map);}
|
||||
Node[] nodes;
|
||||
foreach(ref pair; pairs)
|
||||
{
|
||||
|
@ -618,7 +621,6 @@ Node[] constructSet(Mark start, Mark end, Node.Pair[] pairs)
|
|||
nodes ~= pair.key;
|
||||
}
|
||||
|
||||
clear(map);
|
||||
return nodes;
|
||||
}
|
||||
unittest
|
||||
|
@ -673,17 +675,16 @@ Node[] constructSequence(Mark start, Mark end, Node[] nodes)
|
|||
///Construct an unordered map (unordered set of key: value _pairs without duplicates) node.
|
||||
Node.Pair[] constructMap(Mark start, Mark end, Node.Pair[] pairs)
|
||||
{
|
||||
//In future, the map here should be replaced with something with deterministic
|
||||
//TODO: the map here should be replaced with something with deterministic
|
||||
//memory allocation if possible.
|
||||
//Detect duplicates.
|
||||
Node[Node] map;
|
||||
bool[Node] map;
|
||||
scope(exit){clear(map);}
|
||||
foreach(ref pair; pairs)
|
||||
{
|
||||
enforce((pair.key in map) is null,
|
||||
new ConstructorException("Found a duplicate entry in a map", start, end));
|
||||
map[pair.key] = pair.value;
|
||||
map[pair.key] = true;
|
||||
}
|
||||
|
||||
clear(map);
|
||||
return pairs;
|
||||
}
|
||||
|
|
336
dyaml/dumper.d
Normal file
|
@ -0,0 +1,336 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* YAML dumper.
|
||||
* Code based on PyYAML: http://www.pyyaml.org
|
||||
*/
|
||||
module dyaml.dumper;
|
||||
|
||||
|
||||
import std.stream;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.emitter;
|
||||
import dyaml.encoding;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
import dyaml.linebreak;
|
||||
import dyaml.node;
|
||||
import dyaml.representer;
|
||||
import dyaml.resolver;
|
||||
import dyaml.serializer;
|
||||
import dyaml.tagdirectives;
|
||||
|
||||
|
||||
/**
|
||||
* Dumps YAML documents to files or streams.
|
||||
*
|
||||
* User specified Representer and/or Resolver can be used to support new
|
||||
* tags / data types.
|
||||
*
|
||||
* Setters are provided to affect the output (style, encoding).
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* Write to a file:
|
||||
* --------------------
|
||||
* auto node = Node([1, 2, 3, 4, 5]);
|
||||
* Dumper("file.txt").dump(node);
|
||||
* --------------------
|
||||
*
|
||||
* Write multiple YAML documents to a file:
|
||||
* --------------------
|
||||
* auto node1 = Node([1, 2, 3, 4, 5]);
|
||||
* auto node2 = Node("This document contains only one string");
|
||||
* Dumper("file.txt").dump(node1, node2);
|
||||
* --------------------
|
||||
*
|
||||
* Write to memory:
|
||||
* --------------------
|
||||
* import std.stream;
|
||||
* auto stream = new MemoryStream();
|
||||
* auto node = Node([1, 2, 3, 4, 5]);
|
||||
* Dumper(stream).dump(node);
|
||||
* --------------------
|
||||
*
|
||||
* Use a custom representer/resolver to support custom data types and/or implicit tags:
|
||||
* --------------------
|
||||
* auto node = Node([1, 2, 3, 4, 5]);
|
||||
* auto representer = new Representer();
|
||||
* auto resolver = new Resolver();
|
||||
*
|
||||
* //Add representer functions / resolver expressions here...
|
||||
* --------------------
|
||||
* auto dumper = Dumper("file.txt");
|
||||
* dumper.representer = representer;
|
||||
* dumper.resolver = resolver;
|
||||
* dumper.dump(node);
|
||||
* --------------------
|
||||
*/
|
||||
struct Dumper
|
||||
{
|
||||
unittest
|
||||
{
|
||||
auto node = Node([1, 2, 3, 4, 5]);
|
||||
Dumper(new MemoryStream()).dump(node);
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
auto node1 = Node([1, 2, 3, 4, 5]);
|
||||
auto node2 = Node("This document contains only one string");
|
||||
Dumper(new MemoryStream()).dump(node1, node2);
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
import std.stream;
|
||||
auto stream = new MemoryStream();
|
||||
auto node = Node([1, 2, 3, 4, 5]);
|
||||
Dumper(stream).dump(node);
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
auto node = Node([1, 2, 3, 4, 5]);
|
||||
auto representer = new Representer();
|
||||
auto resolver = new Resolver();
|
||||
auto dumper = Dumper(new MemoryStream());
|
||||
dumper.representer = representer;
|
||||
dumper.resolver = resolver;
|
||||
dumper.dump(node);
|
||||
}
|
||||
|
||||
private:
|
||||
///Resolver to resolve tags.
|
||||
Resolver resolver_;
|
||||
///Representer to represent data types.
|
||||
Representer representer_;
|
||||
|
||||
///Stream to write to.
|
||||
Stream stream_;
|
||||
|
||||
///Write scalars in canonical form?
|
||||
bool canonical_;
|
||||
///Indentation width.
|
||||
int indent_ = 2;
|
||||
///Preferred text width.
|
||||
uint textWidth_ = 80;
|
||||
///Line break to use.
|
||||
LineBreak lineBreak_ = LineBreak.Unix;
|
||||
///Character encoding to use.
|
||||
Encoding encoding_ = Encoding.UTF_8;
|
||||
///YAML version string.
|
||||
string YAMLVersion_ = "1.1";
|
||||
///Tag directives to use.
|
||||
TagDirectives tags_ = TagDirectives();
|
||||
///Always write document start?
|
||||
bool explicitStart_ = false;
|
||||
///Always write document end?
|
||||
bool explicitEnd_ = false;
|
||||
|
||||
public:
|
||||
@disable this();
|
||||
|
||||
/**
|
||||
* Construct a Dumper writing to a file.
|
||||
*
|
||||
* Params: filename = File name to write to.
|
||||
*
|
||||
* Throws: YAMLException if the file can not be dumped to (e.g. cannot be read).
|
||||
*/
|
||||
this(string filename)
|
||||
{
|
||||
try{this(new File(filename));}
|
||||
catch(StreamException e)
|
||||
{
|
||||
throw new YAMLException("Unable to use file for YAML dumping " ~ filename ~ " : " ~ e.msg);
|
||||
}
|
||||
}
|
||||
|
||||
///Construct a Dumper writing to a stream. This is useful to e.g. write to memory.
|
||||
this(Stream stream)
|
||||
{
|
||||
resolver_ = new Resolver();
|
||||
representer_ = new Representer();
|
||||
stream_ = stream;
|
||||
Anchor.addReference();
|
||||
TagDirectives.addReference();
|
||||
}
|
||||
|
||||
///Destroy the Dumper.
|
||||
~this()
|
||||
{
|
||||
Anchor.removeReference();
|
||||
TagDirectives.removeReference();
|
||||
YAMLVersion_ = null;
|
||||
}
|
||||
|
||||
///Specify custom Resolver to use.
|
||||
void resolver(Resolver resolver)
|
||||
{
|
||||
clear(resolver_);
|
||||
resolver_ = resolver;
|
||||
}
|
||||
|
||||
///Specify custom Representer to use.
|
||||
void representer(Representer representer)
|
||||
{
|
||||
clear(representer_);
|
||||
representer_ = representer;
|
||||
}
|
||||
|
||||
///Write scalars in canonical form?
|
||||
void canonical(in bool canonical)
|
||||
{
|
||||
canonical_ = canonical;
|
||||
}
|
||||
|
||||
///Set indentation width. 2 by default. Must not be zero.
|
||||
void indent(in uint indent)
|
||||
in
|
||||
{
|
||||
assert(indent != 0, "Can't use zero YAML indent width");
|
||||
}
|
||||
body
|
||||
{
|
||||
indent_ = indent;
|
||||
}
|
||||
|
||||
///Set preferred text width.
|
||||
void textWidth(in uint width)
|
||||
{
|
||||
textWidth_ = width;
|
||||
}
|
||||
|
||||
///Set line break to use. Unix by default.
|
||||
void lineBreak(in LineBreak lineBreak)
|
||||
{
|
||||
lineBreak_ = lineBreak;
|
||||
}
|
||||
|
||||
///Set character encoding to use. UTF-8 by default.
|
||||
void encoding(in Encoding encoding)
|
||||
{
|
||||
encoding_ = encoding;
|
||||
}
|
||||
|
||||
///Always explicitly write document start?
|
||||
void explicitStart(in bool explicit)
|
||||
{
|
||||
explicitStart_ = explicit;
|
||||
}
|
||||
|
||||
///Always explicitly write document end?
|
||||
void explicitEnd(in bool explicit)
|
||||
{
|
||||
explicitEnd_ = explicit;
|
||||
}
|
||||
|
||||
///Specify YAML version string. "1.1" by default.
|
||||
void YAMLVersion(in string YAMLVersion)
|
||||
{
|
||||
YAMLVersion_ = YAMLVersion;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify tag directives.
|
||||
*
|
||||
* A tag directive specifies a shorthand notation for specifying tags.
|
||||
* Each tag directive associates a handle with a prefix. This allows for
|
||||
* compact tag notation.
|
||||
*
|
||||
* Each handle specified MUST start and end with a '!' character
|
||||
* (a single character "!" handle is allowed as well).
|
||||
*
|
||||
* Only alphanumeric characters, '-', and '_' may be used in handles.
|
||||
*
|
||||
* Each prefix MUST not be empty.
|
||||
*
|
||||
* The "!!" handle is used for default YAML tags with prefix
|
||||
* "tag:yaml.org,2002:". This can be overridden.
|
||||
*
|
||||
* Params: tags = Tag directives (keys are handles, values are prefixes).
|
||||
*
|
||||
* Example:
|
||||
* --------------------
|
||||
* Dumper dumper = Dumper("file.txt");
|
||||
* //This will emit tags starting with "tag:long.org,2011"
|
||||
* //with a "!short!" prefix instead.
|
||||
* dumper.tags("short", "tag:long.org,2011:");
|
||||
* dumper.dump(Node("foo"));
|
||||
* --------------------
|
||||
*/
|
||||
void tagDirectives(string[string] tags)
|
||||
{
|
||||
Tuple!(string, string)[] t;
|
||||
foreach(handle, prefix; tags)
|
||||
{
|
||||
assert(handle.length >= 1 && handle[0] == '!' && handle[$ - 1] == '!',
|
||||
"A tag handle is empty or does not start and end with a "
|
||||
"'!' character : " ~ handle);
|
||||
assert(prefix.length >= 1, "A tag prefix is empty");
|
||||
t ~= tuple(handle, prefix);
|
||||
}
|
||||
tags_ = TagDirectives(t);
|
||||
}
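Note that the example in the doc comment above calls dumper.tags(...), which does not match this signature; a call consistent with the string[string] parameter would look roughly like this (a sketch, with an illustrative handle and prefix):

--------------------
auto dumper = Dumper("file.txt");
//Handles must start and end with '!'; prefixes must be non-empty.
dumper.tagDirectives(["!short!" : "tag:long.org,2011:"]);
dumper.dump(Node("foo"));
--------------------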
|
||||
|
||||
/**
|
||||
* Dump one or more YAML documents to the file/stream.
|
||||
*
|
||||
* Note that while you can call dump() multiple times on the same
|
||||
* dumper, you will end up writing multiple YAML "files" to the same
|
||||
* file/stream.
|
||||
*
|
||||
* Params: documents = Documents to dump (root nodes of the documents).
|
||||
*
|
||||
* Throws: YAMLException on error (e.g. invalid nodes,
|
||||
* unable to write to file/stream).
|
||||
*/
|
||||
void dump(Node[] documents ...)
|
||||
{
|
||||
try
|
||||
{
|
||||
auto emitter = Emitter(stream_, canonical_, indent_, textWidth_, lineBreak_);
|
||||
auto serializer = Serializer(emitter, resolver_, encoding_, explicitStart_,
|
||||
explicitEnd_, YAMLVersion_, tags_);
|
||||
foreach(ref document; documents)
|
||||
{
|
||||
representer_.represent(serializer, document);
|
||||
}
|
||||
}
|
||||
catch(YAMLException e)
|
||||
{
|
||||
throw new YAMLException("Unable to dump YAML: " ~ e.msg);
|
||||
}
|
||||
}
|
||||
|
||||
package:
|
||||
/*
|
||||
* Emit specified events. Used for debugging/testing.
|
||||
*
|
||||
* Params: events = Events to emit.
|
||||
*
|
||||
* Throws: YAMLException if unable to emit.
|
||||
*/
|
||||
void emit(in Event[] events)
|
||||
{
|
||||
try
|
||||
{
|
||||
auto emitter = Emitter(stream_, canonical_, indent_, textWidth_, lineBreak_);
|
||||
foreach(ref event; events)
|
||||
{
|
||||
emitter.emit(event);
|
||||
}
|
||||
}
|
||||
catch(YAMLException e)
|
||||
{
|
||||
throw new YAMLException("Unable to emit YAML: " ~ e.msg);
|
||||
}
|
||||
}
|
||||
}
|
1657
dyaml/emitter.d
Normal file
File diff suppressed because it is too large
16
dyaml/encoding.d
Normal file
|
@ -0,0 +1,16 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
module dyaml.encoding;
|
||||
|
||||
|
||||
///Text encodings.
|
||||
enum Encoding : ubyte
|
||||
{
|
||||
UTF_8,
|
||||
UTF_16,
|
||||
UTF_32
|
||||
}
|
|
@ -14,9 +14,12 @@ import std.array;
|
|||
import std.conv;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.encoding;
|
||||
import dyaml.exception;
|
||||
import dyaml.reader;
|
||||
import dyaml.tag;
|
||||
import dyaml.tagdirectives;
|
||||
import dyaml.token;
|
||||
|
||||
|
||||
|
@ -42,30 +45,44 @@ enum EventID : ubyte
|
|||
*
|
||||
* 48 bytes on 64bit.
|
||||
*/
|
||||
immutable struct Event
|
||||
struct Event
|
||||
{
|
||||
///Value of the event, if any.
|
||||
string value;
|
||||
///Start position of the event in file/stream.
|
||||
Mark startMark;
|
||||
///End position of the event in file/stream.
|
||||
Mark endMark;
|
||||
///Anchor of the event, if any.
|
||||
string anchor;
|
||||
///Value of the event, if any.
|
||||
string value;
|
||||
Anchor anchor;
|
||||
///Tag of the event, if any.
|
||||
Tag tag;
|
||||
///Event type.
|
||||
EventID id;
|
||||
EventID id = EventID.Invalid;
|
||||
///Style of scalar event, if this is a scalar event.
|
||||
ScalarStyle style;
|
||||
ScalarStyle scalarStyle;
|
||||
///Should the tag be implicitly resolved?
|
||||
bool implicit;
|
||||
///TODO figure this out - Unknown, used by PyYAML with Scalar events.
|
||||
bool implicit_2;
|
||||
/**
|
||||
* Is this document event explicit?
|
||||
*
|
||||
* Used if this is a DocumentStart or DocumentEnd.
|
||||
*/
|
||||
alias implicit explicitDocument;
|
||||
///Tag directives, if this is a DocumentStart.
|
||||
TagDirectives tagDirectives;
|
||||
///Encoding of the stream, if this is a StreamStart.
|
||||
Encoding encoding;
|
||||
///Collection style, if this is a SequenceStart or MappingStart.
|
||||
CollectionStyle collectionStyle;
|
||||
|
||||
///Is this a null (uninitialized) event?
|
||||
@property bool isNull() const
|
||||
{
|
||||
return id == EventID.Invalid;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -75,9 +92,9 @@ immutable struct Event
|
|||
* end = End position of the event in the file/stream.
|
||||
* anchor = Anchor, if this is an alias event.
|
||||
*/
|
||||
Event event(EventID id)(in Mark start, in Mark end, in string anchor = null) pure
|
||||
Event event(EventID id)(in Mark start, in Mark end, in Anchor anchor = Anchor()) pure
|
||||
{
|
||||
return Event(start, end, anchor, null, Tag(), id);
|
||||
return Event(null, start, end, anchor, Tag(), id);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -89,16 +106,30 @@ Event event(EventID id)(in Mark start, in Mark end, in string anchor = null) pur
|
|||
* tag = Tag of the sequence, if specified.
|
||||
* implicit = Should the tag be implicitly resolved?
|
||||
*/
|
||||
Event collectionStartEvent(EventID id)(in Mark start, in Mark end, in string anchor,
|
||||
in Tag tag, in bool implicit)
|
||||
Event collectionStartEvent(EventID id)(in Mark start, in Mark end, in Anchor anchor,
|
||||
in Tag tag, in bool implicit,
|
||||
in CollectionStyle style)
|
||||
{
|
||||
static assert(id == EventID.SequenceStart || id == EventID.SequenceEnd ||
|
||||
id == EventID.MappingStart || id == EventID.MappingEnd);
|
||||
return Event(start, end, anchor, null, tag, id, ScalarStyle.Invalid, implicit);
|
||||
return Event(null, start, end, anchor, tag, id, ScalarStyle.Invalid, implicit,
|
||||
false, TagDirectives(), Encoding.UTF_8, style);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a stream start event.
|
||||
*
|
||||
* Params: start = Start position of the event in the file/stream.
|
||||
* end = End position of the event in the file/stream.
|
||||
* encoding = Encoding of the stream.
|
||||
*/
|
||||
Event streamStartEvent(in Mark start, in Mark end, Encoding encoding)
|
||||
{
|
||||
return Event(null, start, end, Anchor(), Tag(), EventID.StreamStart,
|
||||
ScalarStyle.Invalid, false, false, TagDirectives(), encoding);
|
||||
}
|
||||
|
||||
///Aliases for simple events.
|
||||
alias event!(EventID.StreamStart) streamStartEvent;
|
||||
alias event!(EventID.StreamEnd) streamEndEvent;
|
||||
alias event!(EventID.Alias) aliasEvent;
|
||||
alias event!(EventID.SequenceEnd) sequenceEndEvent;
|
||||
|
@ -111,15 +142,17 @@ alias collectionStartEvent!(EventID.MappingStart) mappingStartEvent;
|
|||
/**
|
||||
* Construct a document start event.
|
||||
*
|
||||
* Params: start = Start position of the event in the file/stream.
|
||||
* end = End position of the event in the file/stream.
|
||||
* explicit = Is this an explicit document start?
|
||||
* YAMLVersion = YAML version string of the document.
|
||||
* Params: start = Start position of the event in the file/stream.
|
||||
* end = End position of the event in the file/stream.
|
||||
* explicit = Is this an explicit document start?
|
||||
* YAMLVersion = YAML version string of the document.
|
||||
* tagDirectives = Tag directives of the document.
|
||||
*/
|
||||
Event documentStartEvent(Mark start, Mark end, bool explicit, string YAMLVersion) pure
|
||||
Event documentStartEvent(Mark start, Mark end, bool explicit, string YAMLVersion,
|
||||
TagDirectives tagDirectives)
|
||||
{
|
||||
return Event(start, end, null, YAMLVersion, Tag(), EventID.DocumentStart,
|
||||
ScalarStyle.Invalid, explicit);
|
||||
return Event(YAMLVersion, start, end, Anchor(), Tag(), EventID.DocumentStart,
|
||||
ScalarStyle.Invalid, explicit, false, tagDirectives);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -131,7 +164,7 @@ Event documentStartEvent(Mark start, Mark end, bool explicit, string YAMLVersion
|
|||
*/
|
||||
Event documentEndEvent(Mark start, Mark end, bool explicit)
|
||||
{
|
||||
return Event(start, end, null, null, Tag(), EventID.DocumentEnd,
|
||||
return Event(null, start, end, Anchor(), Tag(), EventID.DocumentEnd,
|
||||
ScalarStyle.Invalid, explicit);
|
||||
}
|
||||
|
||||
|
@ -146,9 +179,10 @@ Event documentEndEvent(Mark start, Mark end, bool explicit)
|
|||
* value = String value of the scalar.
|
||||
* style = Scalar style.
|
||||
*/
|
||||
Event scalarEvent(in Mark start, in Mark end, in string anchor, in Tag tag,
|
||||
in bool implicit, in string value,
|
||||
Event scalarEvent(in Mark start, in Mark end, in Anchor anchor, in Tag tag,
|
||||
in bool[2] implicit, in string value,
|
||||
in ScalarStyle style = ScalarStyle.Invalid)
|
||||
{
|
||||
return Event(start, end, anchor, value, tag, EventID.Scalar, style, implicit);
|
||||
return Event(value, start, end, anchor, tag, EventID.Scalar, style, implicit[0],
|
||||
implicit[1]);
|
||||
}
|
||||
|
|
89
dyaml/flags.d
Normal file
|
@ -0,0 +1,89 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Compact storage of multiple boolean values.
|
||||
module dyaml.flags;
|
||||
|
||||
|
||||
import std.conv;
|
||||
|
||||
|
||||
package:
|
||||
|
||||
/**
|
||||
* Struct holding multiple named boolean values in a single byte.
|
||||
*
|
||||
* Can hold at most 8 values.
|
||||
*
|
||||
* Example:
|
||||
* --------------------
|
||||
* Flags!("empty", "multiline") flags;
|
||||
* assert(flags.empty == false && flags.multiline == false);
|
||||
* flags.multiline = true;
|
||||
* assert(flags.empty == false && flags.multiline == true);
|
||||
* flags.empty = true;
|
||||
* assert(flags.empty == true && flags.multiline == true);
|
||||
* flags.multiline = false;
|
||||
* assert(flags.empty == true && flags.multiline == false);
|
||||
* flags.empty = false;
|
||||
* assert(flags.empty == false && flags.multiline == false);
|
||||
* --------------------
|
||||
*/
|
||||
struct Flags(names ...) if(names.length <= 8)
|
||||
{
|
||||
private:
|
||||
///Byte storing the flags.
|
||||
ubyte flags_;
|
||||
|
||||
///Generate a setter and a getter for each flag.
|
||||
static string flags(string[] names ...)
|
||||
in
|
||||
{
|
||||
assert(names.length <= 8, "Flags struct can only hold 8 flags");
|
||||
}
|
||||
body
|
||||
{
|
||||
string result;
|
||||
foreach(index, name; names)
|
||||
{
|
||||
string istr = to!string(index);
|
||||
result ~= "\n"
|
||||
"@property bool " ~ name ~ "(bool value)\n"
|
||||
"{\n"
|
||||
" flags_ = value ? flags_ | (1 <<" ~ istr ~ ")\n"
|
||||
" : flags_ & (0xFF ^ (1 << " ~ istr ~"));\n"
|
||||
" return value;\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"@property bool " ~ name ~ "() const pure\n"
|
||||
"{\n"
|
||||
" return (flags_ >> " ~ istr ~ ") & 1;\n"
|
||||
"}\n";
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public:
|
||||
///Flag accessors.
|
||||
mixin(flags(names));
|
||||
}
|
||||
unittest
|
||||
{
|
||||
import std.stdio;
|
||||
writeln("Flags unittest");
|
||||
|
||||
Flags!("empty", "multiline") flags;
|
||||
assert(flags.empty == false && flags.multiline == false);
|
||||
flags.multiline = true;
|
||||
assert(flags.empty == false && flags.multiline == true);
|
||||
flags.empty = true;
|
||||
assert(flags.empty == true && flags.multiline == true);
|
||||
flags.multiline = false;
|
||||
assert(flags.empty == true && flags.multiline == false);
|
||||
flags.empty = false;
|
||||
assert(flags.empty == false && flags.multiline == false);
|
||||
}
|
||||
|
28
dyaml/linebreak.d
Normal file
|
@ -0,0 +1,28 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Line break enum.
|
||||
module dyaml.linebreak;
|
||||
|
||||
|
||||
///Enumerates platform specific line breaks.
|
||||
enum LineBreak
|
||||
{
|
||||
Unix,
|
||||
Windows,
|
||||
Macintosh
|
||||
}
|
||||
|
||||
///Get line break string for specified line break.
|
||||
string lineBreak(in LineBreak b) pure
|
||||
{
|
||||
final switch(b)
|
||||
{
|
||||
case LineBreak.Unix: return "\n";
|
||||
case LineBreak.Windows: return "\r";
|
||||
case LineBreak.Macintosh: return "\r\n";
|
||||
}
|
||||
}
|
|
@ -13,16 +13,18 @@ module dyaml.loader;
|
|||
import std.exception;
|
||||
import std.stream;
|
||||
|
||||
import dyaml.event;
|
||||
import dyaml.node;
|
||||
import dyaml.anchor;
|
||||
import dyaml.composer;
|
||||
import dyaml.constructor;
|
||||
import dyaml.resolver;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
import dyaml.node;
|
||||
import dyaml.parser;
|
||||
import dyaml.reader;
|
||||
import dyaml.resolver;
|
||||
import dyaml.scanner;
|
||||
import dyaml.tagdirectives;
|
||||
import dyaml.token;
|
||||
import dyaml.exception;
|
||||
|
||||
|
||||
/**
|
||||
|
@ -119,7 +121,6 @@ Node[] loadAll(in string filename)
|
|||
return result;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Load all YAML documents from a stream.
|
||||
*
|
||||
|
@ -224,6 +225,8 @@ struct Loader
|
|||
constructor_ = constructor;
|
||||
composer_ = new Composer(parser_, resolver_, constructor_);
|
||||
name_ = name;
|
||||
Anchor.addReference();
|
||||
TagDirectives.addReference();
|
||||
}
|
||||
catch(YAMLException e)
|
||||
{
|
||||
|
@ -287,6 +290,8 @@ struct Loader
|
|||
///Destroy the Loader.
|
||||
~this()
|
||||
{
|
||||
Anchor.removeReference();
|
||||
TagDirectives.removeReference();
|
||||
clear(reader_);
|
||||
clear(scanner_);
|
||||
clear(parser_);
|
||||
|
|
238
dyaml/node.d
|
@ -17,6 +17,7 @@ import std.datetime;
|
|||
import std.exception;
|
||||
import std.math;
|
||||
import std.stdio;
|
||||
import std.string;
|
||||
import std.traits;
|
||||
import std.typecons;
|
||||
import std.variant;
|
||||
|
@ -57,36 +58,52 @@ struct YAMLNull{}
|
|||
package struct YAMLMerge{}
|
||||
|
||||
///Base class for YAMLContainer - used for user defined YAML types.
|
||||
private abstract class YAMLObject
|
||||
package abstract class YAMLObject
|
||||
{
|
||||
protected:
|
||||
public:
|
||||
///Get type of the stored value.
|
||||
@property TypeInfo type() const;
|
||||
|
||||
protected:
|
||||
///Test for equality with another YAMLObject.
|
||||
bool equals(const YAMLObject rhs) const;
|
||||
}
|
||||
|
||||
//Stores a user defined YAML data type.
|
||||
private class YAMLContainer(T) : YAMLObject
|
||||
package class YAMLContainer(T) : YAMLObject
|
||||
{
|
||||
private:
|
||||
//Stored value.
|
||||
T value_;
|
||||
|
||||
//Construct a YAMLContainer holding specified value.
|
||||
this(T value){value_ = value;}
|
||||
|
||||
protected:
|
||||
public:
|
||||
//Get type of the stored value.
|
||||
@property override TypeInfo type() const {return typeid(T);}
|
||||
|
||||
//Get string representation of the container.
|
||||
override string toString()
|
||||
{
|
||||
static if(!hasMember!(T, "toString"))
|
||||
{
|
||||
return super.toString();
|
||||
}
|
||||
else
|
||||
{
|
||||
return format("YAMLContainer(", value_.toString(), ")");
|
||||
}
|
||||
}
|
||||
|
||||
protected:
|
||||
//Test for equality with another YAMLObject.
|
||||
override bool equals(const YAMLObject rhs) const
|
||||
{
|
||||
if(rhs.type !is typeid(T)){return false;}
|
||||
return value_ == (cast(YAMLContainer)rhs).value_;
|
||||
}
|
||||
|
||||
private:
|
||||
//Construct a YAMLContainer holding specified value.
|
||||
this(T value){value_ = value;}
|
||||
}
|
||||
|
||||
|
||||
|
@ -165,10 +182,12 @@ struct Node
|
|||
* otherwise emitting will fail.
|
||||
*
|
||||
* Params: value = Value to store in the node.
|
||||
* tag = Tag override. If specified, the tag of the node
|
||||
* when emitted will be this tag, regardless of
|
||||
* what Representer determines. Can be used when a
|
||||
* single D data type needs to use multiple YAML tags.
|
||||
* tag = Overrides tag of the node when emitted, regardless
|
||||
* of tag determined by Representer. Representer uses
|
||||
* this to determine YAML data type when a D data type
|
||||
* maps to multiple different YAML data types. Tag must
|
||||
* be in full form, e.g. "tag:yaml.org,2002:omap", not
|
||||
* a shortcut, like "!!omap".
|
||||
*/
|
||||
this(T)(T value, in string tag = null) if (isSomeString!T ||
|
||||
(!isArray!T && !isAssociativeArray!T))
|
||||
|
@ -209,14 +228,23 @@ struct Node
|
|||
* node, and those nodes are stored.
|
||||
*
|
||||
* Params: array = Values to store in the node.
|
||||
* tag = Tag override. If specified, the tag of the node
|
||||
* when emitted will be this tag, regardless of
|
||||
* what Representer determines. Can be used when a
|
||||
* single D data type needs to use multiple YAML tags.
|
||||
* In particular, this can be used to differentiate
|
||||
* between YAML sequences (!!seq) and sets (!!set),
|
||||
* which both are internally represented as an array_
|
||||
* of nodes.
|
||||
* tag = Overrides tag of the node when emitted, regardless
|
||||
* of tag determined by Representer. Representer uses
|
||||
* this to determine YAML data type when a D data type
|
||||
* maps to multiple different YAML data types.
|
||||
* This is used to differentiate between YAML sequences
|
||||
* (!!seq) and sets (!!set), which both are internally
|
||||
* represented as an array_ of nodes. Tag must be in
|
||||
* full form, e.g. "tag:yaml.org,2002:set", not a
|
||||
* shortcut, like "!!set".
|
||||
*
|
||||
* Examples:
|
||||
* --------------------
|
||||
* //Will be emitted as a sequence (default for arrays)
|
||||
* auto seq = Node([1, 2, 3, 4, 5]);
|
||||
* //Will be emitted as a set (overridden tag)
|
||||
* auto set = Node([1, 2, 3, 4, 5], "tag:yaml.org,2002:set");
|
||||
* --------------------
|
||||
*/
|
||||
this(T)(T[] array, in string tag = null) if (!isSomeString!(T[]))
|
||||
{
|
||||
|
@ -226,6 +254,11 @@ struct Node
|
|||
{
|
||||
value_ = Value(array);
|
||||
}
|
||||
//Need to handle byte buffers separately
|
||||
else static if(is(T == byte) || is(T == ubyte))
|
||||
{
|
||||
value_ = Value(cast(ubyte[]) array);
|
||||
}
|
||||
else
|
||||
{
|
||||
Node[] nodes;
|
||||
|
@ -241,6 +274,11 @@ struct Node
|
|||
assert(length == 3);
|
||||
assert(opIndex(2).get!int == 3);
|
||||
}
|
||||
|
||||
//Will be emitted as a sequence (default for arrays)
|
||||
auto seq = Node([1, 2, 3, 4, 5]);
|
||||
//Will be emitted as a set (overridden tag)
|
||||
auto set = Node([1, 2, 3, 4, 5], "tag:yaml.org,2002:set");
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -250,21 +288,33 @@ struct Node
|
|||
* directly. Otherwise they are converted to nodes and then stored.
|
||||
*
|
||||
* Params: array = Values to store in the node.
|
||||
* tag = Tag override. If specified, the tag of the node
|
||||
* when emitted will be this tag, regardless of
|
||||
* what Representer determines. Can be used when a
|
||||
* single D data type needs to use multiple YAML tags.
|
||||
* In particular, this can be used to differentiate
|
||||
* between YAML unordered maps, (!!map) ordered maps,
|
||||
* (!!omap) and pairs (!!pairs), which are all
|
||||
* internally represented as an array_ of node pairs.
|
||||
* tag = Overrides tag of the node when emitted, regardless
|
||||
* of tag determined by Representer. Representer uses
|
||||
* this to determine YAML data type when a D data type
|
||||
* maps to multiple different YAML data types.
|
||||
* This is used to differentiate between YAML unordered
|
||||
* mappings (!!map), ordered mappings (!!omap), and
|
||||
* pairs (!!pairs) which are all internally represented
|
||||
* as an array_ of node pairs. Tag must be in full
|
||||
* form, e.g. "tag:yaml.org,2002:omap", not a shortcut,
|
||||
* like "!!omap".
|
||||
*
|
||||
* Examples:
|
||||
* --------------------
|
||||
* //Will be emitted as an unordered mapping (default for mappings)
|
||||
* auto map = Node([1 : "a", 2 : "b"]);
|
||||
* //Will be emitted as an ordered map (overridden tag)
|
||||
* auto omap = Node([1 : "a", 2 : "b"], "tag:yaml.org,2002:omap");
|
||||
* //Will be emitted as pairs (overridden tag)
|
||||
* auto pairs = Node([1 : "a", 2 : "b"], "tag:yaml.org,2002:pairs");
|
||||
* --------------------
|
||||
*/
|
||||
this(K, V)(V[K] array, in string tag = null)
|
||||
{
|
||||
tag_ = Tag(tag);
|
||||
|
||||
Node.Pair[] pairs;
|
||||
foreach(ref key, ref value; array){pairs ~= Pair(key, value);}
|
||||
foreach(key, ref value; array){pairs ~= Pair(key, value);}
|
||||
value_ = Value(pairs);
|
||||
}
|
||||
unittest
|
||||
|
@ -278,13 +328,20 @@ struct Node
|
|||
assert(length == 2);
|
||||
assert(opIndex("2").get!int == 2);
|
||||
}
|
||||
|
||||
//Will be emitted as an unordered mapping (default for mappings)
|
||||
auto map = Node([1 : "a", 2 : "b"]);
|
||||
//Will be emitted as an ordered map (overridden tag)
|
||||
auto omap = Node([1 : "a", 2 : "b"], "tag:yaml.org,2002:omap");
|
||||
//Will be emitted as pairs (overridden tag)
|
||||
auto pairs = Node([1 : "a", 2 : "b"], "tag:yaml.org,2002:pairs");
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a node from arrays of keys_ and values_.
|
||||
*
|
||||
* Constructs a mapping node with key-value pairs from
|
||||
* keys and values, keeping their order. Useful when order
|
||||
* keys_ and values_, keeping their order. Useful when order
|
||||
* is important (ordered maps, pairs).
|
||||
*
|
||||
* keys and values must have equal length.
|
||||
|
@ -294,16 +351,29 @@ struct Node
|
|||
*
|
||||
* Params: keys = Keys of the mapping, from first to last pair.
|
||||
* values = Values of the mapping, from first to last pair.
|
||||
* tag = Tag override. If specified, the tag of the node
|
||||
* when emitted will be this tag, regardless of
|
||||
* what Representer determines. Can be used when a
|
||||
* single D data type needs to use multiple YAML tags.
|
||||
* In particular, this can be used to differentiate
|
||||
* between YAML unordered maps, (!!map) ordered maps,
|
||||
* (!!omap) and pairs (!!pairs), which are all
|
||||
* internally represented as an array_ of node pairs.
|
||||
* tag = Overrides tag of the node when emitted, regardless
|
||||
* of tag determined by Representer. Representer uses
|
||||
* this to determine YAML data type when a D data type
|
||||
* maps to multiple different YAML data types.
|
||||
* This is used to differentiate between YAML unordered
|
||||
* mappings (!!map), ordered mappings (!!omap), and
|
||||
* pairs (!!pairs) which are all internally represented
|
||||
* as an array_ of node pairs. Tag must be in full
|
||||
* form, e.g. "tag:yaml.org,2002:omap", not a shortcut,
|
||||
* like "!!omap".
|
||||
*
|
||||
* Examples:
|
||||
* --------------------
|
||||
* //Will be emitted as an unordered mapping (default for mappings)
|
||||
* auto map = Node([1, 2], ["a", "b"]);
|
||||
* //Will be emitted as an ordered map (overridden tag)
|
||||
* auto omap = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:omap");
|
||||
* //Will be emitted as pairs (overridden tag)
|
||||
* auto pairs = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:pairs");
|
||||
* --------------------
|
||||
*/
|
||||
this(K, V)(K[] keys, V[] values, in string tag = null)
|
||||
this(K, V)(K[] keys, V[] values, in string tag = null)
|
||||
if(!(isSomeString!(K[]) || isSomeString!(V[])))
|
||||
in
|
||||
{
|
||||
assert(keys.length == values.length,
|
||||
|
@ -326,6 +396,13 @@ struct Node
|
|||
assert(length == 2);
|
||||
assert(opIndex("2").get!int == 2);
|
||||
}
|
||||
|
||||
//Will be emitted as an unordered mapping (default for mappings)
|
||||
auto map = Node([1, 2], ["a", "b"]);
|
||||
//Will be emitted as an ordered map (overridden tag)
|
||||
auto omap = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:omap");
|
||||
//Will be emitted as pairs (overridden tag)
|
||||
auto pairs = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:pairs");
|
||||
}
|
||||
|
||||
///Is this node valid (initialized)?
|
||||
|
@ -442,6 +519,13 @@ struct Node
|
|||
//we're getting the default value.
|
||||
if(isMapping){return this["="].get!T;}
|
||||
|
||||
void throwUnexpectedType()
|
||||
{
|
||||
//Can't get the value.
|
||||
throw new NodeException("Node has unexpected type " ~ type.toString ~
|
||||
". Expected " ~ typeid(T).toString, startMark_);
|
||||
}
|
||||
|
||||
static if(isSomeString!T)
|
||||
{
|
||||
//Try to convert to string.
|
||||
|
@ -484,12 +568,14 @@ struct Node
|
|||
target = to!T(temp);
|
||||
return;
|
||||
}
|
||||
else
|
||||
{
|
||||
throwUnexpectedType();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
//Can't get the value.
|
||||
throw new NodeException("Node has unexpected type " ~ typeString ~
|
||||
". Expected " ~ typeid(T).toString, startMark_);
|
||||
throwUnexpectedType();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -553,15 +639,15 @@ struct Node
|
|||
|
||||
alias Node.Value Value;
|
||||
alias Node.Pair Pair;
|
||||
Node n1 = Node(Value(cast(long)11));
|
||||
Node n2 = Node(Value(cast(long)12));
|
||||
Node n3 = Node(Value(cast(long)13));
|
||||
Node n4 = Node(Value(cast(long)14));
|
||||
Node n1 = Node(cast(long)11);
|
||||
Node n2 = Node(cast(long)12);
|
||||
Node n3 = Node(cast(long)13);
|
||||
Node n4 = Node(cast(long)14);
|
||||
|
||||
Node k1 = Node(Value("11"));
|
||||
Node k2 = Node(Value("12"));
|
||||
Node k3 = Node(Value("13"));
|
||||
Node k4 = Node(Value("14"));
|
||||
Node k1 = Node("11");
|
||||
Node k2 = Node("12");
|
||||
Node k3 = Node("13");
|
||||
Node k4 = Node("14");
|
||||
|
||||
Node narray = Node(Value([n1, n2, n3, n4]));
|
||||
Node nmap = Node(Value([Pair(k1, n1),
|
||||
|
@ -631,6 +717,8 @@ struct Node
|
|||
}
|
||||
unittest
|
||||
{
|
||||
writeln("D:YAML Node opIndexAssign unittest");
|
||||
|
||||
with(Node([1, 2, 3, 4, 3]))
|
||||
{
|
||||
opIndexAssign(42, 3);
|
||||
|
@ -825,6 +913,8 @@ struct Node
|
|||
}
|
||||
unittest
|
||||
{
|
||||
writeln("D:YAML Node add unittest 1");
|
||||
|
||||
with(Node([1, 2, 3, 4]))
|
||||
{
|
||||
add(5.0f);
|
||||
|
@ -860,6 +950,7 @@ struct Node
|
|||
}
|
||||
unittest
|
||||
{
|
||||
writeln("D:YAML Node add unittest 2");
|
||||
with(Node([1, 2], [3, 4]))
|
||||
{
|
||||
add(5, "6");
|
||||
|
@ -901,7 +992,7 @@ struct Node
|
|||
if(idx >= 0)
|
||||
{
|
||||
auto pairs = get!(Node.Pair[])();
|
||||
copy(pairs[idx + 1 .. $], pairs[idx .. $ - 1]);
|
||||
moveAll(pairs[idx + 1 .. $], pairs[idx .. $ - 1]);
|
||||
pairs.length = pairs.length - 1;
|
||||
value_ = Value(pairs);
|
||||
}
|
||||
|
@ -912,6 +1003,7 @@ struct Node
|
|||
}
|
||||
unittest
|
||||
{
|
||||
writeln("D:YAML Node remove unittest");
|
||||
with(Node([1, 2, 3, 4, 3]))
|
||||
{
|
||||
remove(3);
|
||||
|
@ -954,7 +1046,7 @@ struct Node
|
|||
static if(isIntegral!T)
|
||||
{
|
||||
auto nodes = value_.get!(Node[]);
|
||||
copy(nodes[index + 1 .. $], nodes[index .. $ - 1]);
|
||||
moveAll(nodes[index + 1 .. $], nodes[index .. $ - 1]);
|
||||
nodes.length = nodes.length - 1;
|
||||
value_ = Value(nodes);
|
||||
return;
|
||||
|
@ -967,7 +1059,7 @@ struct Node
|
|||
if(idx >= 0)
|
||||
{
|
||||
auto pairs = get!(Node.Pair[])();
|
||||
copy(pairs[idx + 1 .. $], pairs[idx .. $ - 1]);
|
||||
moveAll(pairs[idx + 1 .. $], pairs[idx .. $ - 1]);
|
||||
pairs.length = pairs.length - 1;
|
||||
value_ = Value(pairs);
|
||||
}
|
||||
|
@ -978,6 +1070,7 @@ struct Node
|
|||
}
|
||||
unittest
|
||||
{
|
||||
writeln("D:YAML Node removeAt unittest");
|
||||
with(Node([1, 2, 3, 4, 3]))
|
||||
{
|
||||
removeAt(3);
|
||||
|
@ -1034,6 +1127,7 @@ struct Node
|
|||
{
|
||||
auto seq1 = get!(Node[]);
|
||||
auto seq2 = rhs.get!(Node[]);
|
||||
if(seq1 is seq2){return true;}
|
||||
if(seq1.length != seq2.length){return false;}
|
||||
foreach(node; 0 .. seq1.length)
|
||||
{
|
||||
|
@ -1045,6 +1139,7 @@ struct Node
|
|||
{
|
||||
auto map1 = get!(Node.Pair[]);
|
||||
auto map2 = rhs.get!(Node.Pair[]);
|
||||
if(map1 is map2){return true;}
|
||||
if(map1.length != map2.length){return false;}
|
||||
foreach(pair; 0 .. map1.length)
|
||||
{
|
||||
|
@ -1064,10 +1159,17 @@ struct Node
|
|||
if(!rhs.isFloat){return false;}
|
||||
real r1 = get!real;
|
||||
real r2 = rhs.get!real;
|
||||
bool equals(real r1, real r2)
|
||||
{
|
||||
return r1 <= r2 + real.epsilon && r1 >= r2 - real.epsilon;
|
||||
}
|
||||
if(isNaN(r1)){return isNaN(r2);}
|
||||
return r1 == r2;
|
||||
return equals(r1, r2);
|
||||
}
|
||||
else
|
||||
{
|
||||
return value_ == rhs.value_;
|
||||
}
|
||||
else{return value_ == rhs.value_;}
|
||||
}
|
||||
assert(false, "Unknown kind of node");
|
||||
}
|
||||
|
@ -1115,7 +1217,7 @@ struct Node
|
|||
if(isScalar)
|
||||
{
|
||||
return indent ~ "scalar(" ~
|
||||
(convertsTo!string ? get!string : typeString) ~ ")\n";
|
||||
(convertsTo!string ? get!string : type.toString) ~ ")\n";
|
||||
}
|
||||
assert(false);
|
||||
}
|
||||
|
@ -1126,10 +1228,9 @@ struct Node
|
|||
return Value(cast(YAMLObject)new YAMLContainer!T(value));
|
||||
}
|
||||
|
||||
//Return string representation of the type of the node.
|
||||
@property string typeString() const {return to!string(value_.type);}
|
||||
//Get type of the node value (YAMLObject for user types).
|
||||
@property TypeInfo type() const {return value_.type;}
|
||||
|
||||
private:
|
||||
/*
|
||||
* Determine if the value stored by the node is of specified type.
|
||||
*
|
||||
|
@ -1137,12 +1238,22 @@ struct Node
|
|||
*/
|
||||
@property bool isType(T)() const {return value_.type is typeid(T);}
|
||||
|
||||
//Return tag of the node.
|
||||
@property Tag tag() const {return tag_;}
|
||||
|
||||
//Set tag of the node.
|
||||
@property void tag(Tag tag) {tag_ = tag;}
|
||||
|
||||
private:
|
||||
//Is the value an integer of some kind?
|
||||
alias isType!long isInt;
|
||||
|
||||
//Is the value a floating point number of some kind?
|
||||
alias isType!real isFloat;
|
||||
|
||||
//Is the value a string of some kind?
|
||||
alias isType!string isString;
|
||||
|
||||
//Does given node have the same type as this node?
|
||||
bool hasEqualType(ref Node node)
|
||||
{
|
||||
|
@ -1193,13 +1304,20 @@ struct Node
|
|||
}
|
||||
else
|
||||
{
|
||||
if(node.get!T == index){return idx;}
|
||||
try
|
||||
{
|
||||
if(node.get!T == index){return idx;}
|
||||
}
|
||||
catch(NodeException e)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
///Check if index is integral and in range.
|
||||
//Check if index is integral and in range.
|
||||
void checkSequenceIndex(T)(T index)
|
||||
{
|
||||
static if(!isIntegral!T)
|
||||
|
|
132
dyaml/parser.d
|
@ -14,12 +14,15 @@ module dyaml.parser;
|
|||
import std.array;
|
||||
import std.conv;
|
||||
import std.exception;
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
import dyaml.scanner;
|
||||
import dyaml.token;
|
||||
import dyaml.tag;
|
||||
import dyaml.tagdirectives;
|
||||
|
||||
|
||||
package:
|
||||
|
@ -104,29 +107,24 @@ class ParserException : MarkedYAMLException
|
|||
///Generates events from tokens provided by a Scanner.
|
||||
final class Parser
|
||||
{
|
||||
invariant()
|
||||
{
|
||||
assert(currentEvent_.length <= 1);
|
||||
}
|
||||
|
||||
private:
|
||||
///Default tag handle shortcuts and replacements.
|
||||
static string[string] defaultTags_;
|
||||
static Tuple!(string, string)[] defaultTags_;
|
||||
static this()
|
||||
{
|
||||
defaultTags_ = ["!" : "!", "!!" : "tag:yaml.org,2002:"];
|
||||
defaultTags_ = [tuple("!", "!"), tuple("!!", "tag:yaml.org,2002:")];
|
||||
}
|
||||
|
||||
///Scanner providing YAML tokens.
|
||||
Scanner scanner_;
|
||||
|
||||
///Holds zero or one event.
|
||||
Event[] currentEvent_;
|
||||
///Event produced by the most recent state.
|
||||
Event currentEvent_;
|
||||
|
||||
///YAML version string.
|
||||
string YAMLVersion_ = null;
|
||||
///Tag handle shortcuts and replacements.
|
||||
string[string] tagHandles_;
|
||||
Tuple!(string, string)[] tagHandles_;
|
||||
|
||||
///Stack of states.
|
||||
Event delegate()[] states_;
|
||||
|
@ -147,7 +145,6 @@ final class Parser
|
|||
~this()
|
||||
{
|
||||
clear(currentEvent_);
|
||||
currentEvent_ = null;
|
||||
clear(tagHandles_);
|
||||
tagHandles_ = null;
|
||||
clear(states_);
|
||||
|
@ -170,17 +167,17 @@ final class Parser
|
|||
bool checkEvent(EventID[] ids...)
|
||||
{
|
||||
//Check if the next event is one of specified types.
|
||||
if(currentEvent_.empty && state_ !is null)
|
||||
if(currentEvent_.isNull && state_ !is null)
|
||||
{
|
||||
currentEvent_ ~= state_();
|
||||
currentEvent_ = state_();
|
||||
}
|
||||
|
||||
if(!currentEvent_.empty)
|
||||
if(!currentEvent_.isNull)
|
||||
{
|
||||
if(ids.length == 0){return true;}
|
||||
else
|
||||
{
|
||||
const nextId = currentEvent_.front.id;
|
||||
const nextId = currentEvent_.id;
|
||||
foreach(id; ids)
|
||||
{
|
||||
if(nextId == id){return true;}
|
||||
|
@ -198,11 +195,11 @@ final class Parser
|
|||
*/
|
||||
Event peekEvent()
|
||||
{
|
||||
if(currentEvent_.empty && state_ !is null)
|
||||
if(currentEvent_.isNull && state_ !is null)
|
||||
{
|
||||
currentEvent_ ~= state_();
|
||||
currentEvent_ = state_();
|
||||
}
|
||||
if(!currentEvent_.empty){return currentEvent_[0];}
|
||||
if(!currentEvent_.isNull){return currentEvent_;}
|
||||
assert(false, "No event left to peek");
|
||||
}
|
||||
|
||||
|
@ -214,15 +211,15 @@ final class Parser
|
|||
Event getEvent()
|
||||
{
|
||||
//Get the next event and proceed further.
|
||||
if(currentEvent_.empty && state_ !is null)
|
||||
if(currentEvent_.isNull && state_ !is null)
|
||||
{
|
||||
currentEvent_ ~= state_();
|
||||
currentEvent_ = state_();
|
||||
}
|
||||
|
||||
if(!currentEvent_.empty)
|
||||
if(!currentEvent_.isNull)
|
||||
{
|
||||
Event result = currentEvent_[0];
|
||||
currentEvent_.length = 0;
|
||||
immutable Event result = currentEvent_;
|
||||
clear(currentEvent_);
|
||||
return result;
|
||||
}
|
||||
assert(false, "No event left to get");
|
||||
|
@ -260,7 +257,7 @@ final class Parser
|
|||
{
|
||||
Token token = scanner_.getToken();
|
||||
state_ = &parseImplicitDocumentStart;
|
||||
return streamStartEvent(token.startMark, token.endMark);
|
||||
return streamStartEvent(token.startMark, token.endMark, token.encoding);
|
||||
}
|
||||
|
||||
///Parse implicit document start, unless explicit is detected: if so, parse explicit.
|
||||
|
@ -275,8 +272,8 @@ final class Parser
|
|||
|
||||
states_ ~= &parseDocumentEnd;
|
||||
state_ = &parseBlockNode;
|
||||
|
||||
return documentStartEvent(token.startMark, token.endMark, false, null);
|
||||
|
||||
return documentStartEvent(token.startMark, token.endMark, false, null, TagDirectives());
|
||||
}
|
||||
return parseDocumentStart();
|
||||
}
|
||||
|
@ -292,7 +289,7 @@ final class Parser
|
|||
{
|
||||
const startMark = scanner_.peekToken().startMark;
|
||||
|
||||
processDirectives();
|
||||
auto tagDirectives = processDirectives();
|
||||
enforce(scanner_.checkToken(TokenID.DocumentStart),
|
||||
new ParserException("Expected document start but found " ~
|
||||
to!string(scanner_.peekToken().id),
|
||||
|
@ -301,7 +298,7 @@ final class Parser
|
|||
const endMark = scanner_.getToken().endMark;
|
||||
states_ ~= &parseDocumentEnd;
|
||||
state_ = &parseDocumentContent;
|
||||
return documentStartEvent(startMark, endMark, true, YAMLVersion_);
|
||||
return documentStartEvent(startMark, endMark, true, YAMLVersion_, tagDirectives);
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -339,19 +336,18 @@ final class Parser
|
|||
}
|
||||
|
||||
///Process directives at the beginning of a document.
|
||||
void processDirectives()
|
||||
TagDirectives processDirectives()
|
||||
{
|
||||
//Destroy version and tag handles from previous document.
|
||||
YAMLVersion_ = null;
|
||||
string[string] empty;
|
||||
tagHandles_ = empty;
|
||||
tagHandles_.length = 0;
|
||||
|
||||
//Process directives.
|
||||
while(scanner_.checkToken(TokenID.Directive))
|
||||
{
|
||||
Token token = scanner_.getToken();
|
||||
//Name and value are separated by '\0'.
|
||||
const parts = token.value.split("\0");
|
||||
auto parts = token.value.split("\0");
|
||||
const name = parts[0];
|
||||
if(name == "YAML")
|
||||
{
|
||||
|
@ -367,22 +363,38 @@ final class Parser
|
|||
else if(name == "TAG")
|
||||
{
|
||||
assert(parts.length == 3, "Tag directive stored incorrectly in a token");
|
||||
const handle = parts[1];
|
||||
auto handle = parts[1];
|
||||
|
||||
foreach(h, replacement; tagHandles_)
|
||||
foreach(ref pair; tagHandles_)
|
||||
{
|
||||
//handle
|
||||
auto h = pair[0];
|
||||
auto replacement = pair[1];
|
||||
enforce(h != handle, new ParserException("Duplicate tag handle: " ~
|
||||
handle, token.startMark));
|
||||
}
|
||||
tagHandles_[handle] = parts[2];
|
||||
tagHandles_ ~= tuple(handle, parts[2]);
|
||||
}
|
||||
}
|
||||
|
||||
TagDirectives value = tagHandles_.length == 0 ? TagDirectives() : TagDirectives(tagHandles_);
|
||||
|
||||
//Add any default tag handles that haven't been overridden.
|
||||
foreach(key, value; defaultTags_)
|
||||
foreach(ref defaultPair; defaultTags_)
|
||||
{
|
||||
if((key in tagHandles_) is null){tagHandles_[key] = value;}
|
||||
bool found = false;
|
||||
foreach(ref pair; tagHandles_)
|
||||
{
|
||||
if(defaultPair[0] == pair[0] )
|
||||
{
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(!found){tagHandles_ ~= defaultPair;}
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
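For illustration only - not part of this commit - this is roughly how a %TAG directive maps onto the handle/prefix tuples collected above; the directive text and the assumption that defaultTags_ holds the standard "!" and "!!" handles are hypothetical:
unittest
{
    import std.typecons;

    //A document beginning with:
    //  %TAG !e! tag:example.com,2011:
    //  ---
    //would yield one user-defined pair; defaults are appended afterwards
    //unless a directive already overrode them.
    Tuple!(string, string)[] handles;
    handles ~= tuple("!e!", "tag:example.com,2011:");
    handles ~= tuple("!",   "!");                   //assumed default handle
    handles ~= tuple("!!",  "tag:yaml.org,2002:");  //assumed default handle
    assert(handles[0] == tuple("!e!", "tag:example.com,2011:"));
}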
|
||||
/**
|
||||
|
@ -410,7 +422,7 @@ final class Parser
|
|||
{
|
||||
Token token = scanner_.getToken();
|
||||
state_ = popState();
|
||||
return aliasEvent(token.startMark, token.endMark, token.value);
|
||||
return aliasEvent(token.startMark, token.endMark, Anchor(token.value));
|
||||
}
|
||||
|
||||
string anchor = null;
|
||||
|
@ -448,47 +460,51 @@ final class Parser
|
|||
{
|
||||
state_ = &parseIndentlessSequenceEntry;
|
||||
return sequenceStartEvent(startMark, scanner_.peekToken().endMark,
|
||||
anchor, Tag(tag), implicit);
|
||||
Anchor(anchor), Tag(tag), implicit,
|
||||
CollectionStyle.Block);
|
||||
}
|
||||
|
||||
if(scanner_.checkToken(TokenID.Scalar))
|
||||
{
|
||||
Token token = scanner_.getToken();
|
||||
|
||||
//PyYAML uses a Tuple!(bool, bool) here, but the second bool
|
||||
//is never used after that - so we don't use it.
|
||||
implicit = (token.style == ScalarStyle.Plain && tag is null) || tag == "!";
|
||||
bool implicit_2 = (!implicit) && tag is null;
|
||||
state_ = popState();
|
||||
return scalarEvent(startMark, token.endMark, anchor, Tag(tag),
|
||||
implicit, token.value, token.style);
|
||||
return scalarEvent(startMark, token.endMark, Anchor(anchor), Tag(tag),
|
||||
[implicit, implicit_2], token.value, token.style);
|
||||
}
|
||||
|
||||
if(scanner_.checkToken(TokenID.FlowSequenceStart))
|
||||
{
|
||||
endMark = scanner_.peekToken().endMark;
|
||||
state_ = &parseFlowSequenceEntry!true;
|
||||
return sequenceStartEvent(startMark, endMark, anchor, Tag(tag), implicit);
|
||||
return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
|
||||
implicit, CollectionStyle.Flow);
|
||||
}
|
||||
|
||||
if(scanner_.checkToken(TokenID.FlowMappingStart))
|
||||
{
|
||||
endMark = scanner_.peekToken().endMark;
|
||||
state_ = &parseFlowMappingKey!true;
|
||||
return mappingStartEvent(startMark, endMark, anchor, Tag(tag), implicit);
|
||||
return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
|
||||
implicit, CollectionStyle.Flow);
|
||||
}
|
||||
|
||||
if(block && scanner_.checkToken(TokenID.BlockSequenceStart))
|
||||
{
|
||||
endMark = scanner_.peekToken().endMark;
|
||||
state_ = &parseBlockSequenceEntry!true;
|
||||
return sequenceStartEvent(startMark, endMark, anchor, Tag(tag), implicit);
|
||||
return sequenceStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
|
||||
implicit, CollectionStyle.Block);
|
||||
}
|
||||
|
||||
if(block && scanner_.checkToken(TokenID.BlockMappingStart))
|
||||
{
|
||||
endMark = scanner_.peekToken().endMark;
|
||||
state_ = &parseBlockMappingKey!true;
|
||||
return mappingStartEvent(startMark, endMark, anchor, Tag(tag), implicit);
|
||||
return mappingStartEvent(startMark, endMark, Anchor(anchor), Tag(tag),
|
||||
implicit, CollectionStyle.Block);
|
||||
}
|
||||
|
||||
if(anchor != null || tag !is null)
|
||||
|
@ -499,7 +515,8 @@ final class Parser
|
|||
//but the second bool is never used after that - so we don't use it.
|
||||
|
||||
//Empty scalars are allowed even if a tag or an anchor is specified.
|
||||
return scalarEvent(startMark, endMark, anchor, Tag(tag), implicit , "");
|
||||
return scalarEvent(startMark, endMark, Anchor(anchor), Tag(tag),
|
||||
[implicit, false] , "");
|
||||
}
|
||||
|
||||
Token token = scanner_.peekToken();
|
||||
|
@ -525,11 +542,21 @@ final class Parser
|
|||
|
||||
if(handle.length > 0)
|
||||
{
|
||||
string replacement = null;
|
||||
foreach(ref pair; tagHandles_)
|
||||
{
|
||||
//pair[0] is handle, pair[1] replacement.
|
||||
if(pair[0] == handle)
|
||||
{
|
||||
replacement = pair[1];
|
||||
break;
|
||||
}
|
||||
}
|
||||
//handle must be in tagHandles_
|
||||
enforce((handle in tagHandles_) !is null,
|
||||
enforce(replacement !is null,
|
||||
new ParserException("While parsing a node", startMark,
|
||||
"found undefined tag handle: " ~ handle, tagMark));
|
||||
return tagHandles_[handle] ~ suffix;
|
||||
return replacement ~ suffix;
|
||||
}
|
||||
return suffix;
|
||||
}
|
||||
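A small, purely illustrative sketch of the expansion performed above; the handle, suffix and prefix values are made up:
unittest
{
    //Given the directive  %TAG !e! tag:example.com,2011:
    //a node tagged  !e!point  expands to the full tag below.
    string handle = "!e!";
    string suffix = "point";
    string replacement = "tag:example.com,2011:"; //found by the loop over tagHandles_
    assert(replacement ~ suffix == "tag:example.com,2011:point");
}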
|
@ -699,7 +726,8 @@ final class Parser
|
|||
{
|
||||
Token token = scanner_.peekToken();
|
||||
state_ = &parseFlowSequenceEntryMappingKey;
|
||||
return mappingStartEvent(token.startMark, token.endMark, null, Tag(), true);
|
||||
return mappingStartEvent(token.startMark, token.endMark,
|
||||
Anchor(), Tag(), true, CollectionStyle.Flow);
|
||||
}
|
||||
else if(!scanner_.checkToken(TokenID.FlowSequenceEnd))
|
||||
{
|
||||
|
@ -838,6 +866,6 @@ final class Parser
|
|||
{
|
||||
//PyYAML uses a Tuple!(true, false) for the second last arg here,
|
||||
//but the second bool is never used after that - so we don't use it.
|
||||
return scalarEvent(mark, mark, null, Tag(), true, "");
|
||||
return scalarEvent(mark, mark, Anchor(), Tag(), [true, false], "");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@ import std.string;
|
|||
import std.system;
|
||||
import std.utf;
|
||||
|
||||
import dyaml.encoding;
|
||||
import dyaml.exception;
|
||||
|
||||
|
||||
|
@ -29,22 +30,10 @@ class ReaderException : YAMLException
|
|||
this(string msg){super("Error reading YAML stream: " ~ msg);}
|
||||
}
|
||||
|
||||
|
||||
///Reads data from a stream and converts it to UTF-32 (dchar) data.
|
||||
final class Reader
|
||||
{
|
||||
private:
|
||||
///Unicode encodings.
|
||||
enum UTF
|
||||
{
|
||||
///UTF-8.
|
||||
_8,
|
||||
///UTF-16.
|
||||
_16,
|
||||
///UTF-32.
|
||||
_32
|
||||
}
|
||||
|
||||
///Input stream.
|
||||
EndianStream stream_;
|
||||
///Buffer of currently loaded characters.
|
||||
|
@ -54,7 +43,7 @@ final class Reader
|
|||
///Index of the current character in the stream.
|
||||
size_t charIndex_ = 0;
|
||||
///Encoding of the input stream.
|
||||
UTF utf_= UTF._8;
|
||||
Encoding encoding_;
|
||||
///Current line in file.
|
||||
uint line_;
|
||||
///Current column in file.
|
||||
|
@ -92,7 +81,7 @@ final class Reader
|
|||
//handle files short enough not to have a BOM
|
||||
if(stream_.available < 2)
|
||||
{
|
||||
utf_ = UTF._8;
|
||||
encoding_ = Encoding.UTF_8;
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -106,10 +95,10 @@ final class Reader
|
|||
rawBuffer8_[1] = cast(char)(bytes / 256);
|
||||
rawUsed_ = 2;
|
||||
goto case 0;
|
||||
case 0: utf_ = UTF._8; break;
|
||||
case 0: encoding_ = Encoding.UTF_8; break;
|
||||
case 1, 2:
|
||||
//readBOM() eats two more bytes in this case so get them back
|
||||
utf_ = UTF._16;
|
||||
encoding_ = Encoding.UTF_16;
|
||||
rawBuffer16_[0] = stream_.getcw();
|
||||
rawUsed_ = 1;
|
||||
enforce(stream_.available % 2 == 0,
|
||||
|
@ -118,7 +107,7 @@ final class Reader
|
|||
case 3, 4:
|
||||
enforce(stream_.available % 4 == 0,
|
||||
new ReaderException("Number of bytes in an UTF-32 stream not divisible by 4"));
|
||||
utf_ = UTF._32;
|
||||
encoding_ = Encoding.UTF_32;
|
||||
break;
|
||||
default: assert(false, "Unknown UTF BOM");
|
||||
}
|
||||
|
@ -221,7 +210,7 @@ final class Reader
|
|||
++bufferOffset_;
|
||||
++charIndex_;
|
||||
//new line
|
||||
if(['\n', '\x85', '\u2028', '\u2029'].canFind(c) ||
|
||||
if(['\n', '\u0085', '\u2028', '\u2029'].canFind(c) ||
|
||||
(c == '\r' && buffer_[bufferOffset_] != '\n'))
|
||||
{
|
||||
++line_;
|
||||
|
@ -244,6 +233,9 @@ final class Reader
|
|||
///Get index of the current character in the stream.
|
||||
@property size_t charIndex() const {return charIndex_;}
|
||||
|
||||
///Get encoding of the input stream.
|
||||
@property Encoding encoding() const {return encoding_;}
|
||||
|
||||
private:
|
||||
/**
|
||||
* Update buffer to be able to read length characters after buffer offset.
|
||||
|
@ -308,9 +300,9 @@ final class Reader
|
|||
*/
|
||||
dchar getDChar(in size_t available)
|
||||
{
|
||||
switch(utf_)
|
||||
switch(encoding_)
|
||||
{
|
||||
case UTF._8:
|
||||
case Encoding.UTF_8:
|
||||
//Temp buffer for moving data in rawBuffer8_.
|
||||
char[bufferLength8_] temp;
|
||||
//Shortcut for ASCII.
|
||||
|
@ -341,7 +333,7 @@ final class Reader
|
|||
temp[0 .. rawUsed_] = rawBuffer8_[idx .. len];
|
||||
rawBuffer8_[0 .. rawUsed_] = temp[0 .. rawUsed_];
|
||||
return result;
|
||||
case UTF._16:
|
||||
case Encoding.UTF_16:
|
||||
//Temp buffer for moving data in rawBuffer8_.
|
||||
wchar[bufferLength16_] temp;
|
||||
//Words to read.
|
||||
|
@ -366,7 +358,7 @@ final class Reader
|
|||
temp[0 .. rawUsed_] = rawBuffer16_[idx .. len];
|
||||
rawBuffer16_[0 .. rawUsed_] = temp[0 .. rawUsed_];
|
||||
return result;
|
||||
case UTF._32:
|
||||
case Encoding.UTF_32:
|
||||
dchar result;
|
||||
stream_.read(result);
|
||||
return result;
|
||||
|
@ -407,7 +399,7 @@ final class Reader
|
|||
*
|
||||
* Returns: True if all the characters are printable, false otherwise.
|
||||
*/
|
||||
static pure bool printable(const ref dchar[] chars)
|
||||
static bool printable(const ref dchar[] chars)
|
||||
{
|
||||
foreach(c; chars)
|
||||
{
|
||||
|
@ -426,24 +418,24 @@ final class Reader
|
|||
@property bool done()
|
||||
{
|
||||
return (stream_.available == 0 &&
|
||||
((utf_ == UTF._8 && rawUsed_ == 0) ||
|
||||
(utf_ == UTF._16 && rawUsed_ == 0) ||
|
||||
utf_ == UTF._32));
|
||||
((encoding_ == Encoding.UTF_8 && rawUsed_ == 0) ||
|
||||
(encoding_ == Encoding.UTF_16 && rawUsed_ == 0) ||
|
||||
encoding_ == Encoding.UTF_32));
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
writeln("D:YAML reader endian unittest");
|
||||
void endian_test(ubyte[] data, UTF utf_expected, Endian endian_expected)
|
||||
void endian_test(ubyte[] data, Encoding encoding_expected, Endian endian_expected)
|
||||
{
|
||||
auto reader = new Reader(new MemoryStream(data));
|
||||
assert(reader.utf_ == utf_expected);
|
||||
assert(reader.encoding_ == encoding_expected);
|
||||
assert(reader.stream_.endian == endian_expected);
|
||||
}
|
||||
ubyte[] little_endian_utf_16 = [0xFF, 0xFE, 0x7A, 0x00];
|
||||
ubyte[] big_endian_utf_16 = [0xFE, 0xFF, 0x00, 0x7A];
|
||||
endian_test(little_endian_utf_16, UTF._16, Endian.LittleEndian);
|
||||
endian_test(big_endian_utf_16, UTF._16, Endian.BigEndian);
|
||||
endian_test(little_endian_utf_16, Encoding.UTF_16, Endian.littleEndian);
|
||||
endian_test(big_endian_utf_16, Encoding.UTF_16, Endian.bigEndian);
|
||||
}
|
||||
unittest
|
||||
{
|
||||
|
@ -476,7 +468,7 @@ final class Reader
|
|||
assert(reader.peek(3) == 'a');
|
||||
}
|
||||
utf_test!char(to!(char[])(data), BOM.UTF8);
|
||||
utf_test!wchar(to!(wchar[])(data), endian == Endian.BigEndian ? BOM.UTF16BE : BOM.UTF16LE);
|
||||
utf_test(data, endian == Endian.BigEndian ? BOM.UTF32BE : BOM.UTF32LE);
|
||||
utf_test!wchar(to!(wchar[])(data), endian == Endian.bigEndian ? BOM.UTF16BE : BOM.UTF16LE);
|
||||
utf_test(data, endian == Endian.bigEndian ? BOM.UTF32BE : BOM.UTF32LE);
|
||||
}
|
||||
}
|
||||
|
|
541
dyaml/representer.d
Normal file
541
dyaml/representer.d
Normal file
|
@ -0,0 +1,541 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* YAML representer.
|
||||
* Code based on PyYAML: http://www.pyyaml.org
|
||||
*/
|
||||
module dyaml.representer;
|
||||
|
||||
|
||||
import std.algorithm;
|
||||
import std.array;
|
||||
import std.base64;
|
||||
import std.conv;
|
||||
import std.datetime;
|
||||
import std.exception;
|
||||
import std.format;
|
||||
import std.math;
|
||||
import std.stream;
|
||||
|
||||
import dyaml.exception;
|
||||
import dyaml.node;
|
||||
import dyaml.serializer;
|
||||
import dyaml.tag;
|
||||
|
||||
|
||||
///Exception thrown on Representer errors.
|
||||
class RepresenterException : YAMLException
|
||||
{
|
||||
public:
|
||||
///Construct a RepresenterException with the specified message.
|
||||
this(string msg){super(msg);}
|
||||
}
|
||||
|
||||
///Used to represent nodes of various data types as scalar/sequence/mapping nodes ready for output.
|
||||
final class Representer
|
||||
{
|
||||
private:
|
||||
Node function(ref Node, Representer)[TypeInfo] representers_;
|
||||
|
||||
public:
|
||||
/**
|
||||
* Construct a Representer.
|
||||
*
|
||||
* Params: useDefaultRepresenters = Use default representer functions
|
||||
* for default YAML types? This can be
|
||||
* disabled to use custom representer
|
||||
* functions for default types.
|
||||
*/
|
||||
this(bool useDefaultRepresenters = true)
|
||||
{
|
||||
addRepresenter!YAMLNull(&representNull);
|
||||
addRepresenter!string(&representString);
|
||||
addRepresenter!(ubyte[])(&representBytes);
|
||||
addRepresenter!bool(&representBool);
|
||||
addRepresenter!long(&representLong);
|
||||
addRepresenter!real(&representReal);
|
||||
addRepresenter!(Node[])(&representNodes);
|
||||
addRepresenter!(Node.Pair[])(&representPairs);
|
||||
addRepresenter!SysTime(&representSysTime);
|
||||
}
|
||||
|
||||
///Destroy the Representer.
|
||||
~this()
|
||||
{
|
||||
clear(representers_);
|
||||
representers_ = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a function to represent nodes with a specific data type.
|
||||
*
|
||||
* The representer function takes a reference to a Node storing the data
|
||||
* type and to the Representer. It returns the represented node and may
|
||||
* throw a RepresenterException. See the example for more information.
|
||||
*
|
||||
* Only one function may be specified for one data type. Default data
|
||||
* types already have representer functions unless disabled in these
|
||||
* Representer constructor.
|
||||
*
|
||||
* Params: representer = Representer function to add.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* Representing a simple struct:
|
||||
* --------------------
|
||||
* import std.string;
|
||||
*
|
||||
* import yaml;
|
||||
*
|
||||
* struct MyStruct
|
||||
* {
|
||||
* int x, y, z;
|
||||
* }
|
||||
*
|
||||
* Node representMyStruct(ref Node node, Representer representer)
|
||||
* {
|
||||
* //The node is guaranteed to be MyStruct as we add representer for MyStruct.
|
||||
* auto value = node.get!MyStruct;
|
||||
* //Using custom scalar format, x:y:z.
|
||||
* auto scalar = format(value.x, ":", value.y, ":", value.z);
|
||||
* //Representing as a scalar, with custom tag to specify this data type.
|
||||
* return representer.representScalar("!mystruct.tag", scalar);
|
||||
* }
|
||||
*
|
||||
* void main()
|
||||
* {
|
||||
* auto dumper = Dumper("file.txt");
|
||||
* auto representer = new Representer;
|
||||
* representer.addRepresenter!MyStruct(&representMyStruct);
|
||||
* dumper.representer = representer;
|
||||
* dumper.dump(Node(MyStruct(1,2,3)));
|
||||
* }
|
||||
* --------------------
|
||||
*
|
||||
* Representing a class:
|
||||
* --------------------
|
||||
* import std.string;
|
||||
*
|
||||
* import yaml;
|
||||
*
|
||||
* class MyClass
|
||||
* {
|
||||
* int x, y, z;
|
||||
*
|
||||
* this(int x, int y, int z)
|
||||
* {
|
||||
* this.x = x;
|
||||
* this.y = y;
|
||||
* this.z = z;
|
||||
* }
|
||||
*
|
||||
* ///We need custom opEquals for node equality, as default opEquals compares references.
|
||||
* override bool opEquals(Object rhs)
|
||||
* {
|
||||
* if(typeid(rhs) != typeid(MyClass)){return false;}
|
||||
* auto t = cast(MyClass)rhs;
|
||||
* return x == t.x && y == t.y && z == t.z;
|
||||
* }
|
||||
*
|
||||
* ///Useful for Node.get!string .
|
||||
* override string toString()
|
||||
* {
|
||||
* return format("MyClass(", x, ", ", y, ", ", z, ")");
|
||||
* }
|
||||
* }
|
||||
*
|
||||
* //Same as representMyStruct.
|
||||
* Node representMyClass(ref Node node, Representer representer)
|
||||
* {
|
||||
* //The node is guaranteed to be MyClass as we add representer for MyClass.
|
||||
* auto value = node.get!MyClass;
|
||||
* //Using custom scalar format, x:y:z.
|
||||
* auto scalar = format(value.x, ":", value.y, ":", value.z);
|
||||
* //Representing as a scalar, with custom tag to specify this data type.
|
||||
* return representer.representScalar("!myclass.tag", scalar);
|
||||
* }
|
||||
*
|
||||
* void main()
|
||||
* {
|
||||
* auto dumper = Dumper("file.txt");
|
||||
* auto representer = new Representer;
|
||||
* representer.addRepresenter!MyClass(&representMyClass);
|
||||
* dumper.representer = representer;
|
||||
* dumper.dump(Node(new MyClass(1,2,3)));
|
||||
* }
|
||||
* --------------------
|
||||
*/
|
||||
void addRepresenter(T)(Node function(ref Node, Representer) representer)
|
||||
{
|
||||
assert((typeid(T) in representers_) is null,
|
||||
"Representer function for data type " ~ typeid(T).toString() ~
|
||||
" already specified. Can't specify another one");
|
||||
representers_[typeid(T)] = representer;
|
||||
}
|
||||
|
||||
//If profiling shows a bottleneck on tag construction in these 3 methods,
|
||||
//we'll need to take Tag directly and have string based wrappers for
|
||||
//user code.
|
||||
|
||||
/**
|
||||
* Represent a scalar with specified tag.
|
||||
*
|
||||
* This is used by representer functions that produce scalars.
|
||||
*
|
||||
* Params: tag = Tag of the scalar.
|
||||
* scalar = Scalar value.
|
||||
*
|
||||
* Returns: The represented node.
|
||||
*
|
||||
* Example:
|
||||
* --------------------
|
||||
* struct MyStruct
|
||||
* {
|
||||
* int x, y, z;
|
||||
* }
|
||||
*
|
||||
* Node representMyStruct(ref Node node, Representer representer)
|
||||
* {
|
||||
* auto value = node.get!MyStruct;
|
||||
* auto scalar = format(value.x, ":", value.y, ":", value.z);
|
||||
* return representer.representScalar("!mystruct.tag", scalar);
|
||||
* }
|
||||
* --------------------
|
||||
*/
|
||||
Node representScalar(in string tag, string scalar)
|
||||
{
|
||||
return Node.rawNode(Node.Value(scalar), Mark(), Tag(tag));
|
||||
}
|
||||
|
||||
/**
|
||||
* Represent a sequence with specified tag, representing children first.
|
||||
*
|
||||
* This is used by representer functions that produce sequences.
|
||||
*
|
||||
* Params: tag = Tag of the sequence.
|
||||
* sequence = Sequence of nodes.
|
||||
*
|
||||
* Returns: The represented node.
|
||||
*
|
||||
* Throws: RepresenterException if a child could not be represented.
|
||||
*
|
||||
* Example:
|
||||
* --------------------
|
||||
* struct MyStruct
|
||||
* {
|
||||
* int x, y, z;
|
||||
* }
|
||||
*
|
||||
* Node representMyStruct(ref Node node, Representer representer)
|
||||
* {
|
||||
* auto value = node.get!MyStruct;
|
||||
* auto nodes = [Node(value.x), Node(value.y), Node(value.z)];
|
||||
* return representer.representSequence("!mystruct.tag", nodes);
|
||||
* }
|
||||
* --------------------
|
||||
*/
|
||||
Node representSequence(in string tag, Node[] sequence)
|
||||
{
|
||||
Node[] value;
|
||||
value.length = sequence.length;
|
||||
foreach(idx, ref item; sequence)
|
||||
{
|
||||
value[idx] = representData(item);
|
||||
}
|
||||
return Node.rawNode(Node.Value(value), Mark(), Tag(tag));
|
||||
}
|
||||
|
||||
/**
|
||||
* Represent a mapping with specified tag, representing children first.
|
||||
*
|
||||
* This is used by representer functions that produce mappings.
|
||||
*
|
||||
* Params: tag = Tag of the mapping.
|
||||
* pairs = Key-value pairs of the mapping.
|
||||
*
|
||||
* Returns: The represented node.
|
||||
*
|
||||
* Throws: RepresenterException if a child could not be represented.
|
||||
*
|
||||
* Example:
|
||||
* --------------------
|
||||
* struct MyStruct
|
||||
* {
|
||||
* int x, y, z;
|
||||
* }
|
||||
*
|
||||
* Node representMyStruct(ref Node node, Representer representer)
|
||||
* {
|
||||
* auto value = node.get!MyStruct;
|
||||
* auto pairs = [Node.Pair("x", value.x),
|
||||
* Node.Pair("y", value.y),
|
||||
* Node.Pair("z", value.z)];
|
||||
* return representer.representMapping("!mystruct.tag", pairs);
|
||||
* }
|
||||
* --------------------
|
||||
*/
|
||||
Node representMapping(in string tag, Node.Pair[] pairs)
|
||||
{
|
||||
Node.Pair[] value;
|
||||
value.length = pairs.length;
|
||||
foreach(idx, ref pair; pairs)
|
||||
{
|
||||
value[idx] = Node.Pair(representData(pair.key), representData(pair.value));
|
||||
}
|
||||
return Node.rawNode(Node.Value(value), Mark(), Tag(tag));
|
||||
}
|
||||
|
||||
package:
|
||||
///Represent a node based on its type, and return the represented result.
|
||||
Node representData(ref Node data)
|
||||
{
|
||||
//User types are wrapped in YAMLObject.
|
||||
auto type = data.isUserType ? data.get!YAMLObject.type : data.type;
|
||||
|
||||
enforce((type in representers_) !is null,
|
||||
new RepresenterException("No YAML representer function for type "
|
||||
~ type.toString() ~ ", cannot represent it."));
|
||||
Node result = representers_[type](data, this);
|
||||
if(!data.tag.isNull()){result.tag = data.tag;}
|
||||
return result;
|
||||
}
|
||||
|
||||
///Represent a node, serializing with specified Serializer.
|
||||
void represent(ref Serializer serializer, ref Node node)
|
||||
{
|
||||
auto data = representData(node);
|
||||
serializer.serialize(data);
|
||||
}
|
||||
}
|
||||
|
||||
///Represent a null node as a null.
|
||||
Node representNull(ref Node node, Representer representer)
|
||||
{
|
||||
return representer.representScalar("tag:yaml.org,2002:null", "null");
|
||||
}
|
||||
|
||||
///Represent a string node as a string scalar.
|
||||
Node representString(ref Node node, Representer representer)
|
||||
{
|
||||
string value = node.get!string;
|
||||
return value is null ? representNull(node, representer)
|
||||
: representer.representScalar("tag:yaml.org,2002:str", value);
|
||||
}
|
||||
|
||||
///Represent a bytes node as a binary scalar.
|
||||
Node representBytes(ref Node node, Representer representer)
|
||||
{
|
||||
const ubyte[] value = node.get!(ubyte[]);
|
||||
if(value is null){return representNull(node, representer);}
|
||||
return representer.representScalar("tag:yaml.org,2002:binary",
|
||||
cast(string)Base64.encode(value));
|
||||
}
|
||||
|
||||
///Represent a bool node as a bool scalar.
|
||||
Node representBool(ref Node node, Representer representer)
|
||||
{
|
||||
return representer.representScalar("tag:yaml.org,2002:bool",
|
||||
node.get!bool ? "true" : "false");
|
||||
}
|
||||
|
||||
///Represent a long node as an integer scalar.
|
||||
Node representLong(ref Node node, Representer representer)
|
||||
{
|
||||
return representer.representScalar("tag:yaml.org,2002:int",
|
||||
to!string(node.get!long));
|
||||
}
|
||||
|
||||
///Represent a real node as a floating point scalar.
|
||||
Node representReal(ref Node node, Representer representer)
|
||||
{
|
||||
real f = node.get!real;
|
||||
string value = isNaN(f) ? ".nan":
|
||||
f == real.infinity ? ".inf":
|
||||
f == -1.0 * real.infinity ? "-.inf":
|
||||
{auto a = appender!string;
|
||||
formattedWrite(a, "%12f", f);
|
||||
return a.data;}();
|
||||
|
||||
return representer.representScalar("tag:yaml.org,2002:float", value);
|
||||
}
|
||||
|
||||
///Represent a sequence node as sequence/set.
|
||||
Node representNodes(ref Node node, Representer representer)
|
||||
{
|
||||
auto nodes = node.get!(Node[]);
|
||||
if(node.tag == Tag("tag:yaml.org,2002:set"))
|
||||
{
|
||||
///YAML sets are mappings with null values.
|
||||
Node.Pair[] pairs;
|
||||
pairs.length = nodes.length;
|
||||
Node dummy;
|
||||
foreach(idx, ref key; nodes)
|
||||
{
|
||||
pairs[idx] = Node.Pair(key, representNull(dummy, representer));
|
||||
}
|
||||
return representer.representMapping(node.tag.get, pairs);
|
||||
}
|
||||
else
|
||||
{
|
||||
return representer.representSequence("tag:yaml.org,2002:seq", nodes);
|
||||
}
|
||||
}
|
||||
|
||||
///Represent a mapping node as map/ordered map/pairs.
|
||||
Node representPairs(ref Node node, Representer representer)
|
||||
{
|
||||
auto pairs = node.get!(Node.Pair[]);
|
||||
|
||||
bool hasDuplicates(Node.Pair[] pairs)
|
||||
{
|
||||
//TODO The map here should be replaced with something with deterministic
|
||||
//memory allocation if possible.
|
||||
bool[Node] map;
|
||||
scope(exit){clear(map);}
|
||||
foreach(ref pair; pairs)
|
||||
{
|
||||
if((pair.key in map) !is null){return true;}
|
||||
map[pair.key] = true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
Node[] mapToSequence(Node.Pair[] pairs)
|
||||
{
|
||||
Node[] nodes;
|
||||
nodes.length = pairs.length;
|
||||
foreach(idx, ref pair; pairs)
|
||||
{
|
||||
nodes[idx] = representer.representMapping("tag:yaml.org,2002:map", [pair]);
|
||||
}
|
||||
return nodes;
|
||||
}
|
||||
|
||||
if(node.tag == Tag("tag:yaml.org,2002:omap"))
|
||||
{
|
||||
enforce(!hasDuplicates(pairs),
|
||||
new RepresenterException("Found a duplicate entry "
|
||||
"in an ordered map"));
|
||||
return representer.representSequence(node.tag.get, mapToSequence(pairs));
|
||||
}
|
||||
else if(node.tag == Tag("tag:yaml.org,2002:pairs"))
|
||||
{
|
||||
return representer.representSequence(node.tag.get, mapToSequence(pairs));
|
||||
}
|
||||
else
|
||||
{
|
||||
enforce(!hasDuplicates(pairs),
|
||||
new RepresenterException("Found a duplicate entry "
|
||||
"in an unordered map"));
|
||||
return representer.representMapping("tag:yaml.org,2002:map", pairs);
|
||||
}
|
||||
}
|
||||
|
||||
///Represent a SysTime node as a timestamp.
|
||||
Node representSysTime(ref Node node, Representer representer)
|
||||
{
|
||||
return representer.representScalar("tag:yaml.org,2002:timestamp",
|
||||
node.get!SysTime.toISOExtString());
|
||||
}
|
||||
|
||||
//Unittests
|
||||
private:
|
||||
|
||||
import std.string;
|
||||
|
||||
import dyaml.dumper;
|
||||
|
||||
struct MyStruct
|
||||
{
|
||||
int x, y, z;
|
||||
}
|
||||
|
||||
Node representMyStruct(ref Node node, Representer representer)
|
||||
{
|
||||
//The node is guaranteed to be MyStruct as we add representer for MyStruct.
|
||||
auto value = node.get!MyStruct;
|
||||
//Using custom scalar format, x:y:z.
|
||||
auto scalar = format(value.x, ":", value.y, ":", value.z);
|
||||
//Representing as a scalar, with custom tag to specify this data type.
|
||||
return representer.representScalar("!mystruct.tag", scalar);
|
||||
}
|
||||
|
||||
Node representMyStructSeq(ref Node node, Representer representer)
|
||||
{
|
||||
auto value = node.get!MyStruct;
|
||||
auto nodes = [Node(value.x), Node(value.y), Node(value.z)];
|
||||
return representer.representSequence("!mystruct.tag", nodes);
|
||||
}
|
||||
|
||||
Node representMyStructMap(ref Node node, Representer representer)
|
||||
{
|
||||
auto value = node.get!MyStruct;
|
||||
auto pairs = [Node.Pair("x", value.x),
|
||||
Node.Pair("y", value.y),
|
||||
Node.Pair("z", value.z)];
|
||||
return representer.representMapping("!mystruct.tag", pairs);
|
||||
}
|
||||
|
||||
class MyClass
|
||||
{
|
||||
int x, y, z;
|
||||
|
||||
this(int x, int y, int z)
|
||||
{
|
||||
this.x = x;
|
||||
this.y = y;
|
||||
this.z = z;
|
||||
}
|
||||
|
||||
///We need custom opEquals for node equality, as default opEquals compares references.
|
||||
override bool opEquals(Object rhs)
|
||||
{
|
||||
if(typeid(rhs) != typeid(MyClass)){return false;}
|
||||
auto t = cast(MyClass)rhs;
|
||||
return x == t.x && y == t.y && z == t.z;
|
||||
}
|
||||
|
||||
///Useful for Node.get!string .
|
||||
override string toString()
|
||||
{
|
||||
return format("MyClass(", x, ", ", y, ", ", z, ")");
|
||||
}
|
||||
}
|
||||
|
||||
//Same as representMyStruct.
|
||||
Node representMyClass(ref Node node, Representer representer)
|
||||
{
|
||||
//The node is guaranteed to be MyClass as we add representer for MyClass.
|
||||
auto value = node.get!MyClass;
|
||||
//Using custom scalar format, x:y:z.
|
||||
auto scalar = format(value.x, ":", value.y, ":", value.z);
|
||||
//Representing as a scalar, with custom tag to specify this data type.
|
||||
return representer.representScalar("!myclass.tag", scalar);
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
foreach(r; [&representMyStruct,
|
||||
&representMyStructSeq,
|
||||
&representMyStructMap])
|
||||
{
|
||||
auto dumper = Dumper(new MemoryStream());
|
||||
auto representer = new Representer;
|
||||
representer.addRepresenter!MyStruct(r);
|
||||
dumper.representer = representer;
|
||||
dumper.dump(Node(MyStruct(1,2,3)));
|
||||
}
|
||||
}
|
||||
|
||||
unittest
|
||||
{
|
||||
auto dumper = Dumper(new MemoryStream());
|
||||
auto representer = new Representer;
|
||||
representer.addRepresenter!MyClass(&representMyClass);
|
||||
dumper.representer = representer;
|
||||
dumper.dump(Node(new MyClass(1,2,3)));
|
||||
}
|
|
@ -116,7 +116,7 @@ final class Resolver
|
|||
*/
|
||||
Tag resolve(NodeID kind, Tag tag, string value, in bool implicit)
|
||||
{
|
||||
if(!tag.isNull() && tag.toString() != "!"){return tag;}
|
||||
if(!tag.isNull() && tag.get() != "!"){return tag;}
|
||||
|
||||
if(kind == NodeID.Scalar)
|
||||
{
|
||||
|
@ -186,6 +186,15 @@ final class Resolver
|
|||
assert(tagMatch("tag:yaml.org,2002:yaml", ["!", "&", "*"]));
|
||||
}
|
||||
|
||||
///Return default scalar tag.
|
||||
@property Tag defaultScalarTag() const {return defaultScalarTag_;}
|
||||
|
||||
///Return default sequence tag.
|
||||
@property Tag defaultSequenceTag() const {return defaultSequenceTag_;}
|
||||
|
||||
///Return default mapping tag.
|
||||
@property Tag defaultMappingTag() const {return defaultMappingTag_;}
|
||||
|
||||
private:
|
||||
///Add default implicit resolvers.
|
||||
void addImplicitResolvers()
|
||||
|
|
|
@ -409,7 +409,7 @@ final class Scanner
|
|||
///Add STREAM-START token.
|
||||
void fetchStreamStart()
|
||||
{
|
||||
tokens_ ~= streamStartToken(reader_.mark, reader_.mark);
|
||||
tokens_ ~= streamStartToken(reader_.mark, reader_.mark, reader_.encoding);
|
||||
}
|
||||
|
||||
///Add STREAM-END token.
|
||||
|
@ -1040,7 +1040,6 @@ final class Scanner
|
|||
return tagToken(startMark, reader_.mark, to!string(handle ~ '\0' ~ suffix));
|
||||
}
|
||||
|
||||
|
||||
///Scan a block scalar token with specified style.
|
||||
Token scanBlockScalar(ScalarStyle style)
|
||||
{
|
||||
|
@ -1057,7 +1056,7 @@ final class Scanner
|
|||
//Determine the indentation level and go to the first non-empty line.
|
||||
Mark endMark;
|
||||
dchar[] breaks;
|
||||
uint indent = min(1, indent_ + 1);
|
||||
uint indent = max(1, indent_ + 1);
|
||||
if(increment == int.min)
|
||||
{
|
||||
auto indentation = scanBlockScalarIndentation();
|
||||
|
@ -1076,7 +1075,7 @@ final class Scanner
|
|||
dstring lineBreak = "";
|
||||
|
||||
//Used to construct the result.
|
||||
auto appender = Appender!string();
|
||||
auto appender = appender!string();
|
||||
|
||||
//Scan the inner part of the block scalar.
|
||||
while(reader_.column == indent && reader_.peek() != '\0')
|
||||
|
@ -1223,7 +1222,7 @@ final class Scanner
|
|||
const startMark = reader_.mark;
|
||||
const quote = reader_.get();
|
||||
|
||||
auto appender = Appender!dstring();
|
||||
auto appender = appender!dstring();
|
||||
appender.put(scanFlowScalarNonSpaces(quotes, startMark));
|
||||
while(reader_.peek() != quote)
|
||||
{
|
||||
|
@ -1252,7 +1251,7 @@ final class Scanner
|
|||
' ': '\x20',
|
||||
'\"': '\"',
|
||||
'\\': '\\',
|
||||
'N': '\x85',
|
||||
'N': '\u0085',
|
||||
'_': '\xA0',
|
||||
'L': '\u2028',
|
||||
'P': '\u2029'];
|
||||
|
@ -1343,7 +1342,7 @@ final class Scanner
|
|||
new ScannerException("While scanning a quoted scalar", startMark,
|
||||
"found unexpected end of stream", reader_.mark));
|
||||
|
||||
auto appender = Appender!dstring();
|
||||
auto appender = appender!dstring();
|
||||
if(isBreak(c))
|
||||
{
|
||||
const lineBreak = scanLineBreak();
|
||||
|
@ -1360,7 +1359,7 @@ final class Scanner
|
|||
///Scan line breaks in a flow scalar.
|
||||
dstring scanFlowScalarBreaks(in Mark startMark)
|
||||
{
|
||||
auto appender = Appender!dstring();
|
||||
auto appender = appender!dstring();
|
||||
for(;;)
|
||||
{
|
||||
//Instead of checking indentation, we check for document separators.
|
||||
|
@ -1385,7 +1384,7 @@ final class Scanner
|
|||
{
|
||||
//We keep track of the allowSimpleKey_ flag here.
|
||||
//Indentation rules are loosened for the flow context.
|
||||
auto appender = Appender!dstring();
|
||||
auto appender = appender!dstring();
|
||||
const startMark = reader_.mark;
|
||||
Mark endMark = startMark;
|
||||
const indent = indent_ + 1;
|
||||
|
@ -1447,7 +1446,7 @@ final class Scanner
|
|||
{
|
||||
///The specification is really confusing about tabs in plain scalars.
|
||||
///We just forbid them completely. Do not use tabs in YAML!
|
||||
auto appender = Appender!dstring();
|
||||
auto appender = appender!dstring();
|
||||
|
||||
uint length = 0;
|
||||
while(reader_.peek(length) == ' '){++length;}
|
||||
|
@ -1524,7 +1523,7 @@ final class Scanner
|
|||
dstring scanTagURI(string name, in Mark startMark)
|
||||
{
|
||||
//Note: we do not check if URI is well-formed.
|
||||
auto appender = Appender!dstring();
|
||||
auto appender = appender!dstring();
|
||||
uint length = 0;
|
||||
|
||||
dchar c = reader_.peek();
|
||||
|
@ -1606,7 +1605,7 @@ final class Scanner
|
|||
* '\r\n' : '\n'
|
||||
* '\r' : '\n'
|
||||
* '\n' : '\n'
|
||||
* '\x85' : '\n'
|
||||
* '\u0085' : '\n'
|
||||
* '\u2028' : '\u2028'
|
||||
* '\u2029 : '\u2029'
|
||||
* no break : '\0'
|
||||
|
@ -1615,7 +1614,7 @@ final class Scanner
|
|||
{
|
||||
const c = reader_.peek();
|
||||
|
||||
dchar[] plainLineBreaks = ['\r', '\n', '\x85'];
|
||||
dchar[] plainLineBreaks = ['\r', '\n', '\u0085'];
|
||||
if(plainLineBreaks.canFind(c))
|
||||
{
|
||||
if(reader_.prefix(2) == "\r\n"){reader_.forward(2);}
|
||||
|
|
233
dyaml/serializer.d
Normal file
233
dyaml/serializer.d
Normal file
|
@ -0,0 +1,233 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
/**
|
||||
* YAML serializer.
|
||||
* Code based on PyYAML: http://www.pyyaml.org
|
||||
*/
|
||||
module dyaml.serializer;
|
||||
|
||||
|
||||
import std.array;
|
||||
import std.format;
|
||||
|
||||
import dyaml.anchor;
|
||||
import dyaml.emitter;
|
||||
import dyaml.encoding;
|
||||
import dyaml.event;
|
||||
import dyaml.exception;
|
||||
import dyaml.node;
|
||||
import dyaml.resolver;
|
||||
import dyaml.tag;
|
||||
import dyaml.tagdirectives;
|
||||
import dyaml.token;
|
||||
|
||||
|
||||
package:
|
||||
|
||||
///Serializes represented YAML nodes, generating events which are then emitted by Emitter.
|
||||
struct Serializer
|
||||
{
|
||||
private:
|
||||
///Emitter to emit events produced.
|
||||
Emitter* emitter_;
|
||||
///Resolver used to determine which tags are automatically resolvable.
|
||||
Resolver resolver_;
|
||||
|
||||
///Do all document starts have to be specified explicitly?
|
||||
bool explicitStart_;
|
||||
///Do all document ends have to be specified explicitly?
|
||||
bool explicitEnd_;
|
||||
///YAML version string.
|
||||
string YAMLVersion_;
|
||||
|
||||
///Tag directives to emit.
|
||||
TagDirectives tagDirectives_;
|
||||
|
||||
//TODO Use something with more deterministic memory usage.
|
||||
///Nodes with assigned anchors.
|
||||
Anchor[Node] anchors_;
|
||||
///Nodes with assigned anchors that are already serialized.
|
||||
bool[Node] serializedNodes_;
|
||||
///ID of the last anchor generated.
|
||||
uint lastAnchorID_ = 0;
|
||||
|
||||
public:
|
||||
/**
|
||||
* Construct a Serializer.
|
||||
*
|
||||
* Params: emitter = Emitter to emit events produced.
|
||||
* resolver = Resolver used to determine which tags are automatically resolvable.
|
||||
* encoding = Character encoding to use.
|
||||
* explicitStart = Do all document starts have to be specified explicitly?
|
||||
* explicitEnd = Do all document ends have to be specified explicitly?
|
||||
* YAMLVersion = YAML version string.
|
||||
* tagDirectives = Tag directives to emit.
|
||||
*/
|
||||
this(ref Emitter emitter, Resolver resolver, Encoding encoding,
|
||||
bool explicitStart, bool explicitEnd, string YAMLVersion,
|
||||
TagDirectives tagDirectives)
|
||||
{
|
||||
emitter_ = &emitter;
|
||||
resolver_ = resolver;
|
||||
explicitStart_ = explicitStart;
|
||||
explicitEnd_ = explicitEnd;
|
||||
YAMLVersion_ = YAMLVersion;
|
||||
tagDirectives_ = tagDirectives;
|
||||
|
||||
emitter_.emit(streamStartEvent(Mark(), Mark(), encoding));
|
||||
}
|
||||
|
||||
///Destroy the Serializer.
|
||||
~this()
|
||||
{
|
||||
emitter_.emit(streamEndEvent(Mark(), Mark()));
|
||||
clear(YAMLVersion_);
|
||||
YAMLVersion_ = null;
|
||||
clear(serializedNodes_);
|
||||
serializedNodes_ = null;
|
||||
clear(anchors_);
|
||||
anchors_ = null;
|
||||
}
|
||||
|
||||
///Serialize a node, emitting it in the process.
|
||||
void serialize(ref Node node)
|
||||
{
|
||||
emitter_.emit(documentStartEvent(Mark(), Mark(), explicitStart_,
|
||||
YAMLVersion_, tagDirectives_));
|
||||
anchorNode(node);
|
||||
serializeNode(node);
|
||||
emitter_.emit(documentEndEvent(Mark(), Mark(), explicitEnd_));
|
||||
clear(serializedNodes_);
|
||||
clear(anchors_);
|
||||
Anchor[Node] emptyAnchors;
|
||||
anchors_ = emptyAnchors;
|
||||
lastAnchorID_ = 0;
|
||||
}
|
||||
|
||||
private:
|
||||
/**
|
||||
* Determine if it's a good idea to add an anchor to a node.
|
||||
*
|
||||
* Used to prevent associating every single repeating scalar with an
|
||||
* anchor/alias - only nodes long enough can use anchors.
|
||||
*
|
||||
* Params: node = Node to check for anchorability.
|
||||
*
|
||||
* Returns: True if the node is anchorable, false otherwise.
|
||||
*/
|
||||
static bool anchorable(ref Node node)
|
||||
{
|
||||
if(node.isScalar)
|
||||
{
|
||||
return node.isType!string ? node.get!string.length > 64 :
|
||||
node.isType!(ubyte[]) ? node.get!(ubyte[]).length > 64:
|
||||
false;
|
||||
}
|
||||
return node.length > 2;
|
||||
}
|
||||
|
||||
///Add an anchor to the node if it's anchorable and not anchored yet.
|
||||
void anchorNode(ref Node node)
|
||||
{
|
||||
if(!anchorable(node)){return;}
|
||||
|
||||
if((node in anchors_) !is null)
|
||||
{
|
||||
if(anchors_[node].isNull())
|
||||
{
|
||||
anchors_[node] = generateAnchor();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
anchors_[node] = Anchor(null);
|
||||
if(node.isSequence)
|
||||
{
|
||||
foreach(ref Node item; node)
|
||||
{
|
||||
anchorNode(item);
|
||||
}
|
||||
}
|
||||
else if(node.isMapping)
|
||||
{
|
||||
foreach(ref Node key, ref Node value; node)
|
||||
{
|
||||
anchorNode(key);
|
||||
anchorNode(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
///Generate and return a new anchor.
|
||||
Anchor generateAnchor()
|
||||
{
|
||||
++lastAnchorID_;
|
||||
auto appender = appender!string;
|
||||
formattedWrite(appender, "id%03d", lastAnchorID_);
|
||||
return Anchor(appender.data);
|
||||
}
|
||||
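As a quick illustration of the anchor names generated above (a sketch only, not part of the commit):
unittest
{
    import std.string;
    //generateAnchor() numbers anchors sequentially: id001, id002, ...
    assert(format("id%03d", 1)  == "id001");
    assert(format("id%03d", 42) == "id042");
}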
|
||||
///Serialize a node and all its subnodes.
|
||||
void serializeNode(ref Node node)
|
||||
{
|
||||
//If the node has an anchor, emit an anchor (as aliasEvent) on the
|
||||
//first occurrence, save it in serializedNodes_, and emit an alias
|
||||
//if it reappears.
|
||||
Anchor aliased = Anchor(null);
|
||||
if(anchorable(node) && (node in anchors_) !is null)
|
||||
{
|
||||
aliased = anchors_[node];
|
||||
if((node in serializedNodes_) !is null)
|
||||
{
|
||||
emitter_.emit(aliasEvent(Mark(), Mark(), aliased));
|
||||
return;
|
||||
}
|
||||
serializedNodes_[node] = true;
|
||||
}
|
||||
|
||||
if(node.isScalar)
|
||||
{
|
||||
assert(node.isType!string, "Scalar node type must be string before serialization");
|
||||
auto value = node.get!string;
|
||||
Tag detectedTag = resolver_.resolve(NodeID.Scalar, Tag(null), value, true);
|
||||
Tag defaultTag = resolver_.resolve(NodeID.Scalar, Tag(null), value, false);
|
||||
|
||||
emitter_.emit(scalarEvent(Mark(), Mark(), aliased, node.tag,
|
||||
[node.tag == detectedTag, node.tag == defaultTag],
|
||||
value, ScalarStyle.Invalid));
|
||||
return;
|
||||
}
|
||||
if(node.isSequence)
|
||||
{
|
||||
auto defaultTag = resolver_.defaultSequenceTag;
|
||||
bool implicit = node.tag == defaultTag;
|
||||
emitter_.emit(sequenceStartEvent(Mark(), Mark(), aliased, node.tag,
|
||||
implicit, CollectionStyle.Invalid));
|
||||
foreach(ref Node item; node)
|
||||
{
|
||||
serializeNode(item);
|
||||
}
|
||||
emitter_.emit(sequenceEndEvent(Mark(), Mark()));
|
||||
return;
|
||||
}
|
||||
if(node.isMapping)
|
||||
{
|
||||
auto defaultTag = resolver_.defaultMappingTag;
|
||||
bool implicit = node.tag == defaultTag;
|
||||
emitter_.emit(mappingStartEvent(Mark(), Mark(), aliased, node.tag,
|
||||
implicit, CollectionStyle.Invalid));
|
||||
foreach(ref Node key, ref Node value; node)
|
||||
{
|
||||
serializeNode(key);
|
||||
serializeNode(value);
|
||||
}
|
||||
emitter_.emit(mappingEndEvent(Mark(), Mark()));
|
||||
return;
|
||||
}
|
||||
assert(false, "This code should never be reached");
|
||||
}
|
||||
}
|
99
dyaml/sharedobject.d
Normal file
99
dyaml/sharedobject.d
Normal file
|
@ -0,0 +1,99 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Shared object.
|
||||
module dyaml.sharedobject;
|
||||
|
||||
/**
|
||||
* Mixin for shared objects (need a better name).
|
||||
*
|
||||
* This works as an index to a static array of type T. Any new object created is
|
||||
* checked for presence in the array to prevent duplication.
|
||||
*
|
||||
* This is useful for e.g. token/event data that rarely needs to be
|
||||
* stored (e.g. tag directives) to prevent inflation of these structs,
|
||||
* and when there are many instances of a data type that are mostly
|
||||
* duplicates (e.g. tags).
|
||||
*
|
||||
* The disadvantage is that this is not thread-safe (and neither is D:YAML, at the
|
||||
* moment). That might be fixed in the future, though.
|
||||
*
|
||||
* This is not the most elegant way to store the extra data, and it may change in the future.
|
||||
*/
|
||||
template SharedObject(T, MixedIn)
|
||||
{
|
||||
private:
|
||||
///Index of the object in objects_.
|
||||
uint index_ = uint.max;
|
||||
|
||||
/**
|
||||
* Reference count.
|
||||
*
|
||||
* When this reaches zero, objects_ are cleared. This count is not
|
||||
* the number of shared objects, but rather of objects using this kind
|
||||
* of shared object. This is used e.g. with Anchor, but not with Tag
|
||||
* - tags can be stored by the user in Nodes so there is no way to know
|
||||
* when there are no Tags anymore.
|
||||
*/
|
||||
static int referenceCount_ = 0;
|
||||
|
||||
/**
|
||||
* All known objects of this type are in this array.
|
||||
*
|
||||
* Note that this is not shared among threads.
|
||||
* Working on the same YAML file from multiple threads is NOT safe with D:YAML.
|
||||
*/
|
||||
static T[] objects_;
|
||||
|
||||
///Add a new object, checking if identical object already exists.
|
||||
void add(ref T object)
|
||||
{
|
||||
foreach(uint index, known; objects_)
|
||||
{
|
||||
if(object == known)
|
||||
{
|
||||
index_ = index;
|
||||
return;
|
||||
}
|
||||
}
|
||||
index_ = cast(uint)objects_.length;
|
||||
objects_ ~= object;
|
||||
}
|
||||
|
||||
public:
|
||||
///Increment the reference count.
|
||||
static void addReference()
|
||||
{
|
||||
assert(referenceCount_ >= 0);
|
||||
++referenceCount_;
|
||||
}
|
||||
|
||||
///Decrement the reference count and clear the constructed objects if zero.
|
||||
static void removeReference()
|
||||
{
|
||||
--referenceCount_;
|
||||
assert(referenceCount_ >= 0);
|
||||
if(referenceCount_ == 0){objects_ = [];}
|
||||
}
|
||||
|
||||
///Get the object.
|
||||
@property T get() const
|
||||
in{assert(!isNull());}
|
||||
body
|
||||
{
|
||||
return objects_[index_];
|
||||
}
|
||||
|
||||
///Test for equality with another object.
|
||||
bool opEquals(const ref MixedIn object) const
|
||||
{
|
||||
return object.index_ == index_;
|
||||
}
|
||||
|
||||
///Is this object null (invalid)?
|
||||
bool isNull() const {return index_ == uint.max;}
|
||||
}
|
||||
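A minimal usage sketch of the mixin above; the Interned type is hypothetical and only shows how add(), get() and the index-based opEquals fit together (not part of this commit):
import dyaml.sharedobject;

///Hypothetical interned-string wrapper, analogous to the Anchor/Tag wrappers.
struct Interned
{
    mixin SharedObject!(string, Interned);

    this(string s){add(s);}
}

unittest
{
    auto a = Interned("foo");
    auto b = Interned("foo");
    auto c = Interned("bar");
    assert(a == b);     //Identical values share one index in objects_.
    assert(!(a == c));
    assert(a.get == "foo");
    assert(!a.isNull());
}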
|
44
dyaml/tag.d
44
dyaml/tag.d
|
@ -7,25 +7,15 @@
|
|||
///YAML tag.
|
||||
module dyaml.tag;
|
||||
|
||||
import core.sync.mutex;
|
||||
import dyaml.sharedobject;
|
||||
|
||||
|
||||
///YAML tag (data type) struct. Encapsulates a tag to save memory and speed-up comparison.
|
||||
struct Tag
|
||||
{
|
||||
private:
|
||||
///Index of the tag in tags_.
|
||||
uint index_ = uint.max;
|
||||
|
||||
/**
|
||||
* All known tags are in this array.
|
||||
*
|
||||
* Note that this is not shared among threads.
|
||||
* Working the same YAML file in multiple threads is NOT safe with D:YAML.
|
||||
*/
|
||||
static string[] tags_;
|
||||
|
||||
public:
|
||||
mixin SharedObject!(string, Tag);
|
||||
|
||||
///Construct a tag from a string representation.
|
||||
this(string tag)
|
||||
{
|
||||
|
@ -35,32 +25,6 @@ struct Tag
|
|||
return;
|
||||
}
|
||||
|
||||
foreach(uint index, knownTag; tags_)
|
||||
{
|
||||
if(tag == knownTag)
|
||||
{
|
||||
index_ = index;
|
||||
return;
|
||||
}
|
||||
}
|
||||
index_ = cast(uint)tags_.length;
|
||||
tags_ ~= tag;
|
||||
add(tag);
|
||||
}
|
||||
|
||||
///Get string representation of the tag.
|
||||
string toString() const
|
||||
in{assert(!isNull());}
|
||||
body
|
||||
{
|
||||
return tags_[index_];
|
||||
}
|
||||
|
||||
///Test for equality with another tag.
|
||||
bool opEquals(const ref Tag tag) const
|
||||
{
|
||||
return tag.index_ == index_;
|
||||
}
|
||||
|
||||
///Is this tag null (invalid)?
|
||||
bool isNull() const {return index_ == uint.max;}
|
||||
}
|
||||
|
|
26
dyaml/tagdirectives.d
Normal file
26
dyaml/tagdirectives.d
Normal file
|
@ -0,0 +1,26 @@
|
|||
|
||||
// Copyright Ferdinand Majerech 2011.
|
||||
// Distributed under the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE_1_0.txt or copy at
|
||||
// http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
///Tag directives.
|
||||
module dyaml.tagdirectives;
|
||||
|
||||
import std.typecons;
|
||||
|
||||
import dyaml.sharedobject;
|
||||
|
||||
|
||||
///Tag directives stored in Event.
|
||||
struct TagDirectives
|
||||
{
|
||||
public:
|
||||
mixin SharedObject!(Tuple!(string, string)[], TagDirectives);
|
||||
|
||||
///Construct a tag directives object from an array of handle/prefix pairs.
|
||||
this(Tuple!(string, string)[] tagDirectives)
|
||||
{
|
||||
add(tagDirectives);
|
||||
}
|
||||
}
|
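A brief, illustrative construction of the struct above; the directive pairs are hypothetical and this snippet is not part of the commit:
unittest
{
    import std.typecons;

    auto directives = TagDirectives([tuple("!",  "!"),
                                     tuple("!!", "tag:yaml.org,2002:")]);
    assert(!directives.isNull());
    assert(directives.get.length == 2); //get() returns the stored pairs.
}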
|
@ -11,6 +11,7 @@
|
|||
module dyaml.token;
|
||||
|
||||
|
||||
import dyaml.encoding;
|
||||
import dyaml.exception;
|
||||
import dyaml.reader;
|
||||
|
||||
|
@ -53,6 +54,14 @@ enum ScalarStyle : ubyte
|
|||
DoubleQuoted /// Double quoted scalar
|
||||
}
|
||||
|
||||
///Collection styles.
|
||||
enum CollectionStyle : ubyte
|
||||
{
|
||||
Invalid = 0, /// Invalid (uninitialized) style
|
||||
Block, /// Block style.
|
||||
Flow /// Flow style.
|
||||
}
|
||||
|
||||
/**
|
||||
* Token produced by scanner.
|
||||
*
|
||||
|
@ -70,6 +79,8 @@ immutable struct Token
|
|||
TokenID id;
|
||||
///Style of scalar token, if this is a scalar token.
|
||||
ScalarStyle style;
|
||||
///Encoding, if this is a stream start token.
|
||||
Encoding encoding;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -96,8 +107,19 @@ Token simpleToken(TokenID id)(in Mark start, in Mark end) pure
|
|||
return Token(null, start, end, id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a stream start token.
|
||||
*
|
||||
* Params: start = Start position of the token.
|
||||
* end = End position of the token.
|
||||
* encoding = Encoding of the stream.
|
||||
*/
|
||||
Token streamStartToken(in Mark start, in Mark end, in Encoding encoding)
|
||||
{
|
||||
return Token(null, start, end, TokenID.StreamStart, ScalarStyle.Invalid, encoding);
|
||||
}
|
||||
|
||||
///Aliases for construction of simple token types.
|
||||
alias simpleToken!(TokenID.StreamStart) streamStartToken;
|
||||
alias simpleToken!(TokenID.StreamEnd) streamEndToken;
|
||||
alias simpleToken!(TokenID.BlockSequenceStart) blockSequenceStartToken;
|
||||
alias simpleToken!(TokenID.BlockMappingStart) blockMappingStartToken;
|
||||
|
|
|
@ -8,14 +8,13 @@ module dyaml.util;
|
|||
|
||||
package:
|
||||
|
||||
|
||||
///Is given character YAML whitespace (space or tab)?
|
||||
bool isSpace(in dchar c){return c == ' ' || c == '\t';}
|
||||
|
||||
///Is given character YAML line break?
|
||||
bool isBreak(in dchar c)
|
||||
{
|
||||
return c == '\n' || c == '\r' || c == '\x85' || c == '\u2028' || c == '\u2029';
|
||||
return c == '\n' || c == '\r' || c == '\u0085' || c == '\u2028' || c == '\u2029';
|
||||
}
|
||||
|
||||
///Is c the checked character?
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
dumper = yaml.Dumper(StringIO())
|
||||
dumper.open()
|
||||
dumper.open()
|
|
@ -1,4 +0,0 @@
|
|||
dumper = yaml.Dumper(StringIO())
|
||||
dumper.open()
|
||||
dumper.close()
|
||||
dumper.open()
|
|
@ -1,4 +0,0 @@
|
|||
dumper = yaml.Dumper(StringIO())
|
||||
dumper.open()
|
||||
dumper.close()
|
||||
dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar'))
|
|
@ -1,2 +0,0 @@
|
|||
dumper = yaml.Dumper(StringIO())
|
||||
dumper.close()
|
|
@ -1,2 +0,0 @@
|
|||
dumper = yaml.Dumper(StringIO())
|
||||
dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar'))
|
|
@ -1 +0,0 @@
|
|||
yaml.safe_dump(object)
|
|
@ -1 +0,0 @@
|
|||
tag:yaml.org,2002:yaml
|
|
@ -93,9 +93,10 @@ string[][string] findTestFilenames(in string dir)
|
|||
{
|
||||
if(isFile(name))
|
||||
{
|
||||
string base = name.getName();
|
||||
string ext = name.getExt();
|
||||
string base = name.stripExtension();
|
||||
string ext = name.extension();
|
||||
if(ext is null){ext = "";}
|
||||
if(ext.length > 0 && ext[0] == '.'){ext = ext[1 .. $];}
|
||||
|
||||
//If the base name doesn't exist yet, add it; otherwise add new extension.
|
||||
names[base] = ((base in names) is null) ? [ext] : names[base] ~ ext;
|
||||
|
|
|
@ -10,6 +10,7 @@ module dyaml.testconstructor;
|
|||
import std.datetime;
|
||||
import std.exception;
|
||||
import std.path;
|
||||
import std.string;
|
||||
|
||||
import dyaml.tag;
|
||||
import dyaml.testcommon;
|
||||
|
@ -21,68 +22,64 @@ Node[][string] expected;
|
|||
///Initialize expected.
|
||||
static this()
|
||||
{
|
||||
expected["construct-binary.data"] = constructBinary();
|
||||
expected["construct-bool.data"] = constructBool();
|
||||
expected["construct-custom.data"] = constructCustom();
|
||||
expected["construct-float.data"] = constructFloat();
|
||||
expected["construct-int.data"] = constructInt();
|
||||
expected["construct-map.data"] = constructMap();
|
||||
expected["construct-merge.data"] = constructMerge();
|
||||
expected["construct-null.data"] = constructNull();
|
||||
expected["construct-omap.data"] = constructOMap();
|
||||
expected["construct-pairs.data"] = constructPairs();
|
||||
expected["construct-seq.data"] = constructSeq();
|
||||
expected["construct-set.data"] = constructSet();
|
||||
expected["construct-str-ascii.data"] = constructStrASCII();
|
||||
expected["construct-str.data"] = constructStr();
|
||||
expected["construct-str-utf8.data"] = constructStrUTF8();
|
||||
expected["construct-timestamp.data"] = constructTimestamp();
|
||||
expected["construct-value.data"] = constructValue();
|
||||
expected["duplicate-merge-key.data"] = duplicateMergeKey();
|
||||
expected["float-representer-2.3-bug.data"] = floatRepresenterBug();
|
||||
expected["invalid-single-quote-bug.data"] = invalidSingleQuoteBug();
|
||||
expected["more-floats.data"] = moreFloats();
|
||||
expected["negative-float-bug.data"] = negativeFloatBug();
|
||||
expected["single-dot-is-not-float-bug.data"] = singleDotFloatBug();
|
||||
expected["timestamp-bugs.data"] = timestampBugs();
|
||||
expected["utf16be.data"] = utf16be();
|
||||
expected["utf16le.data"] = utf16le();
|
||||
expected["utf8.data"] = utf8();
|
||||
expected["utf8-implicit.data"] = utf8implicit();
|
||||
}
|
||||
|
||||
///Construct a node with specified value.
|
||||
Node node(T)(T value)
|
||||
{
|
||||
static if(Node.Value.allowed!T){return Node.rawNode(Node.Value(value));}
|
||||
else{return Node.rawNode(Node.userValue(value));}
|
||||
expected["aliases-cdumper-bug"] = constructAliasesCDumperBug();
|
||||
expected["construct-binary"] = constructBinary();
|
||||
expected["construct-bool"] = constructBool();
|
||||
expected["construct-custom"] = constructCustom();
|
||||
expected["construct-float"] = constructFloat();
|
||||
expected["construct-int"] = constructInt();
|
||||
expected["construct-map"] = constructMap();
|
||||
expected["construct-merge"] = constructMerge();
|
||||
expected["construct-null"] = constructNull();
|
||||
expected["construct-omap"] = constructOMap();
|
||||
expected["construct-pairs"] = constructPairs();
|
||||
expected["construct-seq"] = constructSeq();
|
||||
expected["construct-set"] = constructSet();
|
||||
expected["construct-str-ascii"] = constructStrASCII();
|
||||
expected["construct-str"] = constructStr();
|
||||
expected["construct-str-utf8"] = constructStrUTF8();
|
||||
expected["construct-timestamp"] = constructTimestamp();
|
||||
expected["construct-value"] = constructValue();
|
||||
expected["duplicate-merge-key"] = duplicateMergeKey();
|
||||
expected["float-representer-2.3-bug"] = floatRepresenterBug();
|
||||
expected["invalid-single-quote-bug"] = invalidSingleQuoteBug();
|
||||
expected["more-floats"] = moreFloats();
|
||||
expected["negative-float-bug"] = negativeFloatBug();
|
||||
expected["single-dot-is-not-float-bug"] = singleDotFloatBug();
|
||||
expected["timestamp-bugs"] = timestampBugs();
|
||||
expected["utf16be"] = utf16be();
|
||||
expected["utf16le"] = utf16le();
|
||||
expected["utf8"] = utf8();
|
||||
expected["utf8-implicit"] = utf8implicit();
|
||||
}
|
||||
|
||||
///Construct a pair of nodes with specified values.
|
||||
Node.Pair pair(A, B)(A a, B b)
|
||||
{
|
||||
static if(is(A == Node) && is(B == Node)){return Node.Pair(a, b);}
|
||||
else static if(is(A == Node)) {return Node.Pair(a, node(b));}
|
||||
else static if(is(B == Node)) {return Node.Pair(node(a), b);}
|
||||
else {return Node.Pair(node(a), node(b));}
|
||||
return Node.Pair(a,b);
|
||||
}
|
||||
|
||||
///Test cases:
|
||||
|
||||
Node[] constructAliasesCDumperBug()
|
||||
{
|
||||
return [Node(["today", "today"])];
|
||||
}
|
||||
|
||||
Node[] constructBinary()
|
||||
{
|
||||
auto canonical = cast(ubyte[])"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;";
|
||||
auto generic = cast(ubyte[])"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;";
|
||||
auto description = "The binary value above is a tiny arrow encoded as a gif image.";
|
||||
|
||||
return [node([pair("canonical", canonical),
|
||||
return [Node([pair("canonical", canonical),
|
||||
pair("generic", generic),
|
||||
pair("description", description)])];
|
||||
}
|
||||
|
||||
Node[] constructBool()
|
||||
{
|
||||
return [node([pair("canonical", true),
|
||||
return [Node([pair("canonical", true),
|
||||
pair("answer", false),
|
||||
pair("logical", true),
|
||||
pair("option", true),
|
||||
|
@ -91,14 +88,14 @@ Node[] constructBool()
|
|||
|
||||
Node[] constructCustom()
{
    return [node([node(new TestClass(1, 0, 0)),
                  node(new TestClass(1, 2, 3)),
                  node(TestStruct(10))])];
    return [Node([Node(new TestClass(1, 0, 0)),
                  Node(new TestClass(1, 2, 3)),
                  Node(TestStruct(10))])];
}

Node[] constructFloat()
{
    return [node([pair("canonical", cast(real)685230.15),
    return [Node([pair("canonical", cast(real)685230.15),
                  pair("exponential", cast(real)685230.15),
                  pair("fixed", cast(real)685230.15),
                  pair("sexagesimal", cast(real)685230.15),
@ -108,7 +105,7 @@ Node[] constructFloat()

Node[] constructInt()
{
    return [node([pair("canonical", 685230L),
    return [Node([pair("canonical", 685230L),
                  pair("decimal", 685230L),
                  pair("octal", 685230L),
                  pair("hexadecimal", 685230L),
@ -118,7 +115,7 @@ Node[] constructInt()

Node[] constructMap()
{
    return [node([pair("Block style",
    return [Node([pair("Block style",
                       [pair("Clark", "Evans"),
                        pair("Brian", "Ingerson"),
                        pair("Oren", "Ben-Kiki")]),
@ -130,34 +127,34 @@ Node[] constructMap()

Node[] constructMerge()
{
    return [node([node([pair("x", 1L), pair("y", 2L)]),
                  node([pair("x", 0L), pair("y", 2L)]),
                  node([pair("r", 10L)]),
                  node([pair("r", 1L)]),
                  node([pair("x", 1L), pair("y", 2L), pair("r", 10L), pair("label", "center/big")]),
                  node([pair("r", 10L), pair("label", "center/big"), pair("x", 1L), pair("y", 2L)]),
                  node([pair("label", "center/big"), pair("x", 1L), pair("y", 2L), pair("r", 10L)]),
                  node([pair("x", 1L), pair("label", "center/big"), pair("r", 10L), pair("y", 2L)])])];
    return [Node([Node([pair("x", 1L), pair("y", 2L)]),
                  Node([pair("x", 0L), pair("y", 2L)]),
                  Node([pair("r", 10L)]),
                  Node([pair("r", 1L)]),
                  Node([pair("x", 1L), pair("y", 2L), pair("r", 10L), pair("label", "center/big")]),
                  Node([pair("r", 10L), pair("label", "center/big"), pair("x", 1L), pair("y", 2L)]),
                  Node([pair("label", "center/big"), pair("x", 1L), pair("y", 2L), pair("r", 10L)]),
                  Node([pair("x", 1L), pair("label", "center/big"), pair("r", 10L), pair("y", 2L)])])];
}

Node[] constructNull()
{
    return [node(YAMLNull()),
            node([pair("empty", YAMLNull()),
    return [Node(YAMLNull()),
            Node([pair("empty", YAMLNull()),
                  pair("canonical", YAMLNull()),
                  pair("english", YAMLNull()),
                  pair(YAMLNull(), "null key")]),
            node([pair("sparse",
                  [node(YAMLNull()),
                   node("2nd entry"),
                   node(YAMLNull()),
                   node("4th entry"),
                   node(YAMLNull())])])];
            Node([pair("sparse",
                  [Node(YAMLNull()),
                   Node("2nd entry"),
                   Node(YAMLNull()),
                   Node("4th entry"),
                   Node(YAMLNull())])])];
}

Node[] constructOMap()
{
    return [node([pair("Bestiary",
    return [Node([pair("Bestiary",
                       [pair("aardvark", "African pig-like ant eater. Ugly."),
                        pair("anteater", "South-American ant eater. Two species."),
                        pair("anaconda", "South-American constrictor snake. Scaly.")]),
@ -168,54 +165,54 @@ Node[] constructOMap()

Node[] constructPairs()
{
    return [node([pair("Block tasks",
                       [pair("meeting", "with team."),
                        pair("meeting", "with boss."),
                        pair("break", "lunch."),
                        pair("meeting", "with client.")]),
    return [Node([pair("Block tasks",
                       Node([pair("meeting", "with team."),
                             pair("meeting", "with boss."),
                             pair("break", "lunch."),
                             pair("meeting", "with client.")], "tag:yaml.org,2002:pairs")),
                  pair("Flow tasks",
                       [pair("meeting", "with team"),
                        pair("meeting", "with boss")])])];
                       Node([pair("meeting", "with team"),
                             pair("meeting", "with boss")], "tag:yaml.org,2002:pairs"))])];
}

Node[] constructSeq()
{
    return [node([pair("Block style",
                       [node("Mercury"), node("Venus"), node("Earth"), node("Mars"),
                        node("Jupiter"), node("Saturn"), node("Uranus"), node("Neptune"),
                        node("Pluto")]),
    return [Node([pair("Block style",
                       [Node("Mercury"), Node("Venus"), Node("Earth"), Node("Mars"),
                        Node("Jupiter"), Node("Saturn"), Node("Uranus"), Node("Neptune"),
                        Node("Pluto")]),
                  pair("Flow style",
                       [node("Mercury"), node("Venus"), node("Earth"), node("Mars"),
                        node("Jupiter"), node("Saturn"), node("Uranus"), node("Neptune"),
                        node("Pluto")])])];
                       [Node("Mercury"), Node("Venus"), Node("Earth"), Node("Mars"),
                        Node("Jupiter"), Node("Saturn"), Node("Uranus"), Node("Neptune"),
                        Node("Pluto")])])];
}

Node[] constructSet()
{
    return [node([pair("baseball players",
                       [node("Mark McGwire"), node("Sammy Sosa"), node("Ken Griffey")]),
    return [Node([pair("baseball players",
                       [Node("Mark McGwire"), Node("Sammy Sosa"), Node("Ken Griffey")]),
                  pair("baseball teams",
                       [node("Boston Red Sox"), node("Detroit Tigers"), node("New York Yankees")])])];
                       [Node("Boston Red Sox"), Node("Detroit Tigers"), Node("New York Yankees")])])];
}

Node[] constructStrASCII()
{
    return [node("ascii string")];
    return [Node("ascii string")];
}

Node[] constructStr()
{
    return [node([pair("string", "abcd")])];
    return [Node([pair("string", "abcd")])];
}

Node[] constructStrUTF8()
{
    return [node("\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430")];
    return [Node("\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430")];
}

Node[] constructTimestamp()
{
    return [node([pair("canonical", SysTime(DateTime(2001, 12, 15, 2, 59, 43), FracSec.from!"hnsecs"(1000000), UTC())),
    return [Node([pair("canonical", SysTime(DateTime(2001, 12, 15, 2, 59, 43), FracSec.from!"hnsecs"(1000000), UTC())),
                  pair("valid iso8601", SysTime(DateTime(2001, 12, 15, 2, 59, 43), FracSec.from!"hnsecs"(1000000), UTC())),
                  pair("space separated", SysTime(DateTime(2001, 12, 15, 2, 59, 43), FracSec.from!"hnsecs"(1000000), UTC())),
                  pair("no time zone (Z)", SysTime(DateTime(2001, 12, 15, 2, 59, 43), FracSec.from!"hnsecs"(1000000), UTC())),
@ -224,16 +221,16 @@ Node[] constructTimestamp()

Node[] constructValue()
{
    return[node([pair("link with",
                      [node("library1.dll"), node("library2.dll")])]),
           node([pair("link with",
                      [node([pair("=", "library1.dll"), pair("version", cast(real)1.2)]),
                       node([pair("=", "library2.dll"), pair("version", cast(real)2.3)])])])];
    return[Node([pair("link with",
                      [Node("library1.dll"), Node("library2.dll")])]),
           Node([pair("link with",
                      [Node([pair("=", "library1.dll"), pair("version", cast(real)1.2)]),
                       Node([pair("=", "library2.dll"), pair("version", cast(real)2.3)])])])];
}

Node[] duplicateMergeKey()
{
    return [node([pair("foo", "bar"),
    return [Node([pair("foo", "bar"),
                  pair("x", 1L),
                  pair("y", 2L),
                  pair("z", 3L),
@ -242,7 +239,7 @@ Node[] duplicateMergeKey()

Node[] floatRepresenterBug()
{
    return [node([pair(cast(real)1.0, 1L),
    return [Node([pair(cast(real)1.0, 1L),
                  pair(real.infinity, 10L),
                  pair(-real.infinity, -10L),
                  pair(real.nan, 100L)])];
@ -250,58 +247,58 @@ Node[] floatRepresenterBug()

Node[] invalidSingleQuoteBug()
{
    return [node([node("foo \'bar\'"), node("foo\n\'bar\'")])];
    return [Node([Node("foo \'bar\'"), Node("foo\n\'bar\'")])];
}

Node[] moreFloats()
{
    return [node([node(cast(real)0.0),
                  node(cast(real)1.0),
                  node(cast(real)-1.0),
                  node(real.infinity),
                  node(-real.infinity),
                  node(real.nan),
                  node(real.nan)])];
    return [Node([Node(cast(real)0.0),
                  Node(cast(real)1.0),
                  Node(cast(real)-1.0),
                  Node(real.infinity),
                  Node(-real.infinity),
                  Node(real.nan),
                  Node(real.nan)])];
}

Node[] negativeFloatBug()
{
    return [node(cast(real)-1.0)];
    return [Node(cast(real)-1.0)];
}

Node[] singleDotFloatBug()
{
    return [node(".")];
    return [Node(".")];
}

Node[] timestampBugs()
{
    return [node([node(SysTime(DateTime(2001, 12, 15, 3, 29, 43), FracSec.from!"hnsecs"(1000000), UTC())),
                  node(SysTime(DateTime(2001, 12, 14, 16, 29, 43), FracSec.from!"hnsecs"(1000000), UTC())),
                  node(SysTime(DateTime(2001, 12, 14, 21, 59, 43), FracSec.from!"hnsecs"(10100), UTC())),
                  node(SysTime(DateTime(2001, 12, 14, 21, 59, 43), new SimpleTimeZone(60))),
                  node(SysTime(DateTime(2001, 12, 14, 21, 59, 43), new SimpleTimeZone(-90))),
                  node(SysTime(DateTime(2005, 7, 8, 17, 35, 4), FracSec.from!"hnsecs"(5176000), UTC()))])];
    return [Node([Node(SysTime(DateTime(2001, 12, 15, 3, 29, 43), FracSec.from!"hnsecs"(1000000), UTC())),
                  Node(SysTime(DateTime(2001, 12, 14, 16, 29, 43), FracSec.from!"hnsecs"(1000000), UTC())),
                  Node(SysTime(DateTime(2001, 12, 14, 21, 59, 43), FracSec.from!"hnsecs"(10100), UTC())),
                  Node(SysTime(DateTime(2001, 12, 14, 21, 59, 43), new SimpleTimeZone(60))),
                  Node(SysTime(DateTime(2001, 12, 14, 21, 59, 43), new SimpleTimeZone(-90))),
                  Node(SysTime(DateTime(2005, 7, 8, 17, 35, 4), FracSec.from!"hnsecs"(5176000), UTC()))])];
}

Node[] utf16be()
{
    return [node("UTF-16-BE")];
    return [Node("UTF-16-BE")];
}

Node[] utf16le()
{
    return [node("UTF-16-LE")];
    return [Node("UTF-16-LE")];
}

Node[] utf8()
{
    return [node("UTF-8")];
    return [Node("UTF-8")];
}

Node[] utf8implicit()
{
    return [node("implicit UTF-8")];
    return [Node("implicit UTF-8")];
}

///Testing custom YAML class type.

@ -322,6 +319,11 @@ class TestClass
        auto t = cast(TestClass)rhs;
        return x == t.x && y == t.y && z == t.z;
    }

    override string toString()
    {
        return format("TestClass(", x, ", ", y, ", ", z, ")");
    }
}

///Testing custom YAML struct type.
@ -352,6 +354,17 @@ TestClass constructClass(Mark start, Mark end, Node.Pair[] pairs)

    return new TestClass(x, y, z);
}

Node representClass(ref Node node, Representer representer)
{
    auto value = node.get!TestClass;
    auto pairs = [Node.Pair("x", value.x),
                  Node.Pair("y", value.y),
                  Node.Pair("z", value.z)];
    auto result = representer.representMapping("!tag1", pairs);

    return result;
}

///Constructor function for TestStruct.
TestStruct constructStruct(Mark start, Mark end, string value)
@ -359,6 +372,14 @@ TestStruct constructStruct(Mark start, Mark end, string value)
    return TestStruct(to!int(value));
}

///Representer function for TestStruct.
Node representStruct(ref Node node, Representer representer)
{
    string[] keys, values;
    auto value = node.get!TestStruct;
    return representer.representScalar("!tag2", to!string(value.value));
}

/**
 * Constructor unittest.
 *
@ -369,7 +390,7 @@ TestStruct constructStruct(Mark start, Mark end, string value)
 */
void testConstructor(bool verbose, string dataFilename, string codeDummy)
{
    string base = dataFilename.basename;
    string base = dataFilename.baseName.stripExtension;
    enforce((base in expected) !is null,
            new Exception("Unimplemented constructor test: " ~ base));

@ -380,16 +401,18 @@ void testConstructor(bool verbose, string dataFilename, string codeDummy)
    auto resolver = new Resolver;
    auto loader = Loader(dataFilename, constructor, resolver);

    Node[] exp = expected[base];

    //Compare with expected results document by document.
    size_t i = 0;
    foreach(node; loader)
    {
        if(!node.equals!(Node, false)(expected[base][i]))
        if(!node.equals!(Node, false)(exp[i]))
        {
            if(verbose)
            {
                writeln("Expected value:");
                writeln(expected[base][i].debugString);
                writeln(exp[i].debugString);
                writeln("\n");
                writeln("Actual value:");
                writeln(node.debugString);
@ -398,7 +421,7 @@ void testConstructor(bool verbose, string dataFilename, string codeDummy)
        }
        ++i;
    }
    assert(i == expected[base].length);
    assert(i == exp.length);
}

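A new constructor test case follows the same pattern as the entries registered above: a function returning the expected documents, keyed by the base name of the matching data file, which testConstructor then compares document by document. A minimal sketch (the example-case name and its content are hypothetical):

    //Hypothetical test case; would be registered as expected["example-case"] = exampleCase();
    //and compared against a data file whose base name is example-case.
    Node[] exampleCase()
    {
        return [Node([pair("key", "value")])];
    }
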
196
test/src/emitter.d
Normal file

@ -0,0 +1,196 @@

// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testemitter;


import std.algorithm;
import std.file;
import std.range;

import dyaml.dumper;
import dyaml.event;
import dyaml.testcommon;
import dyaml.token;


/**
 * Determine if events in events1 are equivalent to events in events2.
 *
 * Params:  events1 = First event array to compare.
 *          events2 = Second event array to compare.
 *
 * Returns: true if the events are equivalent, false otherwise.
 */
bool compareEvents(Event[] events1, Event[] events2)
{
    if(events1.length != events2.length){return false;}

    for(uint e = 0; e < events1.length; ++e)
    {
        auto e1 = events1[e];
        auto e2 = events2[e];

        //Different event types.
        if(e1.id != e2.id){return false;}
        //Different anchor (if applicable).
        if([EventID.SequenceStart,
            EventID.MappingStart,
            EventID.Alias,
            EventID.Scalar].canFind(e1.id)
           && e1.anchor != e2.anchor)
        {
            return false;
        }
        //Different collection tag (if applicable).
        if([EventID.SequenceStart, EventID.MappingStart].canFind(e1.id) && e1.tag != e2.tag)
        {
            return false;
        }
        if(e1.id == EventID.Scalar)
        {
            //Different scalar tag (if applicable).
            if(![e1.implicit, e1.implicit_2, e2.implicit, e2.implicit_2].canFind(true)
               && e1.tag != e2.tag)
            {
                return false;
            }
            //Different scalar value.
            if(e1.value != e2.value)
            {
                return false;
            }
        }
    }
    return true;
}

/**
 * Test emitter by getting events from parsing a file, emitting them, parsing
 * the emitted result and comparing events from parsing the emitted result with
 * originally parsed events.
 *
 * Params:  verbose = Print verbose output?
 *          dataFilename = YAML file to parse.
 *          canonicalFilename = Canonical YAML file used as dummy to determine
 *                              which data files to load.
 */
void testEmitterOnData(bool verbose, string dataFilename, string canonicalFilename)
{
    //Must exist due to Anchor, Tags reference counts.
    auto loader = Loader(dataFilename);
    auto events = loader.parse();
    auto emitStream = new MemoryStream;
    Dumper(emitStream).emit(events);

    if(verbose)
    {
        writeln(dataFilename);
        writeln("ORIGINAL:\n", readText(dataFilename));
        writeln("OUTPUT:\n", cast(string)emitStream.data);
    }
    auto loadStream = new MemoryStream(emitStream.data);
    auto newEvents = Loader(loadStream, "DUMMY", new Constructor, new Resolver).parse();
    assert(compareEvents(events, newEvents));
}

/**
 * Test emitter by getting events from parsing a canonical YAML file, emitting
 * them both in canonical and normal format, parsing the emitted results and
 * comparing events from parsing the emitted result with originally parsed events.
 *
 * Params:  verbose = Print verbose output?
 *          canonicalFilename = Canonical YAML file to parse.
 */
void testEmitterOnCanonical(bool verbose, string canonicalFilename)
{
    //Must exist due to Anchor, Tags reference counts.
    auto loader = Loader(canonicalFilename);
    auto events = loader.parse();
    foreach(canonical; [false, true])
    {
        auto emitStream = new MemoryStream;
        auto dumper = Dumper(emitStream);
        dumper.canonical = canonical;
        dumper.emit(events);
        if(verbose)
        {
            writeln("OUTPUT (canonical=", canonical, "):\n",
                    cast(string)emitStream.data);
        }
        auto loadStream = new MemoryStream(emitStream.data);
        auto newEvents = Loader(loadStream, "DUMMY", new Constructor, new Resolver).parse();
        assert(compareEvents(events, newEvents));
    }
}

/**
 * Test emitter by getting events from parsing a file, emitting them with all
 * possible scalar and collection styles, parsing the emitted results and
 * comparing events from parsing the emitted result with originally parsed events.
 *
 * Params:  verbose = Print verbose output?
 *          dataFilename = YAML file to parse.
 *          canonicalFilename = Canonical YAML file used as dummy to determine
 *                              which data files to load.
 */
void testEmitterStyles(bool verbose, string dataFilename, string canonicalFilename)
{
    foreach(filename; [dataFilename, canonicalFilename])
    {
        //must exist due to Anchor, Tags reference counts
        auto loader = Loader(canonicalFilename);
        auto events = loader.parse();
        foreach(flowStyle; [CollectionStyle.Block, CollectionStyle.Flow])
        {
            foreach(style; [ScalarStyle.Literal, ScalarStyle.Folded,
                            ScalarStyle.DoubleQuoted, ScalarStyle.SingleQuoted,
                            ScalarStyle.Plain])
            {
                Event[] styledEvents;
                foreach(event; events)
                {
                    if(event.id == EventID.Scalar)
                    {
                        event = scalarEvent(Mark(), Mark(), event.anchor, event.tag,
                                            [event.implicit, event.implicit_2],
                                            event.value, style);
                    }
                    else if(event.id == EventID.SequenceStart)
                    {
                        event = sequenceStartEvent(Mark(), Mark(), event.anchor,
                                                   event.tag, event.implicit, flowStyle);
                    }
                    else if(event.id == EventID.MappingStart)
                    {
                        event = mappingStartEvent(Mark(), Mark(), event.anchor,
                                                  event.tag, event.implicit, flowStyle);
                    }
                    styledEvents ~= event;
                }
                auto emitStream = new MemoryStream;
                Dumper(emitStream).emit(styledEvents);
                if(verbose)
                {
                    writeln("OUTPUT (", filename, ", ", to!string(flowStyle), ", ",
                            to!string(style), ")");
                    writeln(emitStream.data);
                }
                auto loadStream = new MemoryStream(emitStream.data);
                auto newEvents = Loader(loadStream, "DUMMY", new Constructor, new Resolver).parse();
                assert(compareEvents(events, newEvents));
            }
        }
    }
}

unittest
{
    writeln("D:YAML Emitter unittest");
    run("testEmitterOnData", &testEmitterOnData, ["data", "canonical"]);
    run("testEmitterOnCanonical", &testEmitterOnCanonical, ["canonical"]);
    run("testEmitterStyles", &testEmitterStyles, ["data", "canonical"]);
}

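All three emitter tests share one round trip: parse a file into events, emit those events into a buffer, re-parse the buffer, and require the two event sequences to be equivalent. Condensed into a sketch (same Loader, Dumper and MemoryStream usage as above; "file.yaml" is a placeholder):

    auto events = Loader("file.yaml").parse();
    auto emitStream = new MemoryStream;
    Dumper(emitStream).emit(events);
    auto newEvents = Loader(new MemoryStream(emitStream.data), "DUMMY",
                            new Constructor, new Resolver).parse();
    assert(compareEvents(events, newEvents));
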
@ -27,8 +27,8 @@ wchar bom16(bool wrong = false) pure
{
    wchar little = *(cast(wchar*)ByteOrderMarks[BOM.UTF16LE]);
    wchar big = *(cast(wchar*)ByteOrderMarks[BOM.UTF16BE]);
    if(!wrong){return endian == Endian.LittleEndian ? little : big;}
    return endian == Endian.LittleEndian ? big : little;
    if(!wrong){return endian == Endian.littleEndian ? little : big;}
    return endian == Endian.littleEndian ? big : little;
}

/**
@ -42,8 +42,8 @@ dchar bom32(bool wrong = false) pure
{
    dchar little = *(cast(dchar*)ByteOrderMarks[BOM.UTF32LE]);
    dchar big = *(cast(dchar*)ByteOrderMarks[BOM.UTF32BE]);
    if(!wrong){return endian == Endian.LittleEndian ? little : big;}
    return endian == Endian.LittleEndian ? big : little;
    if(!wrong){return endian == Endian.littleEndian ? little : big;}
    return endian == Endian.littleEndian ? big : little;
}

/**
80
test/src/representer.d
Normal file

@ -0,0 +1,80 @@

// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testrepresenter;


import std.path;
import std.exception;

import dyaml.testcommon;
import dyaml.testconstructor;


/**
 * Representer unittest.
 *
 * Params:  verbose = Print verbose output?
 *          codeFilename = File name to determine test case from.
 *                         Nothing is read from this file, it only exists
 *                         to specify that we need a matching unittest.
 */
void testRepresenterTypes(bool verbose, string codeFilename)
{
    string baseName = codeFilename.baseName.stripExtension;
    enforce((baseName in dyaml.testconstructor.expected) !is null,
            new Exception("Unimplemented representer test: " ~ baseName));

    Node[] expectedNodes = expected[baseName];
    foreach(encoding; [Encoding.UTF_8, Encoding.UTF_16, Encoding.UTF_32])
    {
        string output;
        Node[] readNodes;

        scope(failure)
        {
            if(verbose)
            {
                writeln("Expected nodes:");
                foreach(ref n; expectedNodes){writeln(n.debugString, "\n---\n");}
                writeln("Read nodes:");
                foreach(ref n; readNodes){writeln(n.debugString, "\n---\n");}
                writeln("OUTPUT:\n", output);
            }
        }

        auto emitStream = new MemoryStream;
        auto representer = new Representer;
        representer.addRepresenter!TestClass(&representClass);
        representer.addRepresenter!TestStruct(&representStruct);
        auto dumper = Dumper(emitStream);
        dumper.representer = representer;
        dumper.encoding = encoding;
        dumper.dump(expectedNodes);

        output = cast(string)emitStream.data;
        auto loadStream = new MemoryStream(emitStream.data);
        auto constructor = new Constructor;
        constructor.addConstructor("!tag1", &constructClass);
        constructor.addConstructor("!tag2", &constructStruct);

        auto resolver = new Resolver;
        auto loader = Loader(loadStream, "DUMMY", constructor, resolver);
        foreach(node; loader){readNodes ~= node;}

        assert(expectedNodes.length == readNodes.length);
        foreach(n; 0 .. expectedNodes.length)
        {
            assert(expectedNodes[n].equals!(Node, false)(readNodes[n]));
        }
    }
}

unittest
{
    writeln("D:YAML Representer unittest");
    run("testRepresenterTypes", &testRepresenterTypes, ["code"]);
}

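The representer test exercises the user-facing pattern for custom types: register a representer function before dumping and the matching constructor function before loading. Reduced to its core (the calls are the same ones used above; everything else is illustrative):

    auto representer = new Representer;
    representer.addRepresenter!TestClass(&representClass);
    auto dumper = Dumper(new MemoryStream);
    dumper.representer = representer;

    auto constructor = new Constructor;
    constructor.addConstructor("!tag1", &constructClass);
    //A Loader given this constructor can turn the dumped !tag1 mappings back into TestClass objects.
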
48
test/src/resolver.d
Normal file

@ -0,0 +1,48 @@

// Copyright Ferdinand Majerech 2011.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

module dyaml.testresolver;


import std.file;
import std.string;

import dyaml.testcommon;


/**
 * Implicit tag resolution unittest.
 *
 * Params:  verbose = Print verbose output?
 *          dataFilename = TODO
 *          detectFilename = TODO
 */
void testImplicitResolver(bool verbose, string dataFilename, string detectFilename)
{
    string correctTag;
    Node node;

    scope(exit)
    {
        if(verbose)
        {
            writeln("Correct tag: ", correctTag);
            writeln("Node: ", node.debugString);
            assert(node.isSequence);
            assert(node.tag.get == correctTag);
        }
    }

    correctTag = readText(dataFilename).strip();
    node = yaml.load(dataFilename);
}


unittest
{
    writeln("D:YAML Resolver unittest");
    run("testImplicitResolver", &testImplicitResolver, ["data", "detect"]);
}

8
yaml.d

@ -6,8 +6,12 @@

module yaml;

public import dyaml.loader;
public import dyaml.constructor;
public import dyaml.dumper;
public import dyaml.encoding;
public import dyaml.exception;
public import dyaml.linebreak;
public import dyaml.loader;
public import dyaml.representer;
public import dyaml.resolver;
public import dyaml.node;
public import dyaml.exception;

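With these re-exports in place, a program only needs import yaml; to reach the loader, dumper and node types used by the tests above. A minimal sketch (std.stream is assumed only to provide MemoryStream; the file name is a placeholder):

    import std.stream;
    import yaml;

    void main()
    {
        //Load a document, then dump it back out through a Dumper.
        Node root = yaml.load("input.yaml");
        auto buffer = new MemoryStream;
        auto dumper = Dumper(buffer);
        dumper.dump([root]);
    }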