Implemented a simple Queue data structure to use until Phobos has a
replacement. Tokens are no longer immutable.
parent 595302fbff
commit fb814c66c2
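
This commit swaps the plain-array queues in the emitter and scanner for the new Queue!T in dyaml/queue.d (full listing below). A minimal usage sketch of the API the diff relies on - push/pop/peek/length plus mid-queue insert - modeled on the unittest at the bottom of queue.d:

    import dyaml.queue;

    auto queue = Queue!int();
    queue.push(1);
    queue.push(2);                 // Queue is now [1, 2]; the front is 1.
    assert(queue.peek() == 1);     // peek() looks at the front without removing it.
    queue.insert(42, 0);           // Mid-queue insertion; the scanner uses this for KEY tokens.
    assert(queue.pop() == 42);     // pop() removes and returns the front element.
    assert(queue.length == 2);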
dyaml/emitter.d

@@ -30,6 +30,7 @@ import dyaml.event;
 import dyaml.exception;
 import dyaml.flags;
 import dyaml.linebreak;
+import dyaml.queue;
 import dyaml.tag;
 import dyaml.token;

@@ -83,7 +84,7 @@ struct Emitter

         //TODO Should be replaced by a queue or linked list once Phobos has anything usable.
         ///Event queue.
-        Event[] events_;
+        Queue!Event events_;
         ///Event we're currently emitting.
         Event event_;

@@ -171,21 +172,25 @@ struct Emitter
         {
             stream_ = null;
             clear(states_);
+            states_ = null;
             clear(events_);
             clear(indents_);
+            indents_ = null;
             clear(tagDirectives_);
+            tagDirectives_ = null;
             clear(preparedAnchor_);
+            preparedAnchor_ = null;
             clear(preparedTag_);
+            preparedTag_ = null;
         }

         ///Emit an event. Throws EmitterException on error.
         void emit(immutable Event event)
         {
-            events_ ~= event;
+            events_.push(event);
             while(!needMoreEvents())
             {
-                event_ = events_.front;
-                events_.popFront();
+                event_ = events_.pop();
                 state_();
                 clear(event_);
             }
@@ -237,23 +242,31 @@ struct Emitter
         }

         ///In some cases, we wait for a few next events before emitting.
-        bool needMoreEvents() const
+        bool needMoreEvents()
         {
             if(events_.length == 0){return true;}

-            if(events_[0].id == EventID.DocumentStart){return needEvents(1);}
-            if(events_[0].id == EventID.SequenceStart){return needEvents(2);}
-            if(events_[0].id == EventID.MappingStart) {return needEvents(3);}
+            immutable event = events_.peek();
+            if(event.id == EventID.DocumentStart){return needEvents(1);}
+            if(event.id == EventID.SequenceStart){return needEvents(2);}
+            if(event.id == EventID.MappingStart) {return needEvents(3);}

             return false;
         }

         ///Determines if we need specified number of more events.
-        bool needEvents(in uint count) const
+        bool needEvents(in uint count)
         {
             int level = 0;
-            foreach(ref event; events_[1 .. $])
+
+            //Rather ugly, but good enough for now.
+            //Couldn't be bothered writing a range as events_ should eventually
+            //become a Phobos queue/linked list.
+            events_.startIteration();
+            events_.next();
+            while(!events_.iterationOver())
             {
+                immutable event = events_.next();
                 if([EventID.DocumentStart, EventID.SequenceStart,
                     EventID.MappingStart].canFind(event.id))
                 {
@@ -645,14 +658,14 @@ struct Emitter
         bool checkEmptySequence() const
         {
             return event_.id == EventID.SequenceStart && events_.length > 0
-                   && events_[0].id == EventID.SequenceEnd;
+                   && events_.peek().id == EventID.SequenceEnd;
         }

         ///Check if an empty mapping is next.
         bool checkEmptyMapping() const
         {
             return event_.id == EventID.MappingStart && events_.length > 0
-                   && events_[0].id == EventID.MappingEnd;
+                   && events_.peek().id == EventID.MappingEnd;
         }

         ///Check if an empty document is next.
@@ -663,7 +676,7 @@ struct Emitter
                 return false;
             }

-            immutable event = events_[0];
+            immutable event = events_.peek();
             bool emptyScalar = event.id == EventID.Scalar && event.anchor.isNull() &&
                                event.tag.isNull() && event.implicit && event.value == "";
             return emptyScalar;
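
A note on the needEvents() rewrite above: the old code sliced events_[1 .. $], but Queue!T is not a range, so the new code walks the queue through its ad-hoc iteration protocol. The shape of that protocol, as the diff uses it (the leading next() call skips the front element, mirroring the old slice):

    events_.startIteration();           // Point the internal cursor at the front node.
    events_.next();                     // Skip the front element (old code iterated events_[1 .. $]).
    while(!events_.iterationOver())     // The cursor runs off the tail when iteration is done.
    {
        immutable event = events_.next();   // Return the current element and advance the cursor.
        // ...inspect event...
    }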
dyaml/parser.d

@@ -252,7 +252,7 @@ final class Parser
         ///Parse stream start.
         Event parseStreamStart()
         {
-            Token token = scanner_.getToken();
+            immutable token = scanner_.getToken();
             state_ = &parseImplicitDocumentStart;
             return streamStartEvent(token.startMark, token.endMark, token.encoding);
         }
@@ -265,7 +265,7 @@ final class Parser
                                     TokenID.StreamEnd))
             {
                 tagHandles_ = defaultTags_;
-                Token token = scanner_.peekToken();
+                immutable token = scanner_.peekToken();

                 states_ ~= &parseDocumentEnd;
                 state_ = &parseBlockNode;
@@ -300,7 +300,7 @@ final class Parser
             else
             {
                 //Parse the end of the stream.
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();
                 assert(states_.length == 0);
                 assert(marks_.length == 0);
                 state_ = null;
@@ -342,7 +342,7 @@ final class Parser
             //Process directives.
             while(scanner_.checkToken(TokenID.Directive))
             {
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();
                 //Name and value are separated by '\0'.
                 auto parts = token.value.split("\0");
                 const name = parts[0];
@@ -416,7 +416,7 @@ final class Parser
         {
             if(scanner_.checkToken(TokenID.Alias))
             {
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();
                 state_ = popState();
                 return aliasEvent(token.startMark, token.endMark, Anchor(token.value));
             }
@@ -431,7 +431,7 @@ final class Parser
             {
                 if(!scanner_.checkToken(id)){return false;}
                 invalidMarks = false;
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();
                 if(start){startMark = token.startMark;}
                 if(id == TokenID.Tag){tagMark = token.startMark;}
                 endMark = token.endMark;
@@ -462,7 +462,7 @@ final class Parser

             if(scanner_.checkToken(TokenID.Scalar))
             {
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();

                 implicit = (token.style == ScalarStyle.Plain && tag is null) || tag == "!";
                 bool implicit_2 = (!implicit) && tag is null;
@@ -515,7 +515,7 @@ final class Parser
                                    [implicit, false] , "");
             }

-            Token token = scanner_.peekToken();
+            immutable token = scanner_.peekToken();
             throw new Error("While parsing a " ~ (block ? "block" : "flow") ~ " node",
                             startMark, "expected the node content, but found: "
                             ~ token.idString, token.startMark);
@@ -571,7 +571,7 @@ final class Parser

             if(scanner_.checkToken(TokenID.BlockEntry))
             {
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();
                 if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.BlockEnd))
                 {
                     states_~= &parseBlockSequenceEntry!false;
@@ -584,7 +584,7 @@ final class Parser

             if(!scanner_.checkToken(TokenID.BlockEnd))
             {
-                Token token = scanner_.peekToken();
+                immutable token = scanner_.peekToken();
                 throw new Error("While parsing a block collection", marks_[$ - 1],
                                 "expected block end, but found " ~ token.idString,
                                 token.startMark);
@@ -592,7 +592,7 @@ final class Parser

             state_ = popState();
             popMark();
-            Token token = scanner_.getToken();
+            immutable token = scanner_.getToken();
             return sequenceEndEvent(token.startMark, token.endMark);
         }

@@ -603,7 +603,7 @@ final class Parser
         {
             if(scanner_.checkToken(TokenID.BlockEntry))
             {
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();

                 if(!scanner_.checkToken(TokenID.BlockEntry, TokenID.Key,
                                         TokenID.Value, TokenID.BlockEnd))
@@ -617,7 +617,7 @@ final class Parser
             }

             state_ = popState();
-            Token token = scanner_.peekToken();
+            immutable token = scanner_.peekToken();
             return sequenceEndEvent(token.startMark, token.endMark);
         }

@@ -635,7 +635,7 @@ final class Parser

             if(scanner_.checkToken(TokenID.Key))
             {
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();

                 if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
                 {
@@ -649,7 +649,7 @@ final class Parser

             if(!scanner_.checkToken(TokenID.BlockEnd))
             {
-                Token token = scanner_.peekToken();
+                immutable token = scanner_.peekToken();
                 throw new Error("While parsing a block mapping", marks_[$ - 1],
                                 "expected block end, but found: " ~ token.idString,
                                 token.startMark);
@@ -657,7 +657,7 @@ final class Parser

             state_ = popState();
             popMark();
-            Token token = scanner_.getToken();
+            immutable token = scanner_.getToken();
             return mappingEndEvent(token.startMark, token.endMark);
         }

@@ -666,7 +666,7 @@ final class Parser
         {
             if(scanner_.checkToken(TokenID.Value))
             {
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();

                 if(!scanner_.checkToken(TokenID.Key, TokenID.Value, TokenID.BlockEnd))
                 {
@@ -710,7 +710,7 @@ final class Parser
                 }
                 else
                 {
-                    Token token = scanner_.peekToken;
+                    immutable token = scanner_.peekToken;
                     throw new Error("While parsing a flow sequence", marks_[$ - 1],
                                     "expected ',' or ']', but got: " ~
                                     token.idString, token.startMark);
@@ -719,7 +719,7 @@ final class Parser

             if(scanner_.checkToken(TokenID.Key))
             {
-                Token token = scanner_.peekToken();
+                immutable token = scanner_.peekToken();
                 state_ = &parseFlowSequenceEntryMappingKey;
                 return mappingStartEvent(token.startMark, token.endMark,
                                          Anchor(), Tag(), true, CollectionStyle.Flow);
@@ -731,7 +731,7 @@ final class Parser
                 }
             }

-            Token token = scanner_.getToken();
+            immutable token = scanner_.getToken();
             state_ = popState();
             popMark();
             return sequenceEndEvent(token.startMark, token.endMark);
@@ -740,7 +740,7 @@ final class Parser
         ///Parse a key in flow context.
         Event parseFlowKey(in Event delegate() nextState)
         {
-            Token token = scanner_.getToken();
+            immutable token = scanner_.getToken();

             if(!scanner_.checkToken(TokenID.Value, TokenID.FlowEntry,
                                     TokenID.FlowSequenceEnd))
@@ -764,7 +764,7 @@ final class Parser
         {
             if(scanner_.checkToken(TokenID.Value))
             {
-                Token token = scanner_.getToken();
+                immutable token = scanner_.getToken();
                 if(!scanner_.checkToken(TokenID.FlowEntry, checkId))
                 {
                     states_ ~= nextState;
@@ -790,7 +790,7 @@ final class Parser
         Event parseFlowSequenceEntryMappingEnd()
         {
             state_ = &parseFlowSequenceEntry!false;
-            Token token = scanner_.peekToken();
+            immutable token = scanner_.peekToken();
             return mappingEndEvent(token.startMark, token.startMark);
         }

@@ -817,7 +817,7 @@ final class Parser
             }
             else
             {
-                Token token = scanner_.peekToken;
+                immutable token = scanner_.peekToken;
                 throw new Error("While parsing a flow mapping", marks_[$ - 1],
                                 "expected ',' or '}', but got: " ~
                                 token.idString, token.startMark);
@@ -836,7 +836,7 @@ final class Parser
             }
         }

-        Token token = scanner_.getToken();
+        immutable token = scanner_.getToken();
         state_ = popState();
         popMark();
         return mappingEndEvent(token.startMark, token.endMark);
dyaml/queue.d (new file, 230 lines)

@@ -0,0 +1,230 @@
+// Copyright Ferdinand Majerech 2011.
+// Distributed under the Boost Software License, Version 1.0.
+//    (See accompanying file LICENSE_1_0.txt or copy at
+//    http://www.boost.org/LICENSE_1_0.txt)
+
+module dyaml.queue;
+
+
+///Queue collection.
+import core.stdc.stdlib;
+import core.memory;
+
+import std.container;
+
+
+package:
+
+/**
+ * Simple queue implemented as a singly linked list with a tail pointer.
+ *
+ * Needed in some D:YAML code that needs a queue-like structure without too
+ * much reallocation that goes with an array.
+ *
+ * This should be replaced once Phobos has a decent queue/linked list.
+ *
+ * Uses manual allocation through malloc/free.
+ *
+ * Also has some features uncommon for a queue, e.g. iteration.
+ * Couldn't bother with implementing a range, as this is used only as
+ * a placeholder until Phobos gets a decent replacement.
+ */
+struct Queue(T)
+{
+    private:
+        ///Linked list node containing one element and pointer to the next node.
+        struct Node
+        {
+            T payload_ = T.init;
+            Node* next_ = null;
+        }
+
+        ///Start of the linked list - first element added in time (end of the queue).
+        Node* first_ = null;
+        ///Last element of the linked list - last element added in time (start of the queue).
+        Node* last_ = null;
+        ///Cursor pointing to the current node in iteration.
+        Node* cursor_ = null;
+        ///Length of the queue.
+        size_t length_ = 0;
+
+    public:
+        @disable void opAssign(ref Queue);
+
+        ///Destroy the queue, deallocating all its elements.
+        ~this()
+        {
+            while(!empty){pop();}
+            cursor_ = last_ = first_ = null;
+            length_ = 0;
+        }
+
+        ///Start iterating over the queue.
+        void startIteration()
+        {
+            cursor_ = first_;
+        }
+
+        ///Get next element in the queue.
+        ref const(T) next()
+        in
+        {
+            assert(!empty);
+            assert(cursor_ !is null);
+        }
+        body
+        {
+            const previous = cursor_;
+            cursor_ = cursor_.next_;
+            return previous.payload_;
+        }
+
+        ///Are we done iterating?
+        bool iterationOver() const
+        {
+            return cursor_ is null;
+        }
+
+        ///Push new item to the queue.
+        void push(in T item)
+        {
+            Node* newLast = allocate!Node(item, cast(Node*)null);
+            if(last_ !is null){last_.next_ = newLast;}
+            if(first_ is null){first_ = newLast;}
+            last_ = newLast;
+            ++length_;
+        }
+
+        ///Insert a new item putting it to specified index in the linked list.
+        void insert(in T item, in size_t idx)
+        in
+        {
+            assert(idx <= length_);
+        }
+        body
+        {
+            if(idx == 0)
+            {
+                //Add after the first element - so this will be the next to pop.
+                first_ = allocate!Node(item, first_);
+                ++length_;
+            }
+            else if(idx == length_)
+            {
+                //Adding before last added element, so we can just push.
+                push(item);
+            }
+            else
+            {
+                //Get the element before one we're inserting.
+                Node* current = first_;
+                foreach(i; 1 .. idx)
+                {
+                    current = current.next_;
+                }
+
+                //Insert a new node after current, and put current.next_ behind it.
+                current.next_ = allocate!Node(item, current.next_);
+                ++length_;
+            }
+        }
+
+        ///Return the next element in the queue and remove it.
+        T pop()
+        in
+        {
+            assert(!empty, "Trying to pop an element from an empty queue");
+        }
+        body
+        {
+            T result = peek();
+            Node* temp = first_;
+            first_ = first_.next_;
+            free(temp);
+            if(--length_ == 0)
+            {
+                assert(first_ is null);
+                last_ = null;
+            }
+
+            return result;
+        }
+
+        ///Return the next element in the queue.
+        ref const(T) peek() const
+        in
+        {
+            assert(!empty, "Trying to peek at an element in an empty queue");
+        }
+        body
+        {
+            return first_.payload_;
+        }
+
+        ///Is the queue empty?
+        @property bool empty() const
+        {
+            return first_ is null;
+        }
+
+        ///Return number of elements in the queue.
+        @property size_t length() const
+        {
+            return length_;
+        }
+}
+
+
+private:
+
+///Allocate a struct, passing arguments to its constructor or default initializer.
+T* allocate(T, Args...)(Args args)
+{
+    T* ptr = cast(T*)malloc(T.sizeof);
+    *ptr = T(args);
+    //The struct might contain references to GC-allocated memory, so tell the GC about it.
+    GC.addRange(cast(void*)ptr, T.sizeof);
+    return ptr;
+}
+
+///Deallocate struct pointed at by specified pointer.
+void free(T)(T* ptr)
+{
+    //GC doesn't need to care about any references in this struct anymore.
+    GC.removeRange(cast(void*)ptr);
+    clear(*ptr);
+    std.c.stdlib.free(ptr);
+}
+
+unittest
+{
+    auto queue = Queue!int();
+    assert(queue.empty);
+    foreach(i; 0 .. 65)
+    {
+        queue.push(5);
+        assert(queue.pop() == 5);
+        assert(queue.empty);
+        assert(queue.length_ == 0);
+    }
+
+    int[] array = [1, -1, 2, -2, 3, -3, 4, -4, 5, -5];
+    foreach(i; array)
+    {
+        queue.push(i);
+    }
+
+    array = 42 ~ array[0 .. 3] ~ 42 ~ array[3 .. $] ~ 42;
+    queue.insert(42, 3);
+    queue.insert(42, 0);
+    queue.insert(42, queue.length);
+
+    int[] array2;
+    while(!queue.empty)
+    {
+        array2 ~= queue.pop();
+    }
+
+    assert(array == array2);
+}
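
Design note on the allocate/free helpers above: memory from malloc is invisible to D's garbage collector, yet each Node may hold GC-managed references in its payload (Token and Event carry strings). GC.addRange makes the GC scan the malloc'd block so those references keep their targets alive; GC.removeRange must undo the registration before the block is freed. A standalone sketch of the same pattern (the Holder type is hypothetical, not part of this commit):

    import core.memory;
    import core.stdc.stdlib;

    struct Holder { string text; }   // Carries a GC-managed reference (hypothetical example type).

    unittest
    {
        Holder* ptr = cast(Holder*)malloc(Holder.sizeof);
        *ptr = Holder("GC-allocated payload");
        GC.addRange(cast(void*)ptr, Holder.sizeof);   // The GC now scans this block for references.
        assert(ptr.text == "GC-allocated payload");
        GC.removeRange(cast(void*)ptr);               // Unregister before freeing.
        core.stdc.stdlib.free(ptr);
    }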
dyaml/scanner.d

@@ -23,6 +23,7 @@ import std.typecons;
 import std.utf;

 import dyaml.exception;
+import dyaml.queue;
 import dyaml.reader;
 import dyaml.token;

@@ -119,9 +120,9 @@ final class Scanner
         ///Past indentation levels. Used as a stack.
         int[] indents_;

-        //Should be replaced by a queue or linked list once Phobos has anything usable.
+
         ///Processed tokens not yet emitted. Used as a queue.
-        Token[] tokens_;
+        Queue!Token tokens_;

         ///Number of tokens emitted through the getToken method.
         uint tokensTaken_;
@@ -152,7 +153,6 @@ final class Scanner
         ~this()
         {
             clear(tokens_);
-            tokens_ = null;
             clear(indents_);
             indents_ = null;
             clear(possibleSimpleKeys_);
@@ -180,7 +180,7 @@ final class Scanner
             if(ids.length == 0){return true;}
             else
             {
-                const nextId = tokens_.front.id;
+                const nextId = tokens_.peek().id;
                 foreach(id; ids)
                 {
                     if(nextId == id){return true;}
@@ -195,10 +195,10 @@ final class Scanner
          *
          * Must not be called if there are no tokens left.
          */
-        ref Token peekToken()
+        ref const(Token) peekToken()
         {
             while(needMoreTokens){fetchToken();}
-            if(!tokens_.empty){return tokens_.front;}
+            if(!tokens_.empty){return tokens_.peek();}
             assert(false, "No token left to peek");
         }

@@ -213,9 +213,7 @@ final class Scanner
             if(!tokens_.empty)
             {
                 ++tokensTaken_;
-                Token result = tokens_.front;
-                tokens_.popFront();
-                return result;
+                return tokens_.pop();
             }
             assert(false, "No token left to get");
         }
@@ -380,7 +378,7 @@ final class Scanner
                 {
                     indent_ = indents_.back;
                     indents_.popBack();
-                    tokens_ ~= blockEndToken(reader_.mark, reader_.mark);
+                    tokens_.push(blockEndToken(reader_.mark, reader_.mark));
                 }
             }

@@ -403,7 +401,7 @@ final class Scanner
         ///Add STREAM-START token.
         void fetchStreamStart()
        {
-            tokens_ ~= streamStartToken(reader_.mark, reader_.mark, reader_.encoding);
+            tokens_.push(streamStartToken(reader_.mark, reader_.mark, reader_.encoding));
         }

         ///Add STREAM-END token.
@@ -417,7 +415,7 @@ final class Scanner
             SimpleKey[uint] empty;
             possibleSimpleKeys_ = empty;

-            tokens_ ~= streamEndToken(reader_.mark, reader_.mark);
+            tokens_.push(streamEndToken(reader_.mark, reader_.mark));
             done_ = true;
         }

@@ -430,7 +428,7 @@ final class Scanner
             removePossibleSimpleKey();
             allowSimpleKey_ = false;

-            tokens_ ~= scanDirective();
+            tokens_.push(scanDirective());
         }

         ///Add DOCUMENT-START or DOCUMENT-END token.
@@ -445,7 +443,7 @@ final class Scanner

             Mark startMark = reader_.mark;
             reader_.forward(3);
-            tokens_ ~= simpleToken!id(startMark, reader_.mark);
+            tokens_.push(simpleToken!id(startMark, reader_.mark));
         }

         ///Aliases to add DOCUMENT-START or DOCUMENT-END token.
@@ -463,7 +461,7 @@ final class Scanner

             Mark startMark = reader_.mark;
             reader_.forward();
-            tokens_ ~= simpleToken!id(startMark, reader_.mark);
+            tokens_.push(simpleToken!id(startMark, reader_.mark));
         }

         ///Aliases to add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
@@ -481,7 +479,7 @@ final class Scanner

             Mark startMark = reader_.mark;
             reader_.forward();
-            tokens_ ~= simpleToken!id(startMark, reader_.mark);
+            tokens_.push(simpleToken!id(startMark, reader_.mark));
         }

         ///Aliases to add FLOW-SEQUENCE-START or FLOW-MAPPING-START token/
@@ -498,7 +496,7 @@ final class Scanner

             Mark startMark = reader_.mark;
             reader_.forward();
-            tokens_ ~= flowEntryToken(startMark, reader_.mark);
+            tokens_.push(flowEntryToken(startMark, reader_.mark));
         }

         /**
@@ -515,7 +513,7 @@ final class Scanner

             if(addIndent(reader_.column))
             {
-                tokens_ ~= simpleToken!id(reader_.mark, reader_.mark);
+                tokens_.push(simpleToken!id(reader_.mark, reader_.mark));
             }
         }

@@ -534,7 +532,7 @@ final class Scanner

             Mark startMark = reader_.mark;
             reader_.forward();
-            tokens_ ~= blockEntryToken(startMark, reader_.mark);
+            tokens_.push(blockEntryToken(startMark, reader_.mark));
         }

         ///Add KEY token. Might add BLOCK-MAPPING-START in the process.
@@ -549,7 +547,7 @@ final class Scanner

             Mark startMark = reader_.mark;
             reader_.forward();
-            tokens_ ~= keyToken(startMark, reader_.mark);
+            tokens_.push(keyToken(startMark, reader_.mark));
         }

         ///Add VALUE token. Might add KEY and/or BLOCK-MAPPING-START in the process.
@@ -567,14 +565,12 @@ final class Scanner

                 //Add KEY.
                 //Manually inserting since tokens are immutable (need linked list).
-                tokens_ = tokens_[0 .. idx] ~ keyToken(keyMark, keyMark) ~
-                          tokens_[idx .. tokens_.length];
+                tokens_.insert(keyToken(keyMark, keyMark), idx);

                 //If this key starts a new block mapping, we need to add BLOCK-MAPPING-START.
                 if(flowLevel_ == 0 && addIndent(key.column))
                 {
-                    tokens_ = tokens_[0 .. idx] ~ blockMappingStartToken(keyMark, keyMark) ~
-                              tokens_[idx .. tokens_.length];
+                    tokens_.insert(blockMappingStartToken(keyMark, keyMark), idx);
                 }

                 //There cannot be two simple keys in a row.
@@ -591,7 +587,7 @@ final class Scanner
             //BLOCK-MAPPING-START. It'll be detected as an error later by the parser.
             if(flowLevel_ == 0 && addIndent(reader_.column))
             {
-                tokens_ ~= blockMappingStartToken(reader_.mark, reader_.mark);
+                tokens_.push(blockMappingStartToken(reader_.mark, reader_.mark));
             }

             //Reset possible simple key on the current level.
@@ -603,7 +599,7 @@ final class Scanner
             //Add VALUE.
             Mark startMark = reader_.mark;
             reader_.forward();
-            tokens_ ~= valueToken(startMark, reader_.mark);
+            tokens_.push(valueToken(startMark, reader_.mark));
         }

         ///Add ALIAS or ANCHOR token.
@@ -615,7 +611,7 @@ final class Scanner
             //No simple keys after ALIAS/ANCHOR.
             allowSimpleKey_ = false;

-            tokens_ ~= scanAnchor(id);
+            tokens_.push(scanAnchor(id));
         }

         ///Aliases to add ALIAS or ANCHOR token.
@@ -630,7 +626,7 @@ final class Scanner
             //No simple keys after TAG.
             allowSimpleKey_ = false;

-            tokens_ ~= scanTag();
+            tokens_.push(scanTag());
         }

         ///Add block SCALAR token.
@@ -642,7 +638,7 @@ final class Scanner
             //A simple key may follow a block scalar.
             allowSimpleKey_ = true;

-            tokens_ ~= scanBlockScalar(style);
+            tokens_.push(scanBlockScalar(style));
         }

         ///Aliases to add literal or folded block scalar.
@@ -658,7 +654,7 @@ final class Scanner
             allowSimpleKey_ = false;

             //Scan and add SCALAR.
-            tokens_ ~= scanFlowScalar(quotes);
+            tokens_.push(scanFlowScalar(quotes));
         }

         ///Aliases to add single or double quoted block scalar.
@@ -675,7 +671,7 @@ final class Scanner
             allowSimpleKey_ = false;

             //Scan and add SCALAR. May change allowSimpleKey_
-            tokens_ ~= scanPlain();
+            tokens_.push(scanPlain());
         }

dyaml/token.d

@@ -69,7 +69,7 @@ enum CollectionStyle : ubyte
  *
  * 32 bytes on 64-bit.
  */
-immutable struct Token
+struct Token
 {
     ///Value of the token, if any.
     string value;
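
Why Token loses immutable here: Queue!Token assigns into malloc'd nodes (*ptr = T(args) in allocate, T payload_ in Node), and the scanner now inserts KEY and BLOCK-MAPPING-START tokens into the middle of the queue; an immutable struct cannot be overwritten that way. A tiny sketch of the constraint (hypothetical types, not from the commit):

    immutable struct Fixed { int x; }   // 'immutable' on the declaration applies to every field.
    struct Plain { int x; }

    unittest
    {
        Fixed a = Fixed(1);
        //a = Fixed(2);                 // Error: cannot modify immutable fields.
        Plain b = Plain(1);
        b = Plain(2);                   // Fine: Queue!T needs assignable payloads like this.
    }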