Initial commit.
The library is able to support simple TCP servers in its current state. The API is still mostly compatible with mainline vibe.d, but the driver system has been replaced by the eventcore library, and sockets/files/timers/... are now structs with automatic reference counting instead of GC-collected classes. The stream interfaces have been removed for now.
commit 7e2d1dd038
22 changed files with 9977 additions and 0 deletions
634
source/vibe/internal/array.d
Normal file
@@ -0,0 +1,634 @@
|
|||
/**
|
||||
Utility functions for array processing
|
||||
|
||||
Copyright: © 2012 RejectedSoftware e.K.
|
||||
License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file.
|
||||
Authors: Sönke Ludwig
|
||||
*/
|
||||
module vibe.internal.array;
|
||||
|
||||
import vibe.internal.memory;
|
||||
|
||||
import std.algorithm;
|
||||
import std.range : isInputRange, isOutputRange;
|
||||
import std.traits;
|
||||
static import std.utf;
|
||||
|
||||
|
||||
void removeFromArray(T)(ref T[] array, T item)
|
||||
{
|
||||
foreach( i; 0 .. array.length )
|
||||
if( array[i] is item ){
|
||||
removeFromArrayIdx(array, i);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
void removeFromArrayIdx(T)(ref T[] array, size_t idx)
|
||||
{
|
||||
foreach( j; idx+1 .. array.length)
|
||||
array[j-1] = array[j];
|
||||
array.length = array.length-1;
|
||||
}
|
||||
|
||||
enum AppenderResetMode {
|
||||
keepData,
|
||||
freeData,
|
||||
reuseData
|
||||
}
|
||||
|
||||
struct AllocAppender(ArrayType : E[], E) {
|
||||
alias ElemType = Unqual!E;
|
||||
|
||||
static assert(!hasIndirections!E && !hasElaborateDestructor!E);
|
||||
|
||||
private {
|
||||
ElemType[] m_data;
|
||||
ElemType[] m_remaining;
|
||||
Allocator m_alloc;
|
||||
bool m_allocatedBuffer = false;
|
||||
}
|
||||
|
||||
this(Allocator alloc, ElemType[] initial_buffer = null)
|
||||
{
|
||||
m_alloc = alloc;
|
||||
m_data = initial_buffer;
|
||||
m_remaining = initial_buffer;
|
||||
}
|
||||
|
||||
@disable this(this);
|
||||
|
||||
@property ArrayType data() { return cast(ArrayType)m_data[0 .. m_data.length - m_remaining.length]; }
|
||||
|
||||
void reset(AppenderResetMode reset_mode = AppenderResetMode.keepData)
|
||||
{
|
||||
if (reset_mode == AppenderResetMode.keepData) m_data = null;
|
||||
else if (reset_mode == AppenderResetMode.freeData) { if (m_allocatedBuffer) m_alloc.free(m_data); m_data = null; }
|
||||
m_remaining = m_data;
|
||||
}
|
||||
|
||||
/** Grows the capacity of the internal buffer so that it can hold a minimum number of elements.
|
||||
|
||||
Params:
|
||||
amount = The minimum amount of elements that shall be appendable without
|
||||
triggering a re-allocation.
|
||||
|
||||
*/
|
||||
void reserve(size_t amount)
|
||||
{
|
||||
size_t nelems = m_data.length - m_remaining.length;
|
||||
if (!m_data.length) {
|
||||
m_data = cast(ElemType[])m_alloc.alloc(amount*E.sizeof);
|
||||
m_remaining = m_data;
|
||||
m_allocatedBuffer = true;
|
||||
}
|
||||
if (m_remaining.length < amount) {
|
||||
debug {
|
||||
import std.digest.crc;
|
||||
auto checksum = crc32Of(m_data[0 .. nelems]);
|
||||
}
|
||||
if (m_allocatedBuffer) m_data = cast(ElemType[])m_alloc.realloc(m_data, (nelems+amount)*E.sizeof);
|
||||
else {
|
||||
auto newdata = cast(ElemType[])m_alloc.alloc((nelems+amount)*E.sizeof);
|
||||
newdata[0 .. nelems] = m_data[0 .. nelems];
|
||||
m_data = newdata;
|
||||
m_allocatedBuffer = true;
|
||||
}
|
||||
debug assert(crc32Of(m_data[0 .. nelems]) == checksum);
|
||||
}
|
||||
m_remaining = m_data[nelems .. m_data.length];
|
||||
}
|
||||
|
||||
void put(E el)
|
||||
{
|
||||
if( m_remaining.length == 0 ) grow(1);
|
||||
m_remaining[0] = el;
|
||||
m_remaining = m_remaining[1 .. $];
|
||||
}
|
||||
|
||||
void put(ArrayType arr)
|
||||
{
|
||||
if (m_remaining.length < arr.length) grow(arr.length);
|
||||
m_remaining[0 .. arr.length] = arr[];
|
||||
m_remaining = m_remaining[arr.length .. $];
|
||||
}
|
||||
|
||||
static if( !hasAliasing!E ){
|
||||
void put(in ElemType[] arr){
|
||||
put(cast(ArrayType)arr);
|
||||
}
|
||||
}
|
||||
|
||||
static if( is(ElemType == char) ){
|
||||
void put(dchar el)
|
||||
{
|
||||
if( el < 128 ) put(cast(char)el);
|
||||
else {
|
||||
char[4] buf;
|
||||
auto len = std.utf.encode(buf, el);
|
||||
put(cast(ArrayType)buf[0 .. len]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static if( is(ElemType == wchar) ){
|
||||
void put(dchar el)
|
||||
{
|
||||
if( el < 128 ) put(cast(wchar)el);
|
||||
else {
|
||||
wchar[3] buf;
|
||||
auto len = std.utf.encode(buf, el);
|
||||
put(cast(ArrayType)buf[0 .. len]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static if (!is(E == immutable) || !hasAliasing!E) {
|
||||
/** Appends a number of bytes in-place.
|
||||
|
||||
The delegate is passed a slice of the memory that follows
the data already written. Use `reserve` to ensure that this slice
has enough room. The delegate should overwrite as much of the
slice as desired and then return the number of elements that
should be appended (counting from the start of the slice).
|
||||
*/
|
||||
void append(scope size_t delegate(scope ElemType[] dst) del)
|
||||
{
|
||||
auto n = del(m_remaining);
|
||||
assert(n <= m_remaining.length);
|
||||
m_remaining = m_remaining[n .. $];
|
||||
}
|
||||
}
|
||||
|
||||
void grow(size_t min_free)
|
||||
{
|
||||
if( !m_data.length && min_free < 16 ) min_free = 16;
|
||||
|
||||
auto min_size = m_data.length + min_free - m_remaining.length;
|
||||
auto new_size = max(m_data.length, 16);
|
||||
while( new_size < min_size )
|
||||
new_size = (new_size * 3) / 2;
|
||||
reserve(new_size - m_data.length + m_remaining.length);
|
||||
}
|
||||
}
|
||||
|
||||
unittest {
|
||||
auto a = AllocAppender!string(defaultAllocator());
|
||||
a.put("Hello");
|
||||
a.put(' ');
|
||||
a.put("World");
|
||||
assert(a.data == "Hello World");
|
||||
a.reset();
|
||||
assert(a.data == "");
|
||||
}
|
||||
|
||||
unittest {
|
||||
char[4] buf;
|
||||
auto a = AllocAppender!string(defaultAllocator(), buf);
|
||||
a.put("He");
|
||||
assert(a.data == "He");
|
||||
assert(a.data.ptr == buf.ptr);
|
||||
a.put("ll");
|
||||
assert(a.data == "Hell");
|
||||
assert(a.data.ptr == buf.ptr);
|
||||
a.put('o');
|
||||
assert(a.data == "Hello");
|
||||
assert(a.data.ptr != buf.ptr);
|
||||
}
|
||||
|
||||
unittest {
|
||||
char[4] buf;
|
||||
auto a = AllocAppender!string(defaultAllocator(), buf);
|
||||
a.put("Hello");
|
||||
assert(a.data == "Hello");
|
||||
assert(a.data.ptr != buf.ptr);
|
||||
}
|
||||
|
||||
unittest {
|
||||
auto app = AllocAppender!(int[])(defaultAllocator);
|
||||
app.reserve(2);
|
||||
app.append((scope mem) {
|
||||
assert(mem.length >= 2);
|
||||
mem[0] = 1;
|
||||
mem[1] = 2;
|
||||
return 2;
|
||||
});
|
||||
assert(app.data == [1, 2]);
|
||||
}
|
||||
|
||||
unittest {
|
||||
auto app = AllocAppender!string(defaultAllocator);
|
||||
app.reserve(3);
|
||||
app.append((scope mem) {
|
||||
assert(mem.length >= 3);
|
||||
mem[0] = 'f';
|
||||
mem[1] = 'o';
|
||||
mem[2] = 'o';
|
||||
return 3;
|
||||
});
|
||||
assert(app.data == "foo");
|
||||
}
|
||||
|
||||
|
||||
struct FixedAppender(ArrayType : E[], size_t NELEM, E) {
|
||||
alias ElemType = Unqual!E;
|
||||
private {
|
||||
ElemType[NELEM] m_data;
|
||||
size_t m_fill;
|
||||
}
|
||||
|
||||
void clear()
|
||||
{
|
||||
m_fill = 0;
|
||||
}
|
||||
|
||||
void put(E el)
|
||||
{
|
||||
m_data[m_fill++] = el;
|
||||
}
|
||||
|
||||
static if( is(ElemType == char) ){
|
||||
void put(dchar el)
|
||||
{
|
||||
if( el < 128 ) put(cast(char)el);
|
||||
else {
|
||||
char[4] buf;
|
||||
auto len = std.utf.encode(buf, el);
|
||||
put(cast(ArrayType)buf[0 .. len]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static if( is(ElemType == wchar) ){
|
||||
void put(dchar el)
|
||||
{
|
||||
if( el < 128 ) put(cast(wchar)el);
|
||||
else {
|
||||
wchar[3] buf;
|
||||
auto len = std.utf.encode(buf, el);
|
||||
put(cast(ArrayType)buf[0 .. len]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void put(ArrayType arr)
|
||||
{
|
||||
m_data[m_fill .. m_fill+arr.length] = (cast(ElemType[])arr)[];
|
||||
m_fill += arr.length;
|
||||
}
|
||||
|
||||
@property ArrayType data() { return cast(ArrayType)m_data[0 .. m_fill]; }
|
||||
|
||||
static if (!is(E == immutable)) {
|
||||
void reset() { m_fill = 0; }
|
||||
}
|
||||
}
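// Not part of the original file: a short usage sketch of FixedAppender as defined
// above, a fixed-capacity appender that stores its elements in-place.
unittest {
	FixedAppender!(string, 16) app;
	app.put("vibe");
	app.put('.');
	app.put("d");
	assert(app.data == "vibe.d");
	app.clear();
	assert(app.data == "");
}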
|
||||
|
||||
|
||||
/**
|
||||
TODO: clear ring buffer fields upon removal (to run struct destructors, if T is a struct)
|
||||
*/
|
||||
struct FixedRingBuffer(T, size_t N = 0, bool INITIALIZE = true) {
|
||||
private {
|
||||
static if( N > 0 ) {
|
||||
static if (INITIALIZE) T[N] m_buffer;
|
||||
else T[N] m_buffer = void;
|
||||
} else T[] m_buffer;
|
||||
size_t m_start = 0;
|
||||
size_t m_fill = 0;
|
||||
}
|
||||
|
||||
static if( N == 0 ){
|
||||
bool m_freeOnDestruct;
|
||||
this(size_t capacity) { m_buffer = new T[capacity]; }
|
||||
~this() { if (m_freeOnDestruct && m_buffer.length > 0) delete m_buffer; }
|
||||
}
|
||||
|
||||
@property bool empty() const { return m_fill == 0; }
|
||||
|
||||
@property bool full() const { return m_fill == m_buffer.length; }
|
||||
|
||||
@property size_t length() const { return m_fill; }
|
||||
|
||||
@property size_t freeSpace() const { return m_buffer.length - m_fill; }
|
||||
|
||||
@property size_t capacity() const { return m_buffer.length; }
|
||||
|
||||
static if( N == 0 ){
|
||||
deprecated @property void freeOnDestruct(bool b) { m_freeOnDestruct = b; }
|
||||
|
||||
/// Resets the capacity to zero and explicitly frees the memory for the buffer.
|
||||
void dispose()
|
||||
{
|
||||
delete m_buffer;
|
||||
m_buffer = null;
|
||||
m_start = m_fill = 0;
|
||||
}
|
||||
|
||||
@property void capacity(size_t new_size)
|
||||
{
|
||||
if( m_buffer.length ){
|
||||
auto newbuffer = new T[new_size];
|
||||
auto dst = newbuffer;
|
||||
auto newfill = min(m_fill, new_size);
|
||||
read(dst[0 .. newfill]);
|
||||
if (m_freeOnDestruct && m_buffer.length > 0) delete m_buffer;
|
||||
m_buffer = newbuffer;
|
||||
m_start = 0;
|
||||
m_fill = newfill;
|
||||
} else {
|
||||
if (m_freeOnDestruct && m_buffer.length > 0) delete m_buffer;
|
||||
m_buffer = new T[new_size];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@property ref inout(T) front() inout { assert(!empty); return m_buffer[m_start]; }
|
||||
|
||||
@property ref inout(T) back() inout { assert(!empty); return m_buffer[mod(m_start+m_fill-1)]; }
|
||||
|
||||
void clear()
|
||||
{
|
||||
popFrontN(length);
|
||||
assert(m_fill == 0);
|
||||
m_start = 0;
|
||||
}
|
||||
|
||||
void put()(T itm) { assert(m_fill < m_buffer.length); m_buffer[mod(m_start + m_fill++)] = itm; }
|
||||
void put(TC : T)(TC[] itms)
|
||||
{
|
||||
if( !itms.length ) return;
|
||||
assert(m_fill+itms.length <= m_buffer.length);
|
||||
if( mod(m_start+m_fill) >= mod(m_start+m_fill+itms.length) ){
|
||||
size_t chunk1 = m_buffer.length - (m_start+m_fill);
|
||||
size_t chunk2 = itms.length - chunk1;
|
||||
m_buffer[m_start+m_fill .. m_buffer.length] = itms[0 .. chunk1];
|
||||
m_buffer[0 .. chunk2] = itms[chunk1 .. $];
|
||||
} else {
|
||||
m_buffer[mod(m_start+m_fill) .. mod(m_start+m_fill)+itms.length] = itms[];
|
||||
}
|
||||
m_fill += itms.length;
|
||||
}
|
||||
void putN(size_t n) { assert(m_fill+n <= m_buffer.length); m_fill += n; }
|
||||
|
||||
void popFront() { assert(!empty); m_start = mod(m_start+1); m_fill--; }
|
||||
void popFrontN(size_t n) { assert(length >= n); m_start = mod(m_start + n); m_fill -= n; }
|
||||
|
||||
void popBack() { assert(!empty); m_fill--; }
|
||||
void popBackN(size_t n) { assert(length >= n); m_fill -= n; }
|
||||
|
||||
void removeAt(Range r)
|
||||
{
|
||||
assert(r.m_buffer is m_buffer);
|
||||
if( m_start + m_fill > m_buffer.length ){
|
||||
assert(r.m_start >= m_start && r.m_start < m_buffer.length || r.m_start < mod(m_start+m_fill));
|
||||
if( r.m_start > m_start ){
|
||||
foreach(i; r.m_start .. m_buffer.length-1)
|
||||
m_buffer[i] = m_buffer[i+1];
|
||||
m_buffer[$-1] = m_buffer[0];
|
||||
foreach(i; 0 .. mod(m_start + m_fill - 1))
|
||||
m_buffer[i] = m_buffer[i+1];
|
||||
} else {
|
||||
foreach(i; r.m_start .. mod(m_start + m_fill - 1))
|
||||
m_buffer[i] = m_buffer[i+1];
|
||||
}
|
||||
} else {
|
||||
assert(r.m_start >= m_start && r.m_start < m_start+m_fill);
|
||||
foreach(i; r.m_start .. m_start+m_fill-1)
|
||||
m_buffer[i] = m_buffer[i+1];
|
||||
}
|
||||
m_fill--;
|
||||
destroy(m_buffer[mod(m_start+m_fill)]); // TODO: only call destroy for non-POD T
|
||||
}
|
||||
|
||||
inout(T)[] peek() inout { return m_buffer[m_start .. min(m_start+m_fill, m_buffer.length)]; }
|
||||
T[] peekDst() {
|
||||
if (!m_buffer.length) return null;
|
||||
if( m_start + m_fill < m_buffer.length ) return m_buffer[m_start+m_fill .. $];
|
||||
else return m_buffer[mod(m_start+m_fill) .. m_start];
|
||||
}
|
||||
|
||||
void read(T[] dst)
|
||||
{
|
||||
assert(dst.length <= length);
|
||||
if( !dst.length ) return;
|
||||
if( mod(m_start) >= mod(m_start+dst.length) ){
|
||||
size_t chunk1 = m_buffer.length - m_start;
|
||||
size_t chunk2 = dst.length - chunk1;
|
||||
dst[0 .. chunk1] = m_buffer[m_start .. $];
|
||||
dst[chunk1 .. $] = m_buffer[0 .. chunk2];
|
||||
} else {
|
||||
dst[] = m_buffer[m_start .. m_start+dst.length];
|
||||
}
|
||||
popFrontN(dst.length);
|
||||
}
|
||||
|
||||
int opApply(scope int delegate(ref T itm) del)
|
||||
{
|
||||
if( m_start+m_fill > m_buffer.length ){
|
||||
foreach(i; m_start .. m_buffer.length)
|
||||
if( auto ret = del(m_buffer[i]) )
|
||||
return ret;
|
||||
foreach(i; 0 .. mod(m_start+m_fill))
|
||||
if( auto ret = del(m_buffer[i]) )
|
||||
return ret;
|
||||
} else {
|
||||
foreach(i; m_start .. m_start+m_fill)
|
||||
if( auto ret = del(m_buffer[i]) )
|
||||
return ret;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/// iterate through elements with index
|
||||
int opApply(scope int delegate(size_t i, ref T itm) del)
|
||||
{
|
||||
if( m_start+m_fill > m_buffer.length ){
|
||||
foreach(i; m_start .. m_buffer.length)
|
||||
if( auto ret = del(i - m_start, m_buffer[i]) )
|
||||
return ret;
|
||||
foreach(i; 0 .. mod(m_start+m_fill))
|
||||
if( auto ret = del(i + m_buffer.length - m_start, m_buffer[i]) )
|
||||
return ret;
|
||||
} else {
|
||||
foreach(i; m_start .. m_start+m_fill)
|
||||
if( auto ret = del(i - m_start, m_buffer[i]) )
|
||||
return ret;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
ref inout(T) opIndex(size_t idx) inout { assert(idx < length); return m_buffer[mod(m_start+idx)]; }
|
||||
|
||||
Range opSlice() { return Range(m_buffer, m_start, m_fill); }
|
||||
|
||||
Range opSlice(size_t from, size_t to)
|
||||
{
|
||||
assert(from <= to);
|
||||
assert(to <= m_fill);
|
||||
return Range(m_buffer, mod(m_start+from), to-from);
|
||||
}
|
||||
|
||||
size_t opDollar(size_t dim)() const if(dim == 0) { return length; }
|
||||
|
||||
private size_t mod(size_t n)
|
||||
const {
|
||||
static if( N == 0 ){
|
||||
/*static if(PotOnly){
|
||||
return x & (m_buffer.length-1);
|
||||
} else {*/
|
||||
return n % m_buffer.length;
|
||||
//}
|
||||
} else static if( ((N - 1) & N) == 0 ){
|
||||
return n & (N - 1);
|
||||
} else return n % N;
|
||||
}
|
||||
|
||||
static struct Range {
|
||||
private {
|
||||
T[] m_buffer;
|
||||
size_t m_start;
|
||||
size_t m_length;
|
||||
}
|
||||
|
||||
private this(T[] buffer, size_t start, size_t length)
|
||||
{
|
||||
m_buffer = buffer;
|
||||
m_start = start;
|
||||
m_length = length;
|
||||
}
|
||||
|
||||
@property bool empty() const { return m_length == 0; }
|
||||
|
||||
@property inout(T) front() inout { assert(!empty); return m_buffer[m_start]; }
|
||||
|
||||
void popFront()
|
||||
{
|
||||
assert(!empty);
|
||||
m_start++;
|
||||
m_length--;
|
||||
if( m_start >= m_buffer.length )
|
||||
m_start = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
unittest {
|
||||
static assert(isInputRange!(FixedRingBuffer!int) && isOutputRange!(FixedRingBuffer!int, int));
|
||||
|
||||
FixedRingBuffer!(int, 5) buf;
|
||||
assert(buf.length == 0 && buf.freeSpace == 5); buf.put(1); // |1 . . . .
|
||||
assert(buf.length == 1 && buf.freeSpace == 4); buf.put(2); // |1 2 . . .
|
||||
assert(buf.length == 2 && buf.freeSpace == 3); buf.put(3); // |1 2 3 . .
|
||||
assert(buf.length == 3 && buf.freeSpace == 2); buf.put(4); // |1 2 3 4 .
|
||||
assert(buf.length == 4 && buf.freeSpace == 1); buf.put(5); // |1 2 3 4 5
|
||||
assert(buf.length == 5 && buf.freeSpace == 0);
|
||||
assert(buf.front == 1);
|
||||
buf.popFront(); // .|2 3 4 5
|
||||
assert(buf.front == 2);
|
||||
buf.popFrontN(2); // . . .|4 5
|
||||
assert(buf.front == 4);
|
||||
assert(buf.length == 2 && buf.freeSpace == 3);
|
||||
buf.put([6, 7, 8]); // 6 7 8|4 5
|
||||
assert(buf.length == 5 && buf.freeSpace == 0);
|
||||
int[5] dst;
|
||||
buf.read(dst); // . . .|. .
|
||||
assert(dst == [4, 5, 6, 7, 8]);
|
||||
assert(buf.length == 0 && buf.freeSpace == 5);
|
||||
buf.put([1, 2]); // . . .|1 2
|
||||
assert(buf.length == 2 && buf.freeSpace == 3);
|
||||
buf.read(dst[0 .. 2]); //|. . . . .
|
||||
assert(dst[0 .. 2] == [1, 2]);
|
||||
|
||||
buf.put([0, 0, 0, 1, 2]); //|0 0 0 1 2
|
||||
buf.popFrontN(2); //. .|0 1 2
|
||||
buf.put([3, 4]); // 3 4|0 1 2
|
||||
foreach(i, item; buf)
|
||||
{
|
||||
assert(i == item);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Write a single batch and drain
|
||||
struct BatchBuffer(T, size_t N = 0) {
|
||||
private {
|
||||
size_t m_fill;
|
||||
size_t m_first;
|
||||
static if (N == 0) T[] m_buffer;
|
||||
else T[N] m_buffer;
|
||||
}
|
||||
|
||||
static if (N == 0) {
|
||||
@property void capacity(size_t n) { assert(n >= m_fill); m_buffer.length = n; }
|
||||
}
|
||||
|
||||
@property bool empty() { return m_first >= m_fill; }
|
||||
@property size_t capacity() const { return m_buffer.length; }
|
||||
@property size_t length() { return m_fill - m_first; }
|
||||
@property ref inout(T) front() inout { assert(!empty); return m_buffer[m_first]; }
|
||||
void popFront() { assert(!empty); m_first++; }
|
||||
void popFrontN(size_t n) { assert(n <= length); m_first += n; }
|
||||
inout(T)[] peek() inout { return m_buffer[m_first .. m_fill]; }
|
||||
T[] peekDst() { assert(empty); return m_buffer; }
|
||||
void putN(size_t n) { assert(empty && n <= m_buffer.length); m_fill = n; }
|
||||
void putN(T[] elems) { assert(empty && elems.length <= m_buffer.length); m_buffer[0 .. elems.length] = elems[]; m_fill = elems.length; }
|
||||
}
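// Not part of the original file: a minimal usage sketch of BatchBuffer as defined
// above. A single batch is written through peekDst/putN and then drained with the
// range primitives.
unittest {
	BatchBuffer!(int, 4) buf;
	assert(buf.empty && buf.capacity == 4);
	auto dst = buf.peekDst();      // while empty, the whole buffer is writable
	dst[0] = 1; dst[1] = 2;
	buf.putN(2);                   // commit the batch that was just written
	assert(buf.length == 2 && buf.front == 1);
	buf.popFront();
	assert(buf.front == 2);
	buf.popFrontN(1);
	assert(buf.empty);
}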
|
||||
|
||||
|
||||
struct ArraySet(Key)
|
||||
{
|
||||
private {
|
||||
Key[4] m_staticEntries;
|
||||
Key[] m_entries;
|
||||
}
|
||||
|
||||
@property ArraySet dup()
|
||||
{
|
||||
return ArraySet(m_staticEntries, m_entries.dup);
|
||||
}
|
||||
|
||||
bool opBinaryRight(string op)(Key key) if (op == "in") { return contains(key); }
|
||||
|
||||
int opApply(int delegate(ref Key) del)
|
||||
{
|
||||
foreach (ref k; m_staticEntries)
|
||||
if (k != Key.init)
|
||||
if (auto ret = del(k))
|
||||
return ret;
|
||||
foreach (ref k; m_entries)
|
||||
if (k != Key.init)
|
||||
if (auto ret = del(k))
|
||||
return ret;
|
||||
return 0;
|
||||
}
|
||||
|
||||
bool contains(Key key)
|
||||
const {
|
||||
foreach (ref k; m_staticEntries) if (k == key) return true;
|
||||
foreach (ref k; m_entries) if (k == key) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
void insert(Key key)
|
||||
{
|
||||
if (contains(key)) return;
|
||||
foreach (ref k; m_staticEntries)
|
||||
if (k == Key.init) {
|
||||
k = key;
|
||||
return;
|
||||
}
|
||||
foreach (ref k; m_entries)
|
||||
if (k == Key.init) {
|
||||
k = key;
|
||||
return;
|
||||
}
|
||||
m_entries ~= key;
|
||||
}
|
||||
|
||||
void remove(Key key)
|
||||
{
|
||||
foreach (ref k; m_staticEntries) if (k == key) { k = Key.init; return; }
|
||||
foreach (ref k; m_entries) if (k == key) { k = Key.init; return; }
|
||||
}
|
||||
}
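// Not part of the original file: a brief usage sketch of ArraySet as defined above.
// Note that Key.init (0 for int) marks unused slots, so it cannot be stored as a
// regular element.
unittest {
	ArraySet!int set;
	set.insert(2);
	set.insert(3);
	set.insert(2); // duplicate inserts are no-ops
	assert(2 in set);
	assert(set.contains(3));
	set.remove(2);
	assert(!set.contains(2) && 3 in set);
}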
|
45
source/vibe/internal/async.d
Normal file
@@ -0,0 +1,45 @@
module vibe.internal.async;

import std.traits : ParameterTypeTuple;
import std.typecons : tuple;
import vibe.core.core;
import vibe.core.log;
import core.time : Duration, seconds;


auto asyncAwait(string method, Object, ARGS...)(Object object, ARGS args)
{
	alias CB = ParameterTypeTuple!(__traits(getMember, Object, method))[$-1];
	alias CBTypes = ParameterTypeTuple!CB;

	bool fired = false;
	CBTypes ret;
	Task t;

	void callback(CBTypes params)
	@safe nothrow {
		logTrace("Got result.");
		fired = true;
		ret = params;
		if (t != Task.init)
			resumeTask(t);
	}

	logTrace("Calling %s...", method);
	__traits(getMember, object, method)(args, &callback);
	if (!fired) {
		logTrace("Need to wait...");
		t = Task.getThis();
		do yieldForEvent();
		while (!fired);
	}
	logTrace("Return result.");
	return tuple(ret);
}

auto asyncAwait(string method, Object, ARGS...)(Duration timeout, Object object, ARGS args)
{
	assert(timeout >= 0.seconds);
	// forward to the blocking overload (the method name must be passed along explicitly)
	if (timeout == Duration.max) return asyncAwait!method(object, args);
	else assert(false, "TODO!");
}
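// Not part of the original file: a hypothetical usage sketch of asyncAwait. The
// Connection class and its callback-based connect method are invented for
// illustration; any object whose method takes a completion callback as its last
// parameter fits the pattern. Here the callback fires synchronously, so no task
// needs to be suspended.
unittest {
	static class Connection {
		alias ConnectCallback = void delegate(bool success) @safe nothrow;
		void connect(string host, ConnectCallback on_done) { on_done(true); }
	}

	auto conn = new Connection;
	// returns the callback arguments as a tuple once the callback has fired
	auto result = asyncAwait!"connect"(conn, "example.org");
	assert(result[0] == true);
}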
375
source/vibe/internal/hashmap.d
Normal file
@@ -0,0 +1,375 @@
|
|||
/**
|
||||
Internal hash map implementation.
|
||||
|
||||
Copyright: © 2013 RejectedSoftware e.K.
|
||||
License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file.
|
||||
Authors: Sönke Ludwig
|
||||
*/
|
||||
module vibe.internal.hashmap;
|
||||
|
||||
import vibe.internal.memory;
|
||||
|
||||
import std.conv : emplace;
|
||||
import std.traits;
|
||||
|
||||
|
||||
struct DefaultHashMapTraits(Key) {
|
||||
enum clearValue = Key.init;
|
||||
static bool equals(in Key a, in Key b)
|
||||
{
|
||||
static if (is(Key == class)) return a is b;
|
||||
else return a == b;
|
||||
}
|
||||
static size_t hashOf(in ref Key k)
|
||||
{
|
||||
static if (is(Key == class) && &Key.toHash == &Object.toHash)
|
||||
return cast(size_t)cast(void*)k;
|
||||
else static if (__traits(compiles, Key.init.toHash()))
|
||||
return k.toHash();
|
||||
else static if (__traits(compiles, Key.init.toHashShared()))
|
||||
return k.toHashShared();
|
||||
else {
|
||||
// evil casts to be able to get the most basic operations of
|
||||
// HashMap nothrow and @nogc
|
||||
static size_t hashWrapper(in ref Key k) {
|
||||
static typeinfo = typeid(Key);
|
||||
return typeinfo.getHash(&k);
|
||||
}
|
||||
static @nogc nothrow size_t properlyTypedWrapper(in ref Key k) { return 0; }
|
||||
return (cast(typeof(&properlyTypedWrapper))&hashWrapper)(k);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct HashMap(TKey, TValue, Traits = DefaultHashMapTraits!TKey)
|
||||
{
|
||||
import vibe.internal.traits : isOpApplyDg;
|
||||
|
||||
alias Key = TKey;
|
||||
alias Value = TValue;
|
||||
|
||||
struct TableEntry {
|
||||
UnConst!Key key = Traits.clearValue;
|
||||
Value value;
|
||||
|
||||
this(Key key, Value value) { this.key = cast(UnConst!Key)key; this.value = value; }
|
||||
}
|
||||
private {
|
||||
TableEntry[] m_table; // NOTE: capacity is always POT
|
||||
size_t m_length;
|
||||
Allocator m_allocator;
|
||||
bool m_resizing;
|
||||
}
|
||||
|
||||
this(Allocator allocator)
|
||||
{
|
||||
m_allocator = allocator;
|
||||
}
|
||||
|
||||
~this()
|
||||
{
|
||||
clear();
|
||||
if (m_table.ptr !is null) freeArray(m_allocator, m_table);
|
||||
}
|
||||
|
||||
@disable this(this);
|
||||
|
||||
@property size_t length() const { return m_length; }
|
||||
|
||||
void remove(Key key)
|
||||
{
|
||||
auto idx = findIndex(key);
|
||||
assert (idx != size_t.max, "Removing non-existent element.");
|
||||
auto i = idx;
|
||||
while (true) {
|
||||
m_table[i].key = Traits.clearValue;
|
||||
m_table[i].value = Value.init;
|
||||
|
||||
size_t j = i, r;
|
||||
do {
|
||||
if (++i >= m_table.length) i -= m_table.length;
|
||||
if (Traits.equals(m_table[i].key, Traits.clearValue)) {
|
||||
m_length--;
|
||||
return;
|
||||
}
|
||||
r = Traits.hashOf(m_table[i].key) & (m_table.length-1);
|
||||
} while ((j<r && r<=i) || (i<j && j<r) || (r<=i && i<j));
|
||||
m_table[j] = m_table[i];
|
||||
}
|
||||
}
|
||||
|
||||
Value get(Key key, lazy Value default_value = Value.init)
|
||||
{
|
||||
auto idx = findIndex(key);
|
||||
if (idx == size_t.max) return default_value;
|
||||
return m_table[idx].value;
|
||||
}
|
||||
|
||||
/// Workaround #12647
|
||||
package Value getNothrow(Key key, Value default_value = Value.init)
|
||||
{
|
||||
auto idx = findIndex(key);
|
||||
if (idx == size_t.max) return default_value;
|
||||
return m_table[idx].value;
|
||||
}
|
||||
|
||||
static if (!is(typeof({ Value v; const(Value) vc; v = vc; }))) {
|
||||
const(Value) get(Key key, lazy const(Value) default_value = Value.init)
|
||||
{
|
||||
auto idx = findIndex(key);
|
||||
if (idx == size_t.max) return default_value;
|
||||
return m_table[idx].value;
|
||||
}
|
||||
}
|
||||
|
||||
void clear()
|
||||
{
|
||||
foreach (i; 0 .. m_table.length)
|
||||
if (!Traits.equals(m_table[i].key, Traits.clearValue)) {
|
||||
m_table[i].key = Traits.clearValue;
|
||||
m_table[i].value = Value.init;
|
||||
}
|
||||
m_length = 0;
|
||||
}
|
||||
|
||||
void opIndexAssign(Value value, Key key)
|
||||
{
|
||||
assert(!Traits.equals(key, Traits.clearValue), "Inserting clear value into hash map.");
|
||||
grow(1);
|
||||
auto i = findInsertIndex(key);
|
||||
if (!Traits.equals(m_table[i].key, key)) m_length++;
|
||||
m_table[i] = TableEntry(key, value);
|
||||
}
|
||||
|
||||
ref inout(Value) opIndex(Key key)
|
||||
inout {
|
||||
auto idx = findIndex(key);
|
||||
assert (idx != size_t.max, "Accessing non-existent key.");
|
||||
return m_table[idx].value;
|
||||
}
|
||||
|
||||
inout(Value)* opBinaryRight(string op)(Key key)
|
||||
inout if (op == "in") {
|
||||
auto idx = findIndex(key);
|
||||
if (idx == size_t.max) return null;
|
||||
return &m_table[idx].value;
|
||||
}
|
||||
|
||||
int opApply(DG)(scope DG del) if (isOpApplyDg!(DG, Key, Value))
|
||||
{
|
||||
import std.traits : arity;
|
||||
foreach (i; 0 .. m_table.length)
|
||||
if (!Traits.equals(m_table[i].key, Traits.clearValue)) {
|
||||
static assert(arity!del >= 1 && arity!del <= 2,
|
||||
"isOpApplyDg should have prevented this");
|
||||
static if (arity!del == 1) {
|
||||
if (int ret = del(m_table[i].value))
|
||||
return ret;
|
||||
} else
|
||||
if (int ret = del(m_table[i].key, m_table[i].value))
|
||||
return ret;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
private size_t findIndex(Key key)
|
||||
const {
|
||||
if (m_length == 0) return size_t.max;
|
||||
size_t start = Traits.hashOf(key) & (m_table.length-1);
|
||||
auto i = start;
|
||||
while (!Traits.equals(m_table[i].key, key)) {
|
||||
if (Traits.equals(m_table[i].key, Traits.clearValue)) return size_t.max;
|
||||
if (++i >= m_table.length) i -= m_table.length;
|
||||
if (i == start) return size_t.max;
|
||||
}
|
||||
return i;
|
||||
}
|
||||
|
||||
private size_t findInsertIndex(Key key)
|
||||
const {
|
||||
auto hash = Traits.hashOf(key);
|
||||
size_t target = hash & (m_table.length-1);
|
||||
auto i = target;
|
||||
while (!Traits.equals(m_table[i].key, Traits.clearValue) && !Traits.equals(m_table[i].key, key)) {
|
||||
if (++i >= m_table.length) i -= m_table.length;
|
||||
assert (i != target, "No free bucket found, HashMap full!?");
|
||||
}
|
||||
return i;
|
||||
}
|
||||
|
||||
private void grow(size_t amount)
|
||||
{
|
||||
auto newsize = m_length + amount;
|
||||
if (newsize < (m_table.length*2)/3) return;
|
||||
auto newcap = m_table.length ? m_table.length : 16;
|
||||
while (newsize >= (newcap*2)/3) newcap *= 2;
|
||||
resize(newcap);
|
||||
}
|
||||
|
||||
private void resize(size_t new_size)
|
||||
@trusted {
|
||||
assert(!m_resizing);
|
||||
m_resizing = true;
|
||||
scope(exit) m_resizing = false;
|
||||
|
||||
if (!m_allocator) m_allocator = defaultAllocator();
|
||||
|
||||
uint pot = 0;
|
||||
while (new_size > 1) pot++, new_size /= 2;
|
||||
new_size = 1 << pot;
|
||||
|
||||
auto oldtable = m_table;
|
||||
|
||||
// allocate the new array, automatically initializes with empty entries (Traits.clearValue)
|
||||
m_table = allocArray!TableEntry(m_allocator, new_size);
|
||||
|
||||
// perform a move operation of all non-empty elements from the old array to the new one
|
||||
foreach (ref el; oldtable)
|
||||
if (!Traits.equals(el.key, Traits.clearValue)) {
|
||||
auto idx = findInsertIndex(el.key);
|
||||
(cast(ubyte[])(&m_table[idx])[0 .. 1])[] = (cast(ubyte[])(&el)[0 .. 1])[];
|
||||
}
|
||||
|
||||
// all elements have been moved to the new array, so free the old one without calling destructors
|
||||
if (oldtable !is null) freeArray(m_allocator, oldtable, false);
|
||||
}
|
||||
}
|
||||
|
||||
unittest {
|
||||
import std.conv;
|
||||
|
||||
HashMap!(string, string) map;
|
||||
|
||||
foreach (i; 0 .. 100) {
|
||||
map[to!string(i)] = to!string(i) ~ "+";
|
||||
assert(map.length == i+1);
|
||||
}
|
||||
|
||||
foreach (i; 0 .. 100) {
|
||||
auto str = to!string(i);
|
||||
auto pe = str in map;
|
||||
assert(pe !is null && *pe == str ~ "+");
|
||||
assert(map[str] == str ~ "+");
|
||||
}
|
||||
|
||||
foreach (i; 0 .. 50) {
|
||||
map.remove(to!string(i));
|
||||
assert(map.length == 100-i-1);
|
||||
}
|
||||
|
||||
foreach (i; 50 .. 100) {
|
||||
auto str = to!string(i);
|
||||
auto pe = str in map;
|
||||
assert(pe !is null && *pe == str ~ "+");
|
||||
assert(map[str] == str ~ "+");
|
||||
}
|
||||
}
|
||||
|
||||
// test for nothrow/@nogc compliance
|
||||
static if (__VERSION__ >= 2066)
|
||||
nothrow unittest {
|
||||
HashMap!(int, int) map1;
|
||||
HashMap!(string, string) map2;
|
||||
map1[1] = 2;
|
||||
map2["1"] = "2";
|
||||
|
||||
@nogc nothrow void performNoGCOps()
|
||||
{
|
||||
foreach (int v; map1) {}
|
||||
foreach (int k, int v; map1) {}
|
||||
assert(1 in map1);
|
||||
assert(map1.length == 1);
|
||||
assert(map1[1] == 2);
|
||||
assert(map1.getNothrow(1, -1) == 2);
|
||||
|
||||
foreach (string v; map2) {}
|
||||
foreach (string k, string v; map2) {}
|
||||
assert("1" in map2);
|
||||
assert(map2.length == 1);
|
||||
assert(map2["1"] == "2");
|
||||
assert(map2.getNothrow("1", "") == "2");
|
||||
}
|
||||
|
||||
performNoGCOps();
|
||||
}
|
||||
|
||||
unittest { // test for proper use of constructor/post-blit/destructor
|
||||
static struct Test {
|
||||
static size_t constructedCounter = 0;
|
||||
bool constructed = false;
|
||||
this(int) { constructed = true; constructedCounter++; }
|
||||
this(this) { if (constructed) constructedCounter++; }
|
||||
~this() { if (constructed) constructedCounter--; }
|
||||
}
|
||||
|
||||
assert(Test.constructedCounter == 0);
|
||||
|
||||
{ // sanity check
|
||||
Test t;
|
||||
assert(Test.constructedCounter == 0);
|
||||
t = Test(1);
|
||||
assert(Test.constructedCounter == 1);
|
||||
auto u = t;
|
||||
assert(Test.constructedCounter == 2);
|
||||
t = Test.init;
|
||||
assert(Test.constructedCounter == 1);
|
||||
}
|
||||
assert(Test.constructedCounter == 0);
|
||||
|
||||
{ // basic insertion and hash map resizing
|
||||
HashMap!(int, Test) map;
|
||||
foreach (i; 1 .. 67) {
|
||||
map[i] = Test(1);
|
||||
assert(Test.constructedCounter == i);
|
||||
}
|
||||
}
|
||||
|
||||
assert(Test.constructedCounter == 0);
|
||||
|
||||
{ // test clear() and overwriting existing entries
|
||||
HashMap!(int, Test) map;
|
||||
foreach (i; 1 .. 67) {
|
||||
map[i] = Test(1);
|
||||
assert(Test.constructedCounter == i);
|
||||
}
|
||||
map.clear();
|
||||
foreach (i; 1 .. 67) {
|
||||
map[i] = Test(1);
|
||||
assert(Test.constructedCounter == i);
|
||||
}
|
||||
foreach (i; 1 .. 67) {
|
||||
map[i] = Test(1);
|
||||
assert(Test.constructedCounter == 66);
|
||||
}
|
||||
}
|
||||
|
||||
assert(Test.constructedCounter == 0);
|
||||
|
||||
{ // test removing entries and adding entries after remove
|
||||
HashMap!(int, Test) map;
|
||||
foreach (i; 1 .. 67) {
|
||||
map[i] = Test(1);
|
||||
assert(Test.constructedCounter == i);
|
||||
}
|
||||
foreach (i; 1 .. 33) {
|
||||
map.remove(i);
|
||||
assert(Test.constructedCounter == 66 - i);
|
||||
}
|
||||
foreach (i; 67 .. 130) {
|
||||
map[i] = Test(1);
|
||||
assert(Test.constructedCounter == i - 32);
|
||||
}
|
||||
}
|
||||
|
||||
assert(Test.constructedCounter == 0);
|
||||
}
|
||||
|
||||
private template UnConst(T) {
|
||||
static if (is(T U == const(U))) {
|
||||
alias UnConst = U;
|
||||
} else static if (is(T V == immutable(V))) {
|
||||
alias UnConst = V;
|
||||
} else alias UnConst = T;
|
||||
}
|
||||
|
||||
static if (__VERSION__ < 2066) private static bool nogc() { return false; }
|
872
source/vibe/internal/memory.d
Normal file
@@ -0,0 +1,872 @@
|
|||
/**
|
||||
Utility functions for memory management
|
||||
|
||||
Note that this module is currently a big sandbox for testing allocation related code.
Nothing here, including the interfaces, is final; much of it is experimental.
|
||||
|
||||
Copyright: © 2012-2013 RejectedSoftware e.K.
|
||||
License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file.
|
||||
Authors: Sönke Ludwig
|
||||
*/
|
||||
module vibe.internal.memory;
|
||||
|
||||
import vibe.internal.traits : synchronizedIsNothrow;
|
||||
|
||||
import core.exception : OutOfMemoryError;
|
||||
import core.stdc.stdlib;
|
||||
import core.memory;
|
||||
import std.conv;
|
||||
import std.exception : enforceEx;
|
||||
import std.traits;
|
||||
import std.algorithm;
|
||||
|
||||
Allocator defaultAllocator() nothrow
|
||||
{
|
||||
version(VibeManualMemoryManagement){
|
||||
return manualAllocator();
|
||||
} else {
|
||||
static __gshared Allocator alloc;
|
||||
if (!alloc) {
|
||||
alloc = new GCAllocator;
|
||||
//alloc = new AutoFreeListAllocator(alloc);
|
||||
//alloc = new DebugAllocator(alloc);
|
||||
alloc = new LockAllocator(alloc);
|
||||
}
|
||||
return alloc;
|
||||
}
|
||||
}
|
||||
|
||||
Allocator manualAllocator() nothrow
|
||||
{
|
||||
static __gshared Allocator alloc;
|
||||
if( !alloc ){
|
||||
alloc = new MallocAllocator;
|
||||
alloc = new AutoFreeListAllocator(alloc);
|
||||
//alloc = new DebugAllocator(alloc);
|
||||
alloc = new LockAllocator(alloc);
|
||||
}
|
||||
return alloc;
|
||||
}
|
||||
|
||||
Allocator threadLocalAllocator() nothrow
|
||||
{
|
||||
static Allocator alloc;
|
||||
if (!alloc) {
|
||||
version(VibeManualMemoryManagement) alloc = new MallocAllocator;
|
||||
else alloc = new GCAllocator;
|
||||
alloc = new AutoFreeListAllocator(alloc);
|
||||
// alloc = new DebugAllocator(alloc);
|
||||
}
|
||||
return alloc;
|
||||
}
|
||||
|
||||
Allocator threadLocalManualAllocator() nothrow
|
||||
{
|
||||
static Allocator alloc;
|
||||
if (!alloc) {
|
||||
alloc = new MallocAllocator;
|
||||
alloc = new AutoFreeListAllocator(alloc);
|
||||
// alloc = new DebugAllocator(alloc);
|
||||
}
|
||||
return alloc;
|
||||
}
|
||||
|
||||
auto allocObject(T, bool MANAGED = true, ARGS...)(Allocator allocator, ARGS args)
|
||||
{
|
||||
auto mem = allocator.alloc(AllocSize!T);
|
||||
static if( MANAGED ){
|
||||
static if( hasIndirections!T )
|
||||
GC.addRange(mem.ptr, mem.length);
|
||||
return internalEmplace!T(mem, args);
|
||||
}
|
||||
else static if( is(T == class) ) return cast(T)mem.ptr;
|
||||
else return cast(T*)mem.ptr;
|
||||
}
|
||||
|
||||
T[] allocArray(T, bool MANAGED = true)(Allocator allocator, size_t n)
|
||||
{
|
||||
auto mem = allocator.alloc(T.sizeof * n);
|
||||
auto ret = cast(T[])mem;
|
||||
static if( MANAGED ){
|
||||
static if( hasIndirections!T )
|
||||
GC.addRange(mem.ptr, mem.length);
|
||||
// TODO: use memset for class, pointers and scalars
|
||||
foreach (ref el; ret) {
|
||||
internalEmplace!T(cast(void[])((&el)[0 .. 1]));
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
void freeArray(T, bool MANAGED = true)(Allocator allocator, ref T[] array, bool call_destructors = true)
|
||||
{
|
||||
static if (MANAGED) {
|
||||
static if (hasIndirections!T)
|
||||
GC.removeRange(array.ptr);
|
||||
static if (hasElaborateDestructor!T)
|
||||
if (call_destructors)
|
||||
foreach_reverse (ref el; array)
|
||||
destroy(el);
|
||||
}
|
||||
allocator.free(cast(void[])array);
|
||||
array = null;
|
||||
}
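// Not part of the original file: a small sketch showing the allocObject helper
// together with the default allocator. The Point class is made up for illustration.
unittest {
	static class Point {
		int x, y;
		this(int x, int y) { this.x = x; this.y = y; }
	}

	auto alloc = defaultAllocator();
	auto p = allocObject!Point(alloc, 2, 3);
	assert(p.x == 2 && p.y == 3);
}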
|
||||
|
||||
|
||||
interface Allocator {
|
||||
nothrow:
|
||||
enum size_t alignment = 0x10;
|
||||
enum size_t alignmentMask = alignment-1;
|
||||
|
||||
void[] alloc(size_t sz)
|
||||
out { assert((cast(size_t)__result.ptr & alignmentMask) == 0, "alloc() returned misaligned data."); }
|
||||
|
||||
void[] realloc(void[] mem, size_t new_sz)
|
||||
in {
|
||||
assert(mem.ptr !is null, "realloc() called with null array.");
|
||||
assert((cast(size_t)mem.ptr & alignmentMask) == 0, "misaligned pointer passed to realloc().");
|
||||
}
|
||||
out { assert((cast(size_t)__result.ptr & alignmentMask) == 0, "realloc() returned misaligned data."); }
|
||||
|
||||
void free(void[] mem)
|
||||
in {
|
||||
assert(mem.ptr !is null, "free() called with null array.");
|
||||
assert((cast(size_t)mem.ptr & alignmentMask) == 0, "misaligned pointer passed to free().");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
Simple proxy allocator protecting its base allocator with a mutex.
|
||||
*/
|
||||
class LockAllocator : Allocator {
|
||||
private {
|
||||
Allocator m_base;
|
||||
}
|
||||
this(Allocator base) nothrow { m_base = base; }
|
||||
void[] alloc(size_t sz) {
|
||||
static if (!synchronizedIsNothrow)
|
||||
scope (failure) assert(0, "Internal error: function should be nothrow");
|
||||
|
||||
synchronized (this)
|
||||
return m_base.alloc(sz);
|
||||
}
|
||||
void[] realloc(void[] mem, size_t new_sz)
|
||||
in {
|
||||
assert(mem.ptr !is null, "realloc() called with null array.");
|
||||
assert((cast(size_t)mem.ptr & alignmentMask) == 0, "misaligned pointer passed to realloc().");
|
||||
}
|
||||
body {
|
||||
static if (!synchronizedIsNothrow)
|
||||
scope (failure) assert(0, "Internal error: function should be nothrow");
|
||||
|
||||
synchronized(this)
|
||||
return m_base.realloc(mem, new_sz);
|
||||
}
|
||||
void free(void[] mem)
|
||||
in {
|
||||
assert(mem.ptr !is null, "free() called with null array.");
|
||||
assert((cast(size_t)mem.ptr & alignmentMask) == 0, "misaligned pointer passed to free().");
|
||||
}
|
||||
body {
|
||||
static if (!synchronizedIsNothrow)
|
||||
scope (failure) assert(0, "Internal error: function should be nothrow");
|
||||
synchronized(this)
|
||||
m_base.free(mem);
|
||||
}
|
||||
}
|
||||
|
||||
final class DebugAllocator : Allocator {
|
||||
import vibe.internal.hashmap : HashMap;
|
||||
private {
|
||||
Allocator m_baseAlloc;
|
||||
HashMap!(void*, size_t) m_blocks;
|
||||
size_t m_bytes;
|
||||
size_t m_maxBytes;
|
||||
}
|
||||
|
||||
this(Allocator base_allocator) nothrow
|
||||
{
|
||||
m_baseAlloc = base_allocator;
|
||||
m_blocks = HashMap!(void*, size_t)(manualAllocator());
|
||||
}
|
||||
|
||||
@property size_t allocatedBlockCount() const { return m_blocks.length; }
|
||||
@property size_t bytesAllocated() const { return m_bytes; }
|
||||
@property size_t maxBytesAllocated() const { return m_maxBytes; }
|
||||
|
||||
void[] alloc(size_t sz)
|
||||
{
|
||||
auto ret = m_baseAlloc.alloc(sz);
|
||||
assert(ret.length == sz, "base.alloc() returned block with wrong size.");
|
||||
assert(m_blocks.getNothrow(ret.ptr, size_t.max) == size_t.max, "base.alloc() returned block that is already allocated.");
|
||||
m_blocks[ret.ptr] = sz;
|
||||
m_bytes += sz;
|
||||
if( m_bytes > m_maxBytes ){
|
||||
m_maxBytes = m_bytes;
|
||||
logDebug_("New allocation maximum: %d (%d blocks)", m_maxBytes, m_blocks.length);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
void[] realloc(void[] mem, size_t new_size)
|
||||
{
|
||||
auto sz = m_blocks.getNothrow(mem.ptr, size_t.max);
|
||||
assert(sz != size_t.max, "realloc() called with non-allocated pointer.");
|
||||
assert(sz == mem.length, "realloc() called with block of wrong size.");
|
||||
auto ret = m_baseAlloc.realloc(mem, new_size);
|
||||
assert(ret.length == new_size, "base.realloc() returned block with wrong size.");
|
||||
assert(ret.ptr is mem.ptr || m_blocks.getNothrow(ret.ptr, size_t.max) == size_t.max, "base.realloc() returned block that is already allocated.");
|
||||
m_bytes -= sz;
|
||||
m_blocks.remove(mem.ptr);
|
||||
m_blocks[ret.ptr] = new_size;
|
||||
m_bytes += new_size;
|
||||
return ret;
|
||||
}
|
||||
void free(void[] mem)
|
||||
{
|
||||
auto sz = m_blocks.getNothrow(mem.ptr, size_t.max);
|
||||
assert(sz != size_t.max, "free() called with non-allocated object.");
|
||||
assert(sz == mem.length, "free() called with block of wrong size.");
|
||||
m_baseAlloc.free(mem);
|
||||
m_bytes -= sz;
|
||||
m_blocks.remove(mem.ptr);
|
||||
}
|
||||
}
|
||||
|
||||
final class MallocAllocator : Allocator {
|
||||
void[] alloc(size_t sz)
|
||||
{
|
||||
static err = new immutable OutOfMemoryError;
|
||||
auto ptr = .malloc(sz + Allocator.alignment);
|
||||
if (ptr is null) throw err;
|
||||
return adjustPointerAlignment(ptr)[0 .. sz];
|
||||
}
|
||||
|
||||
void[] realloc(void[] mem, size_t new_size)
|
||||
{
|
||||
size_t csz = min(mem.length, new_size);
|
||||
auto p = extractUnalignedPointer(mem.ptr);
|
||||
size_t oldmisalign = mem.ptr - p;
|
||||
|
||||
auto pn = cast(ubyte*).realloc(p, new_size+Allocator.alignment);
|
||||
if (p == pn) return pn[oldmisalign .. new_size+oldmisalign];
|
||||
|
||||
auto pna = cast(ubyte*)adjustPointerAlignment(pn);
|
||||
auto newmisalign = pna - pn;
|
||||
|
||||
// account for changed alignment after realloc (move memory back to aligned position)
|
||||
if (oldmisalign != newmisalign) {
|
||||
if (newmisalign > oldmisalign) {
|
||||
foreach_reverse (i; 0 .. csz)
|
||||
pn[i + newmisalign] = pn[i + oldmisalign];
|
||||
} else {
|
||||
foreach (i; 0 .. csz)
|
||||
pn[i + newmisalign] = pn[i + oldmisalign];
|
||||
}
|
||||
}
|
||||
|
||||
return pna[0 .. new_size];
|
||||
}
|
||||
|
||||
void free(void[] mem)
|
||||
{
|
||||
.free(extractUnalignedPointer(mem.ptr));
|
||||
}
|
||||
}
|
||||
|
||||
final class GCAllocator : Allocator {
|
||||
void[] alloc(size_t sz)
|
||||
{
|
||||
auto mem = GC.malloc(sz+Allocator.alignment);
|
||||
auto alignedmem = adjustPointerAlignment(mem);
|
||||
assert(alignedmem - mem <= Allocator.alignment);
|
||||
auto ret = alignedmem[0 .. sz];
|
||||
ensureValidMemory(ret);
|
||||
return ret;
|
||||
}
|
||||
void[] realloc(void[] mem, size_t new_size)
|
||||
{
|
||||
size_t csz = min(mem.length, new_size);
|
||||
|
||||
auto p = extractUnalignedPointer(mem.ptr);
|
||||
size_t misalign = mem.ptr - p;
|
||||
assert(misalign <= Allocator.alignment);
|
||||
|
||||
void[] ret;
|
||||
auto extended = GC.extend(p, new_size - mem.length, new_size - mem.length);
|
||||
if (extended) {
|
||||
assert(extended >= new_size+Allocator.alignment);
|
||||
ret = p[misalign .. new_size+misalign];
|
||||
} else {
|
||||
ret = alloc(new_size);
|
||||
ret[0 .. csz] = mem[0 .. csz];
|
||||
}
|
||||
ensureValidMemory(ret);
|
||||
return ret;
|
||||
}
|
||||
void free(void[] mem)
|
||||
{
|
||||
// For safety reasons, the GCAllocator should never explicitly free memory.
|
||||
//GC.free(extractUnalignedPointer(mem.ptr));
|
||||
}
|
||||
}
|
||||
|
||||
final class AutoFreeListAllocator : Allocator {
|
||||
import std.typetuple;
|
||||
|
||||
private {
|
||||
enum minExponent = 5;
|
||||
enum freeListCount = 14;
|
||||
FreeListAlloc[freeListCount] m_freeLists;
|
||||
Allocator m_baseAlloc;
|
||||
}
|
||||
|
||||
this(Allocator base_allocator) nothrow
|
||||
{
|
||||
m_baseAlloc = base_allocator;
|
||||
foreach (i; iotaTuple!freeListCount)
|
||||
m_freeLists[i] = new FreeListAlloc(nthFreeListSize!(i), m_baseAlloc);
|
||||
}
|
||||
|
||||
void[] alloc(size_t sz)
|
||||
{
|
||||
auto idx = getAllocatorIndex(sz);
|
||||
return idx < freeListCount ? m_freeLists[idx].alloc()[0 .. sz] : m_baseAlloc.alloc(sz);
|
||||
}
|
||||
|
||||
void[] realloc(void[] data, size_t sz)
|
||||
{
|
||||
auto curidx = getAllocatorIndex(data.length);
|
||||
auto newidx = getAllocatorIndex(sz);
|
||||
|
||||
if (curidx == newidx) {
|
||||
if (curidx == freeListCount) {
|
||||
// forward large blocks to the base allocator
|
||||
return m_baseAlloc.realloc(data, sz);
|
||||
} else {
|
||||
// just grow the slice if it still fits into the free list slot
|
||||
return data.ptr[0 .. sz];
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise re-allocate manually
|
||||
auto newd = alloc(sz);
|
||||
assert(newd.ptr+sz <= data.ptr || newd.ptr >= data.ptr+data.length, "New block overlaps old one!?");
|
||||
auto len = min(data.length, sz);
|
||||
newd[0 .. len] = data[0 .. len];
|
||||
free(data);
|
||||
return newd;
|
||||
}
|
||||
|
||||
void free(void[] data)
|
||||
{
|
||||
//logTrace("AFL free %08X(%s)", data.ptr, data.length);
|
||||
auto idx = getAllocatorIndex(data.length);
|
||||
if (idx < freeListCount) m_freeLists[idx].free(data.ptr[0 .. 1 << (idx + minExponent)]);
|
||||
else m_baseAlloc.free(data);
|
||||
}
|
||||
|
||||
// does a CT-optimized binary search for the right allocator
|
||||
private int getAllocatorIndex(size_t sz)
|
||||
@safe nothrow @nogc {
|
||||
//pragma(msg, getAllocatorIndexStr!(0, freeListCount));
|
||||
return mixin(getAllocatorIndexStr!(0, freeListCount));
|
||||
}
|
||||
|
||||
private template getAllocatorIndexStr(int low, int high)
|
||||
{
|
||||
static if (__VERSION__ <= 2066) import std.string : format;
|
||||
else import std.format : format;
|
||||
static if (low == high) enum getAllocatorIndexStr = format("%s", low);
|
||||
else {
|
||||
enum mid = (low + high) / 2;
|
||||
enum getAllocatorIndexStr =
|
||||
"sz > nthFreeListSize!%s ? %s : %s"
|
||||
.format(mid, getAllocatorIndexStr!(mid+1, high), getAllocatorIndexStr!(low, mid));
|
||||
}
|
||||
}
|
||||
|
||||
unittest {
|
||||
auto a = new AutoFreeListAllocator(null);
|
||||
assert(a.getAllocatorIndex(0) == 0);
|
||||
foreach (i; iotaTuple!freeListCount) {
|
||||
assert(a.getAllocatorIndex(nthFreeListSize!i-1) == i);
|
||||
assert(a.getAllocatorIndex(nthFreeListSize!i) == i);
|
||||
assert(a.getAllocatorIndex(nthFreeListSize!i+1) == i+1);
|
||||
}
|
||||
assert(a.getAllocatorIndex(size_t.max) == freeListCount);
|
||||
}
|
||||
|
||||
private static pure size_t nthFreeListSize(size_t i)() { return 1 << (i + minExponent); }
|
||||
private template iotaTuple(size_t i) {
|
||||
static if (i > 1) alias iotaTuple = TypeTuple!(iotaTuple!(i-1), i-1);
|
||||
else alias iotaTuple = TypeTuple!(0);
|
||||
}
|
||||
}
|
||||
|
||||
final class PoolAllocator : Allocator {
|
||||
static struct Pool { Pool* next; void[] data; void[] remaining; }
|
||||
static struct Destructor { Destructor* next; void function(void*) destructor; void* object; }
|
||||
private {
|
||||
Allocator m_baseAllocator;
|
||||
Pool* m_freePools;
|
||||
Pool* m_fullPools;
|
||||
Destructor* m_destructors;
|
||||
size_t m_poolSize;
|
||||
}
|
||||
|
||||
this(size_t pool_size, Allocator base) nothrow
|
||||
{
|
||||
m_poolSize = pool_size;
|
||||
m_baseAllocator = base;
|
||||
}
|
||||
|
||||
@property size_t totalSize()
|
||||
{
|
||||
size_t amt = 0;
|
||||
for (auto p = m_fullPools; p; p = p.next)
|
||||
amt += p.data.length;
|
||||
for (auto p = m_freePools; p; p = p.next)
|
||||
amt += p.data.length;
|
||||
return amt;
|
||||
}
|
||||
|
||||
@property size_t allocatedSize()
|
||||
{
|
||||
size_t amt = 0;
|
||||
for (auto p = m_fullPools; p; p = p.next)
|
||||
amt += p.data.length;
|
||||
for (auto p = m_freePools; p; p = p.next)
|
||||
amt += p.data.length - p.remaining.length;
|
||||
return amt;
|
||||
}
|
||||
|
||||
void[] alloc(size_t sz)
|
||||
{
|
||||
auto aligned_sz = alignedSize(sz);
|
||||
|
||||
Pool* pprev = null;
|
||||
Pool* p = cast(Pool*)m_freePools;
|
||||
while( p && p.remaining.length < aligned_sz ){
|
||||
pprev = p;
|
||||
p = p.next;
|
||||
}
|
||||
|
||||
if( !p ){
|
||||
auto pmem = m_baseAllocator.alloc(AllocSize!Pool);
|
||||
|
||||
p = emplace!Pool(cast(Pool*)pmem.ptr);
|
||||
p.data = m_baseAllocator.alloc(max(aligned_sz, m_poolSize));
|
||||
p.remaining = p.data;
|
||||
p.next = cast(Pool*)m_freePools;
|
||||
m_freePools = p;
|
||||
pprev = null;
|
||||
}
|
||||
|
||||
auto ret = p.remaining[0 .. aligned_sz];
|
||||
p.remaining = p.remaining[aligned_sz .. $];
|
||||
if( !p.remaining.length ){
|
||||
if( pprev ){
|
||||
pprev.next = p.next;
|
||||
} else {
|
||||
m_freePools = p.next;
|
||||
}
|
||||
p.next = cast(Pool*)m_fullPools;
|
||||
m_fullPools = p;
|
||||
}
|
||||
|
||||
return ret[0 .. sz];
|
||||
}
|
||||
|
||||
void[] realloc(void[] arr, size_t newsize)
|
||||
{
|
||||
auto aligned_sz = alignedSize(arr.length);
|
||||
auto aligned_newsz = alignedSize(newsize);
|
||||
|
||||
if( aligned_newsz <= aligned_sz ) return arr[0 .. newsize]; // TODO: back up remaining
|
||||
|
||||
auto pool = m_freePools;
|
||||
bool last_in_pool = pool && arr.ptr+aligned_sz == pool.remaining.ptr;
|
||||
if( last_in_pool && pool.remaining.length+aligned_sz >= aligned_newsz ){
|
||||
pool.remaining = pool.remaining[aligned_newsz-aligned_sz .. $];
|
||||
arr = arr.ptr[0 .. aligned_newsz];
|
||||
assert(arr.ptr+arr.length == pool.remaining.ptr, "Last block does not align with the remaining space!?");
|
||||
return arr[0 .. newsize];
|
||||
} else {
|
||||
auto ret = alloc(newsize);
|
||||
assert(ret.ptr >= arr.ptr+aligned_sz || ret.ptr+ret.length <= arr.ptr, "New block overlaps old one!?");
|
||||
ret[0 .. min(arr.length, newsize)] = arr[0 .. min(arr.length, newsize)];
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
void free(void[] mem)
|
||||
{
|
||||
}
|
||||
|
||||
void freeAll()
|
||||
{
|
||||
version(VibeManualMemoryManagement){
|
||||
// destroy all initialized objects
|
||||
for (auto d = m_destructors; d; d = d.next)
|
||||
d.destructor(cast(void*)d.object);
|
||||
m_destructors = null;
|
||||
|
||||
// put all full Pools into the free pools list
|
||||
for (Pool* p = cast(Pool*)m_fullPools, pnext; p; p = pnext) {
|
||||
pnext = p.next;
|
||||
p.next = cast(Pool*)m_freePools;
|
||||
m_freePools = cast(Pool*)p;
|
||||
}
|
||||
|
||||
// free up all pools
|
||||
for (Pool* p = cast(Pool*)m_freePools; p; p = p.next)
|
||||
p.remaining = p.data;
|
||||
}
|
||||
}
|
||||
|
||||
void reset()
|
||||
{
|
||||
version(VibeManualMemoryManagement){
|
||||
freeAll();
|
||||
Pool* pnext;
|
||||
for (auto p = cast(Pool*)m_freePools; p; p = pnext) {
|
||||
pnext = p.next;
|
||||
m_baseAllocator.free(p.data);
|
||||
m_baseAllocator.free((cast(void*)p)[0 .. AllocSize!Pool]);
|
||||
}
|
||||
m_freePools = null;
|
||||
}
|
||||
}
|
||||
|
||||
private static destroy(T)(void* ptr)
|
||||
{
|
||||
static if( is(T == class) ) .destroy(cast(T)ptr);
|
||||
else .destroy(*cast(T*)ptr);
|
||||
}
|
||||
}
|
||||
|
||||
final class FreeListAlloc : Allocator
|
||||
{
|
||||
nothrow:
|
||||
private static struct FreeListSlot { FreeListSlot* next; }
|
||||
private {
|
||||
FreeListSlot* m_firstFree = null;
|
||||
size_t m_nalloc = 0;
|
||||
size_t m_nfree = 0;
|
||||
Allocator m_baseAlloc;
|
||||
immutable size_t m_elemSize;
|
||||
}
|
||||
|
||||
this(size_t elem_size, Allocator base_allocator)
|
||||
{
|
||||
assert(elem_size >= size_t.sizeof);
|
||||
m_elemSize = elem_size;
|
||||
m_baseAlloc = base_allocator;
|
||||
logDebug_("Create FreeListAlloc %d", m_elemSize);
|
||||
}
|
||||
|
||||
@property size_t elementSize() const { return m_elemSize; }
|
||||
|
||||
void[] alloc(size_t sz)
|
||||
{
|
||||
assert(sz == m_elemSize, "Invalid allocation size.");
|
||||
return alloc();
|
||||
}
|
||||
|
||||
void[] alloc()
|
||||
{
|
||||
void[] mem;
|
||||
if( m_firstFree ){
|
||||
auto slot = m_firstFree;
|
||||
m_firstFree = slot.next;
|
||||
slot.next = null;
|
||||
mem = (cast(void*)slot)[0 .. m_elemSize];
|
||||
debug m_nfree--;
|
||||
} else {
|
||||
mem = m_baseAlloc.alloc(m_elemSize);
|
||||
//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);
|
||||
}
|
||||
debug m_nalloc++;
|
||||
//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);
|
||||
return mem;
|
||||
}
|
||||
|
||||
void[] realloc(void[] mem, size_t sz)
|
||||
{
|
||||
assert(mem.length == m_elemSize);
|
||||
assert(sz == m_elemSize);
|
||||
return mem;
|
||||
}
|
||||
|
||||
void free(void[] mem)
|
||||
{
|
||||
assert(mem.length == m_elemSize, "Memory block passed to free has wrong size.");
|
||||
auto s = cast(FreeListSlot*)mem.ptr;
|
||||
s.next = m_firstFree;
|
||||
m_firstFree = s;
|
||||
m_nalloc--;
|
||||
m_nfree++;
|
||||
}
|
||||
}
|
||||
|
||||
struct FreeListObjectAlloc(T, bool USE_GC = true, bool INIT = true)
|
||||
{
|
||||
enum ElemSize = AllocSize!T;
|
||||
enum ElemSlotSize = max(AllocSize!T, Slot.sizeof);
|
||||
|
||||
static if( is(T == class) ){
|
||||
alias TR = T;
|
||||
} else {
|
||||
alias TR = T*;
|
||||
}
|
||||
|
||||
struct Slot { Slot* next; }
|
||||
|
||||
private static Slot* s_firstFree;
|
||||
|
||||
static TR alloc(ARGS...)(ARGS args)
|
||||
{
|
||||
void[] mem;
|
||||
if (s_firstFree !is null) {
|
||||
auto ret = s_firstFree;
|
||||
s_firstFree = s_firstFree.next;
|
||||
ret.next = null;
|
||||
mem = (cast(void*)ret)[0 .. ElemSize];
|
||||
} else {
|
||||
//logInfo("alloc %s/%d", T.stringof, ElemSize);
|
||||
mem = manualAllocator().alloc(ElemSlotSize);
|
||||
static if( hasIndirections!T ) GC.addRange(mem.ptr, ElemSlotSize);
|
||||
}
|
||||
|
||||
static if (INIT) return cast(TR)internalEmplace!(Unqual!T)(mem, args); // FIXME: this emplace has issues with qualified types, but Unqual!T may result in the wrong constructor getting called.
|
||||
else return cast(TR)mem.ptr;
|
||||
}
|
||||
|
||||
static void free(TR obj)
|
||||
{
|
||||
static if (INIT) {
|
||||
scope (failure) assert(0, "You shouldn't throw in destructors");
|
||||
auto objc = obj;
|
||||
static if (is(TR == T*)) .destroy(*objc);//typeid(T).destroy(cast(void*)obj);
|
||||
else .destroy(objc);
|
||||
}
|
||||
|
||||
auto sl = cast(Slot*)obj;
|
||||
sl.next = s_firstFree;
|
||||
s_firstFree = sl;
|
||||
//static if( hasIndirections!T ) GC.removeRange(cast(void*)obj);
|
||||
//manualAllocator().free((cast(void*)obj)[0 .. ElemSlotSize]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
template AllocSize(T)
|
||||
{
|
||||
static if (is(T == class)) {
|
||||
// workaround for a strange bug where AllocSize!SSLStream == 0: TODO: dustmite!
|
||||
enum dummy = T.stringof ~ __traits(classInstanceSize, T).stringof;
|
||||
enum AllocSize = __traits(classInstanceSize, T);
|
||||
} else {
|
||||
enum AllocSize = T.sizeof;
|
||||
}
|
||||
}
|
||||
|
||||
struct FreeListRef(T, bool INIT = true)
{
	alias ObjAlloc = FreeListObjectAlloc!(T, true, INIT);
	enum ElemSize = AllocSize!T;

	static if( is(T == class) ){
		alias TR = T;
	} else {
		alias TR = T*;
	}

	private TR m_object;
	private size_t m_magic = 0x1EE75817; // workaround for compiler bug

	static FreeListRef opCall(ARGS...)(ARGS args)
	{
		//logInfo("refalloc %s/%d", T.stringof, ElemSize);
		FreeListRef ret;
		ret.m_object = ObjAlloc.alloc(args);
		ret.refCount = 1;
		return ret;
	}

	~this()
	{
		//if( m_object ) logInfo("~this!%s(): %d", T.stringof, this.refCount);
		//if( m_object ) logInfo("ref %s destructor %d", T.stringof, refCount);
		//else logInfo("ref %s destructor %d", T.stringof, 0);
		clear();
		m_magic = 0;
		m_object = null;
	}

	this(this)
	{
		checkInvariants();
		if( m_object ){
			//if( m_object ) logInfo("this!%s(this): %d", T.stringof, this.refCount);
			this.refCount++;
		}
	}

	void opAssign(FreeListRef other)
	{
		clear();
		m_object = other.m_object;
		if( m_object ){
			//logInfo("opAssign!%s(): %d", T.stringof, this.refCount);
			refCount++;
		}
	}

	void clear()
	{
		checkInvariants();
		if (m_object) {
			if (--this.refCount == 0)
				ObjAlloc.free(m_object);
		}

		m_object = null;
		m_magic = 0x1EE75817;
	}

	@property const(TR) get() const { checkInvariants(); return m_object; }
	@property TR get() { checkInvariants(); return m_object; }
	alias get this;

	private @property ref int refCount()
	const {
		auto ptr = cast(ubyte*)cast(void*)m_object;
		ptr += ElemSize;
		return *cast(int*)ptr;
	}

	private void checkInvariants()
	const {
		assert(m_magic == 0x1EE75817);
		assert(!m_object || refCount > 0);
	}
}

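// Illustrative sketch (added, not part of the original sources): FreeListRef is
// expected to act as a reference-counted handle whose storage is recycled via
// the free list above. `Payload` is a made-up example type.
unittest {
	static struct Payload { int value; }
	auto a = FreeListRef!Payload(42);
	assert(a.value == 42); // members are reachable through `alias get this`
	auto b = a;            // postblit increments the reference count
	b.value = 43;
	assert(a.value == 43); // both handles refer to the same slot
}
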
private void* extractUnalignedPointer(void* base) nothrow
{
	ubyte misalign = *(cast(ubyte*)base-1);
	assert(misalign <= Allocator.alignment);
	return base - misalign;
}

private void* adjustPointerAlignment(void* base) nothrow
{
	ubyte misalign = Allocator.alignment - (cast(size_t)base & Allocator.alignmentMask);
	base += misalign;
	*(cast(ubyte*)base-1) = misalign;
	return base;
}

unittest {
	void test_align(void* p, size_t adjustment) {
		void* pa = adjustPointerAlignment(p);
		assert((cast(size_t)pa & Allocator.alignmentMask) == 0, "Non-aligned pointer.");
		assert(*(cast(ubyte*)pa-1) == adjustment, "Invalid adjustment "~to!string(p)~": "~to!string(*(cast(ubyte*)pa-1)));
		void* pr = extractUnalignedPointer(pa);
		assert(pr == p, "Recovered base != original");
	}
	void* ptr = .malloc(0x40);
	ptr += Allocator.alignment - (cast(size_t)ptr & Allocator.alignmentMask);
	test_align(ptr++, 0x10);
	test_align(ptr++, 0x0F);
	test_align(ptr++, 0x0E);
	test_align(ptr++, 0x0D);
	test_align(ptr++, 0x0C);
	test_align(ptr++, 0x0B);
	test_align(ptr++, 0x0A);
	test_align(ptr++, 0x09);
	test_align(ptr++, 0x08);
	test_align(ptr++, 0x07);
	test_align(ptr++, 0x06);
	test_align(ptr++, 0x05);
	test_align(ptr++, 0x04);
	test_align(ptr++, 0x03);
	test_align(ptr++, 0x02);
	test_align(ptr++, 0x01);
	test_align(ptr++, 0x10);
}

private size_t alignedSize(size_t sz) nothrow
{
	return ((sz + Allocator.alignment - 1) / Allocator.alignment) * Allocator.alignment;
}

unittest {
	foreach( i; 0 .. 20 ){
		auto ia = alignedSize(i);
		assert(ia >= i);
		assert((ia & Allocator.alignmentMask) == 0);
		assert(ia < i+Allocator.alignment);
	}
}

private void ensureValidMemory(void[] mem) nothrow
{
	auto bytes = cast(ubyte[])mem;
	swap(bytes[0], bytes[$-1]);
	swap(bytes[0], bytes[$-1]);
}

/// See issue #14194
private T internalEmplace(T, Args...)(void[] chunk, auto ref Args args)
	if (is(T == class))
in {
	import std.string, std.format;
	assert(chunk.length >= T.sizeof,
		format("emplace: Chunk size too small: %s < %s size = %s",
			chunk.length, T.stringof, T.sizeof));
	assert((cast(size_t) chunk.ptr) % T.alignof == 0,
		format("emplace: Misaligned memory block (0x%X): it must be %s-byte aligned for type %s", chunk.ptr, T.alignof, T.stringof));

} body {
	enum classSize = __traits(classInstanceSize, T);
	auto result = cast(T) chunk.ptr;

	// Initialize the object in its pre-ctor state
	chunk[0 .. classSize] = typeid(T).init[];

	// Call the ctor if any
	static if (is(typeof(result.__ctor(args))))
	{
		// T defines a genuine constructor accepting args
		// Go the classic route: write .init first, then call ctor
		result.__ctor(args);
	}
	else
	{
		static assert(args.length == 0 && !is(typeof(&T.__ctor)),
			"Don't know how to initialize an object of type "
			~ T.stringof ~ " with arguments " ~ Args.stringof);
	}
	return result;
}

/// Ditto
private auto internalEmplace(T, Args...)(void[] chunk, auto ref Args args)
	if (!is(T == class))
in {
	import std.string, std.format;
	assert(chunk.length >= T.sizeof,
		format("emplace: Chunk size too small: %s < %s size = %s",
			chunk.length, T.stringof, T.sizeof));
	assert((cast(size_t) chunk.ptr) % T.alignof == 0,
		format("emplace: Misaligned memory block (0x%X): it must be %s-byte aligned for type %s", chunk.ptr, T.alignof, T.stringof));

} body {
	return emplace(cast(T*)chunk.ptr, args);
}

private void logDebug_(ARGS...)(string msg, ARGS args) {}

235
source/vibe/internal/string.d
Normal file

@@ -0,0 +1,235 @@
/**
	Utility functions for string processing

	Copyright: © 2012-2014 RejectedSoftware e.K.
	License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file.
	Authors: Sönke Ludwig
*/
module vibe.internal.string;

public import std.string;

import vibe.internal.array;
import vibe.internal.memory;

import std.algorithm;
import std.array;
import std.ascii;
import std.format;
import std.uni;
import std.utf;
import core.exception;

/**
	Takes a string with possibly invalid UTF8 sequences and outputs a valid UTF8 string as near to
	the original as possible.
*/
string sanitizeUTF8(in ubyte[] str)
@safe pure {
	import std.utf;
	auto ret = appender!string();
	ret.reserve(str.length);

	size_t i = 0;
	while (i < str.length) {
		dchar ch = str[i];
		try ch = std.utf.decode(cast(const(char[]))str, i);
		catch( UTFException ){ i++; }
		//catch( AssertError ){ i++; }
		char[4] dst;
		auto len = std.utf.encode(dst, ch);
		ret.put(dst[0 .. len]);
	}

	return ret.data;
}

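// Illustrative sketch (added, not part of the original sources): ASCII data is
// passed through unchanged and arbitrary byte sequences become valid UTF-8.
unittest {
	assert(sanitizeUTF8(cast(const(ubyte)[])"plain ascii") == "plain ascii");
	ubyte[] broken = [0x6F, 0x6B, 0xFF, 0x21]; // "ok" + invalid byte + "!"
	import std.utf : validate;
	validate(sanitizeUTF8(broken)); // must not throw once sanitized
}
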
/**
	Strips the byte order mark of an UTF8 encoded string.
	This is useful when the string is coming from a file.
*/
string stripUTF8Bom(string str)
@safe pure nothrow {
	if (str.length >= 3 && str[0 .. 3] == [0xEF, 0xBB, 0xBF])
		return str[3 ..$];
	return str;
}

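// Illustrative sketch (added, not part of the original sources): only the
// leading BOM is removed; everything else is returned as-is.
unittest {
	assert(stripUTF8Bom("\xEF\xBB\xBFhello") == "hello");
	assert(stripUTF8Bom("hello") == "hello");
}
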
/**
	Checks if all characters in 'str' are contained in 'chars'.
*/
bool allOf(string str, string chars)
@safe pure {
	foreach (dchar ch; str)
		if (!chars.canFind(ch))
			return false;
	return true;
}

ptrdiff_t indexOfCT(Char)(in Char[] s, dchar c, CaseSensitive cs = CaseSensitive.yes)
@safe pure {
	if (__ctfe) {
		if (cs == CaseSensitive.yes) {
			foreach (i, dchar ch; s)
				if (ch == c)
					return i;
		} else {
			c = std.uni.toLower(c);
			foreach (i, dchar ch; s)
				if (std.uni.toLower(ch) == c)
					return i;
		}
		return -1;
	} else return std.string.indexOf(s, c, cs);
}

ptrdiff_t indexOfCT(Char)(in Char[] s, in Char[] needle)
{
	if (__ctfe) {
		if (s.length < needle.length) return -1;
		foreach (i; 0 .. s.length - needle.length + 1) // also consider a match ending exactly at $
			if (s[i .. i+needle.length] == needle)
				return i;
		return -1;
	} else return std.string.indexOf(s, needle);
}

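// Illustrative sketch (added, not part of the original sources): both overloads
// are usable in CTFE, which is what the __ctfe branches above provide.
unittest {
	static assert(indexOfCT("hello", 'l') == 2);
	static assert(indexOfCT("hello", "lo") == 3);
	static assert(indexOfCT("hello", "xy") == -1);
}
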
/**
	Checks if any character in 'str' is contained in 'chars'.
*/
bool anyOf(string str, string chars)
@safe pure {
	foreach (ch; str)
		if (chars.canFind(ch))
			return true;
	return false;
}

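// Illustrative sketch (added, not part of the original sources) for the two
// character-set predicates defined above.
unittest {
	assert(allOf("abc", "abcdef"));
	assert(!allOf("abz", "abc"));
	assert(anyOf("xyzc", "abc"));
	assert(!anyOf("xyz", "abc"));
}
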
/// ASCII whitespace trimming (space and tab)
string stripLeftA(string s)
@safe pure nothrow {
	while (s.length > 0 && (s[0] == ' ' || s[0] == '\t'))
		s = s[1 .. $];
	return s;
}

/// ASCII whitespace trimming (space and tab)
string stripRightA(string s)
@safe pure nothrow {
	while (s.length > 0 && (s[$-1] == ' ' || s[$-1] == '\t'))
		s = s[0 .. $-1];
	return s;
}

/// ASCII whitespace trimming (space and tab)
string stripA(string s)
@safe pure nothrow {
	return stripLeftA(stripRightA(s));
}

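// Illustrative sketch (added, not part of the original sources): unlike
// std.string.strip, only spaces and tabs are removed.
unittest {
	assert(stripA(" \thello\t ") == "hello");
	assert(stripA("\nhello\n") == "\nhello\n"); // other whitespace is kept
}
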
/// Finds the first occurrence of any of the characters in `chars`
sizediff_t indexOfAny(string str, string chars)
@safe pure {
	foreach (i, char ch; str)
		if (chars.canFind(ch))
			return i;
	return -1;
}
alias countUntilAny = indexOfAny;

/**
	Finds the closing bracket (works with any of '[', '$(LPAREN)', '<', '{').

	Params:
		str = input string
		nested = whether to skip nested brackets
	Returns:
		The index of the closing bracket or -1 for unbalanced strings
		and strings that don't start with a bracket.
*/
sizediff_t matchBracket(string str, bool nested = true)
@safe pure nothrow {
	if (str.length < 2) return -1;

	char open = str[0], close = void;
	switch (str[0]) {
		case '[': close = ']'; break;
		case '(': close = ')'; break;
		case '<': close = '>'; break;
		case '{': close = '}'; break;
		default: return -1;
	}

	size_t level = 1;
	foreach (i, char c; str[1 .. $]) {
		if (nested && c == open) ++level;
		else if (c == close) --level;
		if (level == 0) return i + 1;
	}
	return -1;
}

@safe unittest
{
	static struct Test { string str; sizediff_t res; }
	enum tests = [
		Test("[foo]", 4), Test("<bar>", 4), Test("{baz}", 4),
		Test("[", -1), Test("[foo", -1), Test("ab[f]", -1),
		Test("[foo[bar]]", 9), Test("[foo{bar]]", 8),
	];
	foreach (test; tests)
		assert(matchBracket(test.str) == test.res);
	assert(matchBracket("[foo[bar]]", false) == 8);
	static assert(matchBracket("[foo]") == 4);
}

/// Same as std.string.format, just using an allocator.
string formatAlloc(ARGS...)(Allocator alloc, string fmt, ARGS args)
{
	auto app = AllocAppender!string(alloc);
	formattedWrite(&app, fmt, args);
	return app.data;
}

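// Illustrative sketch (added, not part of the original sources). It assumes
// manualAllocator() from vibe.internal.memory is acceptable as the Allocator
// here; the formatted string is intentionally not freed, so this is purely
// illustrative.
unittest {
	auto s = formatAlloc(manualAllocator(), "%s-%04d", "id", 42);
	assert(s == "id-0042");
}
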
/// Special version of icmp() with optimization for ASCII characters
int icmp2(string a, string b)
@safe pure {
	size_t i = 0, j = 0;

	// fast skip equal prefix
	size_t min_len = min(a.length, b.length);
	while( i < min_len && a[i] == b[i] ) i++;
	if( i > 0 && (a[i-1] & 0x80) ) i--; // don't stop half-way in a UTF-8 sequence
	j = i;

	// compare the differing character and the rest of the string
	while(i < a.length && j < b.length){
		uint ac = cast(uint)a[i];
		uint bc = cast(uint)b[j];
		if( !((ac | bc) & 0x80) ){
			i++;
			j++;
			if( ac >= 'A' && ac <= 'Z' ) ac += 'a' - 'A';
			if( bc >= 'A' && bc <= 'Z' ) bc += 'a' - 'A';
			if( ac < bc ) return -1;
			else if( ac > bc ) return 1;
		} else {
			dchar acp = decode(a, i);
			dchar bcp = decode(b, j);
			if( acp != bcp ){
				acp = std.uni.toLower(acp);
				bcp = std.uni.toLower(bcp);
				if( acp < bcp ) return -1;
				else if( acp > bcp ) return 1;
			}
		}
	}

	if( i < a.length ) return 1;
	else if( j < b.length ) return -1;

	assert(i == a.length || j == b.length, "Strings equal but we didn't fully compare them!?");
	return 0;
}
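// Illustrative sketch (added, not part of the original sources): ASCII pairs
// take the fast path, non-ASCII falls back to Unicode case folding.
unittest {
	assert(icmp2("Hello", "hello") == 0);
	assert(icmp2("abc", "abd") < 0);
	assert(icmp2("Grüße", "grüße") == 0);
}
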
384
source/vibe/internal/traits.d
Normal file

@@ -0,0 +1,384 @@
/**
	Extensions to `std.traits` module of Phobos. Some may eventually make it into Phobos,
	some are dirty hacks that work only for vibe.d

	Copyright: © 2012 RejectedSoftware e.K.
	License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file.
	Authors: Sönke Ludwig, Михаил Страшун
*/

module vibe.internal.traits;

import vibe.internal.typetuple;

/**
	Checks if given type is a getter function type

	Returns: `true` if argument is a getter
*/
template isPropertyGetter(T...)
	if (T.length == 1)
{
	import std.traits : functionAttributes, FunctionAttribute, ReturnType,
		isSomeFunction;
	static if (isSomeFunction!(T[0])) {
		enum isPropertyGetter =
			(functionAttributes!(T[0]) & FunctionAttribute.property) != 0
			&& !is(ReturnType!T == void);
	}
	else
		enum isPropertyGetter = false;
}

///
unittest
{
	interface Test
	{
		@property int getter();
		@property void setter(int);
		int simple();
	}

	static assert(isPropertyGetter!(typeof(&Test.getter)));
	static assert(!isPropertyGetter!(typeof(&Test.setter)));
	static assert(!isPropertyGetter!(typeof(&Test.simple)));
	static assert(!isPropertyGetter!int);
}

/**
	Checks if given type is a setter function type

	Returns: `true` if argument is a setter
*/
template isPropertySetter(T...)
	if (T.length == 1)
{
	import std.traits : functionAttributes, FunctionAttribute, ReturnType,
		isSomeFunction;

	static if (isSomeFunction!(T[0])) {
		enum isPropertySetter =
			(functionAttributes!(T) & FunctionAttribute.property) != 0
			&& is(ReturnType!(T[0]) == void);
	}
	else
		enum isPropertySetter = false;
}

///
unittest
{
	interface Test
	{
		@property int getter();
		@property void setter(int);
		int simple();
	}

	static assert(isPropertySetter!(typeof(&Test.setter)));
	static assert(!isPropertySetter!(typeof(&Test.getter)));
	static assert(!isPropertySetter!(typeof(&Test.simple)));
	static assert(!isPropertySetter!int);
}

/**
	Deduces single base interface for a type. Multiple interfaces
	will result in compile-time error.

	Params:
		T = interface or class type

	Returns:
		T if it is an interface. If T is a class, interface it implements.
*/
template baseInterface(T)
	if (is(T == interface) || is(T == class))
{
	import std.traits : InterfacesTuple;

	static if (is(T == interface)) {
		alias baseInterface = T;
	}
	else
	{
		alias Ifaces = InterfacesTuple!T;
		static assert (
			Ifaces.length == 1,
			"Type must be either provided as an interface or implement only one interface"
		);
		alias baseInterface = Ifaces[0];
	}
}

///
unittest
{
	interface I1 { }
	class A : I1 { }
	interface I2 { }
	class B : I1, I2 { }

	static assert (is(baseInterface!I1 == I1));
	static assert (is(baseInterface!A == I1));
	static assert (!is(typeof(baseInterface!B)));
}

/**
	Determines if a member is a public, non-static data field.
*/
template isRWPlainField(T, string M)
{
	static if (!isRWField!(T, M)) enum isRWPlainField = false;
	else {
		//pragma(msg, T.stringof~"."~M~":"~typeof(__traits(getMember, T, M)).stringof);
		enum isRWPlainField = __traits(compiles, *(&__traits(getMember, Tgen!T(), M)) = *(&__traits(getMember, Tgen!T(), M)));
	}
}

/**
	Determines if a member is a public, non-static, de-facto data field.

	In addition to plain data fields, R/W properties are also accepted.
*/
template isRWField(T, string M)
{
	import std.traits;
	import std.typetuple;

	static void testAssign()() {
		T t = void;
		__traits(getMember, t, M) = __traits(getMember, t, M);
	}

	// reject type aliases
	static if (is(TypeTuple!(__traits(getMember, T, M)))) enum isRWField = false;
	// reject non-public members
	else static if (!isPublicMember!(T, M)) enum isRWField = false;
	// reject static members
	else static if (!isNonStaticMember!(T, M)) enum isRWField = false;
	// reject non-typed members
	else static if (!is(typeof(__traits(getMember, T, M)))) enum isRWField = false;
	// reject void typed members (includes templates)
	else static if (is(typeof(__traits(getMember, T, M)) == void)) enum isRWField = false;
	// reject non-assignable members
	else static if (!__traits(compiles, testAssign!()())) enum isRWField = false;
	else static if (anySatisfy!(isSomeFunction, __traits(getMember, T, M))) {
		// If M is a function, reject if not @property or returns by ref
		private enum FA = functionAttributes!(__traits(getMember, T, M));
		enum isRWField = (FA & FunctionAttribute.property) != 0;
	} else {
		enum isRWField = true;
	}
}

unittest {
	import std.algorithm;

	struct S {
		alias a = int; // alias
		int i; // plain RW field
		enum j = 42; // manifest constant
		static int k = 42; // static field
		private int privateJ; // private RW field

		this(Args...)(Args args) {}

		// read-write property (OK)
		@property int p1() { return privateJ; }
		@property void p1(int j) { privateJ = j; }
		// read-only property (NO)
		@property int p2() { return privateJ; }
		// write-only property (NO)
		@property void p3(int value) { privateJ = value; }
		// ref returning property (OK)
		@property ref int p4() { return i; }
		// parameter-less template property (OK)
		@property ref int p5()() { return i; }
		// not treated as a property by DMD, so not a field
		@property int p6()() { return privateJ; }
		@property void p6(int j)() { privateJ = j; }

		static @property int p7() { return k; }
		static @property void p7(int value) { k = value; }

		ref int f1() { return i; } // ref returning function (no field)

		int f2(Args...)(Args args) { return i; }

		ref int f3(Args...)(Args args) { return i; }

		void someMethod() {}

		ref int someTempl()() { return i; }
	}

	enum plainFields = ["i"];
	enum fields = ["i", "p1", "p4", "p5"];

	foreach (mem; __traits(allMembers, S)) {
		static if (isRWField!(S, mem)) static assert(fields.canFind(mem), mem~" detected as field.");
		else static assert(!fields.canFind(mem), mem~" not detected as field.");

		static if (isRWPlainField!(S, mem)) static assert(plainFields.canFind(mem), mem~" not detected as plain field.");
		else static assert(!plainFields.canFind(mem), mem~" not detected as plain field.");
	}
}

package T Tgen(T)(){ return T.init; }

/**
	Tests if the protection of a member is public.
*/
template isPublicMember(T, string M)
{
	import std.algorithm, std.typetuple : TypeTuple;

	static if (!__traits(compiles, TypeTuple!(__traits(getMember, T, M)))) enum isPublicMember = false;
	else {
		alias MEM = TypeTuple!(__traits(getMember, T, M));
		enum isPublicMember = __traits(getProtection, MEM).among("public", "export");
	}
}

unittest {
	class C {
		int a;
		export int b;
		protected int c;
		private int d;
		package int e;
		void f() {}
		static void g() {}
		private void h() {}
		private static void i() {}
	}

	static assert (isPublicMember!(C, "a"));
	static assert (isPublicMember!(C, "b"));
	static assert (!isPublicMember!(C, "c"));
	static assert (!isPublicMember!(C, "d"));
	static assert (!isPublicMember!(C, "e"));
	static assert (isPublicMember!(C, "f"));
	static assert (isPublicMember!(C, "g"));
	static assert (!isPublicMember!(C, "h"));
	static assert (!isPublicMember!(C, "i"));

	struct S {
		int a;
		export int b;
		private int d;
		package int e;
	}
	static assert (isPublicMember!(S, "a"));
	static assert (isPublicMember!(S, "b"));
	static assert (!isPublicMember!(S, "d"));
	static assert (!isPublicMember!(S, "e"));

	S s;
	s.a = 21;
	assert(s.a == 21);
}

/**
	Tests if a member requires $(D this) to be used.
*/
template isNonStaticMember(T, string M)
{
	import std.typetuple;
	import std.traits;

	alias MF = TypeTuple!(__traits(getMember, T, M));
	static if (M.length == 0) {
		enum isNonStaticMember = false;
	} else static if (anySatisfy!(isSomeFunction, MF)) {
		enum isNonStaticMember = !__traits(isStaticFunction, MF);
	} else {
		enum isNonStaticMember = !__traits(compiles, (){ auto x = __traits(getMember, T, M); }());
	}
}

unittest { // normal fields
	struct S {
		int a;
		static int b;
		enum c = 42;
		void f();
		static void g();
		ref int h() { return a; }
		static ref int i() { return b; }
	}
	static assert(isNonStaticMember!(S, "a"));
	static assert(!isNonStaticMember!(S, "b"));
	static assert(!isNonStaticMember!(S, "c"));
	static assert(isNonStaticMember!(S, "f"));
	static assert(!isNonStaticMember!(S, "g"));
	static assert(isNonStaticMember!(S, "h"));
	static assert(!isNonStaticMember!(S, "i"));
}

unittest { // tuple fields
	struct S(T...) {
		T a;
		static T b;
	}

	alias T = S!(int, float);
	auto p = T.b;
	static assert(isNonStaticMember!(T, "a"));
	static assert(!isNonStaticMember!(T, "b"));

	alias U = S!();
	static assert(!isNonStaticMember!(U, "a"));
	static assert(!isNonStaticMember!(U, "b"));
}

/**
	Tests if a Group of types is implicitly convertible to a Group of target types.
*/
bool areConvertibleTo(alias TYPES, alias TARGET_TYPES)()
	if (isGroup!TYPES && isGroup!TARGET_TYPES)
{
	static assert(TYPES.expand.length == TARGET_TYPES.expand.length);
	foreach (i, V; TYPES.expand)
		if (!is(V : TARGET_TYPES.expand[i]))
			return false;
	return true;
}

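// Illustrative sketch (added, not part of the original sources): element-wise
// implicit convertibility between two non-expanding Groups.
unittest {
	import vibe.internal.typetuple : Group;
	static assert(areConvertibleTo!(Group!(int, short), Group!(long, int))());
	static assert(!areConvertibleTo!(Group!(int, string), Group!(long, int))());
}
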
/// Test if the type $(D DG) is a correct delegate for an opApply where the
/// key/index is of type $(D TKEY) and the value of type $(D TVALUE).
template isOpApplyDg(DG, TKEY, TVALUE) {
	import std.traits;
	static if (is(DG == delegate) && is(ReturnType!DG : int)) {
		private alias PTT = ParameterTypeTuple!(DG);
		private alias PSCT = ParameterStorageClassTuple!(DG);
		private alias STC = ParameterStorageClass;
		// Just a value
		static if (PTT.length == 1) {
			enum isOpApplyDg = (is(PTT[0] == TVALUE));
		} else static if (PTT.length == 2) {
			enum isOpApplyDg = (is(PTT[0] == TKEY))
				&& (is(PTT[1] == TVALUE));
		} else
			enum isOpApplyDg = false;
	} else {
		enum isOpApplyDg = false;
	}
}

unittest {
	static assert(isOpApplyDg!(int delegate(int, string), int, string));
	static assert(isOpApplyDg!(int delegate(ref int, ref string), int, string));
	static assert(isOpApplyDg!(int delegate(int, ref string), int, string));
	static assert(isOpApplyDg!(int delegate(ref int, string), int, string));
}

// Synchronized statements are logically nothrow but dmd still marks them as throwing.
// DMD#4115, Druntime#1013, Druntime#1021, Phobos#2704
import core.sync.mutex : Mutex;
enum synchronizedIsNothrow = __traits(compiles, (Mutex m) nothrow { synchronized(m) {} });
123
source/vibe/internal/typetuple.d
Normal file

@@ -0,0 +1,123 @@
/**
	Additions to std.typetuple pending for inclusion into Phobos.

	Copyright: © 2013 RejectedSoftware e.K.
	License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file.
	Authors: Михаил Страшун
*/

module vibe.internal.typetuple;

import std.typetuple;
import std.traits;

/**
	TypeTuple which does not auto-expand.

	Useful when you need to pass several type tuples as separate template
	argument list parameters, without merging them into one.
*/
template Group(T...)
{
	alias expand = T;
}

///
unittest
{
	alias group = Group!(int, double, string);
	static assert (!is(typeof(group.length)));
	static assert (group.expand.length == 3);
	static assert (is(group.expand[1] == double));
}

/**
	Tests if a given symbol is an instance of `Group`.
*/
template isGroup(T...)
{
	static if (T.length != 1) enum isGroup = false;
	else enum isGroup =
		!is(T[0]) && is(typeof(T[0]) == void) // does not evaluate to something
		&& is(typeof(T[0].expand.length) : size_t) // expands to something with length
		&& !is(typeof(&(T[0].expand))); // expands to not addressable
}

version (unittest) // NOTE: GDC complains about template definitions in unittest blocks
{
	alias group = Group!(int, double, string);
	alias group2 = Group!();

	template Fake(T...)
	{
		int[] expand;
	}
	alias fake = Fake!(int, double, string);

	alias fake2 = TypeTuple!(int, double, string);

	static assert (isGroup!group);
	static assert (isGroup!group2);
	static assert (!isGroup!fake);
	static assert (!isGroup!fake2);
}

/* Copied from Phobos as it is private there.
*/
private template isSame(ab...)
	if (ab.length == 2)
{
	static if (is(ab[0]) && is(ab[1]))
	{
		enum isSame = is(ab[0] == ab[1]);
	}
	else static if (!is(ab[0]) &&
					!is(ab[1]) &&
					is(typeof(ab[0] == ab[1]) == bool) &&
					(ab[0] == ab[1]))
	{
		static if (!__traits(compiles, &ab[0]) ||
				   !__traits(compiles, &ab[1]))
			enum isSame = (ab[0] == ab[1]);
		else
			enum isSame = __traits(isSame, ab[0], ab[1]);
	}
	else
	{
		enum isSame = __traits(isSame, ab[0], ab[1]);
	}
}

/**
	Compares two groups for element identity

	Params:
		Group1, Group2 = any instances of `Group`

	Returns:
		`true` if each element of Group1 is identical to
		the one of Group2 at the same index
*/
template Compare(alias Group1, alias Group2)
	if (isGroup!Group1 && isGroup!Group2)
{
	private template implementation(size_t index)
	{
		static if (Group1.expand.length != Group2.expand.length) enum implementation = false;
		else static if (index >= Group1.expand.length) enum implementation = true;
		else static if (!isSame!(Group1.expand[index], Group2.expand[index])) enum implementation = false;
		else enum implementation = implementation!(index+1);
	}

	enum Compare = implementation!0;
}

///
unittest
{
	alias one = Group!(int, double);
	alias two = Group!(int, double);
	alias three = Group!(double, int);
	static assert (Compare!(one, two));
	static assert (!Compare!(one, three));
}