1 module memutils.utils;
2 
3 import core.thread : Fiber;	
4 import std.traits : isPointer, hasIndirections, hasElaborateDestructor, isArray, ReturnType;
5 import std.conv : emplace;
6 import core.stdc.string : memset, memcpy;
7 import memutils.allocators;
8 import std.algorithm : startsWith;
9 import memutils.constants;
10 import memutils.vector : Array;
11 import std.range : ElementType;
12 import memutils.helpers : UnConst;
13 import std.conv;
14 
/// Convenience allocator front-end backed by the `NativeGC` allocator.
/// All members are injected by `ConvenienceAllocators` (alloc/free/copy/realloc).
struct AppMem {
	mixin ConvenienceAllocators!(NativeGC, typeof(this));
}
18 
/// Convenience allocator front-end backed by the `LocklessFreeList` allocator.
/// All members are injected by `ConvenienceAllocators` (alloc/free/copy/realloc).
struct ThreadMem {
	mixin ConvenienceAllocators!(LocklessFreeList, typeof(this));
}
22 
/// Convenience allocator front-end backed by the `CryptoSafe` allocator
/// (see memutils.constants for its semantics).
/// All members are injected by `ConvenienceAllocators` (alloc/free/copy/realloc).
struct SecureMem {
	mixin ConvenienceAllocators!(CryptoSafe, typeof(this));
}
/// Reserved for containers: exposes only the allocator ident (Mallocator),
/// without the ConvenienceAllocators member functions.
struct Malloc {
	enum ident = Mallocator;
}
30 
31 package:
32 
33 
/// Object (de)allocation front-end bound at compile time to allocator `ALLOC`.
/// `ALLOC` is one of the convenience allocator structs (AppMem, ThreadMem,
/// SecureMem) or the pool stack, which is special-cased by its type name.
template ObjectAllocator(T, ALLOC)
{
	import std.traits : ReturnType;
	import core.memory : GC;
	// Size of one instance (for classes: the instance itself, not the reference).
	enum ElemSize = AllocSize!T;

	static if (ALLOC.stringof == "PoolStack") {
		// Resolve the pool allocator lazily at call time via the stack top.
		ReturnType!(ALLOC.top) function() m_getAlloc = &ALLOC.top;
	}
	// A type may opt out of GC range registration by declaring `enum NOGC = true`.
	static if (__traits(hasMember, T, "NOGC")) enum NOGC = T.NOGC;
	else enum NOGC = false;

	alias TR = RefTypeOf!T;

	/// Allocates storage for a `T` and constructs it in place, forwarding `args`.
	/// Returns: a reference/pointer to the new object, or `T.init` when `T`
	/// cannot be emplaced from `args`.
	TR alloc(ARGS...)(auto ref ARGS args)
	{
		static if (ALLOC.stringof != "PoolStack") {
			auto allocator_ = getAllocator!(ALLOC.ident)(false);
			auto mem = allocator_.alloc(ElemSize);
		}
		else
			auto mem = m_getAlloc().alloc(ElemSize);
		static if (!__traits(compiles, (){ emplace!T(mem,args); })) {
			// `T` is not constructible from these arguments. Previously this
			// path leaked the raw allocation (and left a stale GC range over
			// it); release the memory before bailing out with T.init.
			static if (ALLOC.stringof != "PoolStack")
				allocator_.free(mem);
			else
				m_getAlloc().free(mem);
			return cast(TR)T.init;
		}
		else {
			// Register with the GC only on the construction path, so the
			// fallback above never leaves a range behind.
			static if ( ALLOC.stringof != "AppMem" && hasIndirections!T && !NOGC)
			{
				// Newer druntimes accept a TypeInfo for precise scanning.
				static if (__traits(compiles, { GC.addRange(null, 0, typeid(string)); }()))
					GC.addRange(mem.ptr, ElemSize, typeid(T));
				else
					GC.addRange(mem.ptr, ElemSize);
			}
			return cast(TR)emplace!T(mem, args);
		}
	}

	/// Destroys `obj` and returns its storage to the bound allocator,
	/// removing the GC range added by `alloc` when one was registered.
	void free(TR obj)
	{
		static if( ALLOC.stringof != "AppMem" && hasIndirections!T && !NOGC) {
			GC.removeRange(cast(void*)obj);
		}

		TR objc = obj;
		// Structs are handed to destroy via their pointee; classes directly.
		static if (is(TR == T*)) .destroy(*objc);
		else .destroy(objc);

		static if (ALLOC.stringof != "PoolStack") {
			// `true`: we are freeing — avoid allocating inside a destructor.
			if (auto a = getAllocator!(ALLOC.ident)(true))
				a.free((cast(void*)obj)[0 .. ElemSize]);
		}
		else
			m_getAlloc().free((cast(void*)obj)[0 .. ElemSize]);
	}
}
90 
91 /// Allocates an array without touching the memory.
/// Allocates storage for `n` elements of `T` without initializing it;
/// callers are expected to construct the elements themselves.
T[] allocArray(T, ALLOC = ThreadMem)(size_t n)
{
	import core.memory : GC;
	mixin(translateAllocator());
	auto alloc_ = thisAllocator();

	auto raw = alloc_.alloc(T.sizeof * n);
	T[] slice = (cast(T*)raw.ptr)[0 .. n];

	// Honor an opt-out from GC scanning declared on the element type.
	static if (__traits(hasMember, T, "NOGC")) enum NOGC = T.NOGC;
	else enum NOGC = false;

	static if (ALLOC.stringof != "AppMem" && hasIndirections!T && !NOGC) {
		// Newer druntimes accept a TypeInfo argument for precise scanning.
		static if (__traits(compiles, { GC.addRange(null, 0, typeid(string)); }()))
			GC.addRange(raw.ptr, raw.length, typeid(T));
		else
			GC.addRange(raw.ptr, raw.length);
	}

	// The memory is intentionally left untouched — every practical caller
	// performs its own initialization.
	return slice;
}
115 
/// Grows `array` to `n` elements (growing only) via `ALLOC`'s realloc,
/// re-registers the new block with the GC when the old one was registered,
/// and zeroes the added tail so the GC sees no stale pointers there.
T[] reallocArray(T, ALLOC = ThreadMem)(T[] array, size_t n) {
	import core.memory : GC;
	// NOTE(review): `n > array.length` also rejects an equal-size realloc,
	// although the message only mentions smaller sizes — confirm intent.
	assert(n > array.length, "Cannot reallocate to smaller sizes");
	mixin(translateAllocator());
	auto allocator = thisAllocator();
	// logTrace("realloc before ", ALLOC.stringof, ": ", cast(void*)array.ptr, ":", array.length);

	//logTrace("realloc fre ", T.stringof, ": ", array.ptr);
	auto mem = allocator.realloc((cast(void*)array.ptr)[0 .. array.length * T.sizeof], T.sizeof * n);
	//logTrace("realloc ret ", T.stringof, ": ", mem.ptr);
	auto ret = (cast(T*)mem.ptr)[0 .. n];
	// logTrace("realloc after ", ALLOC.stringof, ": ", mem.ptr, ":", mem.length);
	
	// Honor an opt-out from GC scanning declared on the element type.
	static if (__traits(hasMember, T, "NOGC")) enum NOGC = T.NOGC;
	else enum NOGC = false;
	
	static if (ALLOC.stringof != "AppMem" && hasIndirections!T && !NOGC) {
		// NOTE(review): the old range is removed only after realloc returns;
		// array.ptr is used purely as an address here, but there is a short
		// window where the GC still holds a range over freed memory —
		// confirm this ordering is intentional.
		GC.removeRange(array.ptr);
		static if (__traits(compiles, { GC.addRange(null, 0, typeid(string)); }()))
                GC.addRange(mem.ptr, mem.length, typeid(T));
        else
                GC.addRange(mem.ptr, mem.length);
		// Zero out unused capacity to prevent gc from seeing false pointers
		memset(mem.ptr + (array.length * T.sizeof), 0, (n - array.length) * T.sizeof);
	}
	
	return ret;
}
144 
/// Destroys elements of `array` (structs with elaborate destructors only,
/// skipping the first `offset` elements, bounded by `max_destroy`),
/// unregisters the memory from the GC when it was registered, and releases
/// the storage back to `ALLOC`. The passed slice is nulled on return.
void freeArray(T, ALLOC = ThreadMem)(auto ref T[] array, size_t max_destroy = size_t.max, size_t offset = 0)
{
	import core.memory : GC;
	mixin(translateAllocator());
	auto allocator = thisAllocator(true); // freeing. Avoid allocating in a dtor
	if (!allocator) return;

	// logTrace("free ", ALLOC.stringof, ": ", cast(void*)array.ptr, ":", array.length);
	// Honor an opt-out from GC scanning declared on the element type.
	static if (__traits(hasMember, T, "NOGC")) enum NOGC = T.NOGC;
	else enum NOGC = false;
	
	static if (ALLOC.stringof != "AppMem" && hasIndirections!T && !NOGC) {
		GC.removeRange(array.ptr);
	}

	static if (hasElaborateDestructor!T) { // calls destructors, but not for indirections...
		size_t i;
		foreach (ref e; array) {
			if (i < offset) { i++; continue; }
			// NOTE(review): this stops once i + offset == max_destroy, so a
			// non-zero offset shrinks the destroyed span by offset as well —
			// confirm whether `i == max_destroy + offset` was intended.
			if (i + offset == max_destroy) break;
			static if (is(T == struct) && !isPointer!T) .destroy(e);
			i++;
		}
	}
	allocator.free((cast(void*)array.ptr)[0 .. array.length * T.sizeof]);
	array = null;
}
172 
/// Injects a family of static `alloc`/`free`/`copy`/`realloc` helpers that
/// route through `ObjectAllocator`/`allocArray`/`reallocArray`/`freeArray`
/// bound to allocator `ALLOC`. `THIS` is the enclosing struct, forwarded so
/// those helpers can special-case it by name (e.g. "AppMem" skips GC ranges).
mixin template ConvenienceAllocators(alias ALLOC, alias THIS) {
	package enum ident = ALLOC;
static:
	/// Allocates and constructs an object of type `T`, forwarding `args`.
	auto alloc(T, ARGS...)(auto ref ARGS args) 
		if (!isArray!T)
	{
		return ObjectAllocator!(T, THIS).alloc(args);
	}
	
	/// Destroys and frees a struct/value object; nulls the caller's pointer.
	void free(T)(auto ref T* obj)
		if (!isArray!T && !is(T : Object))
	{
		scope(exit) obj = null;
		ObjectAllocator!(T, THIS).free(obj);
	}
	
	/// Destroys and frees a class instance; nulls the caller's reference.
	void free(T)(auto ref T obj)
		if (!isArray!T && is(T  : Object))
	{
		scope(exit) obj = null;
		ObjectAllocator!(T, THIS).free(obj);
	}

	/// Allocates an uninitialized array of `n` elements of `T`'s element type.
	auto alloc(T)(size_t n)
		if (isArray!T)
	{
		alias ElType = UnConst!(typeof(T.init[0]));
		return allocArray!(ElType, THIS)(n);
	}

	/// Returns a freshly allocated bitwise copy of `arr`.
	auto copy(T)(auto ref T arr)
		if (isArray!T)
	{
		alias ElType = UnConst!(typeof(arr[0]));
		auto arr_copy = allocArray!(ElType, THIS)(arr.length);
		// Guard: memcpy with a null source pointer is undefined even for
		// length 0, and an empty slice may carry a null ptr.
		if (arr.length > 0)
			memcpy(arr_copy.ptr, arr.ptr, arr.length * ElType.sizeof);

		return cast(T)arr_copy;
	}

	/// Grows `arr` to `n` elements; the caller's old slice is nulled on exit.
	auto realloc(T)(auto ref T arr, size_t n)
		if (isArray!T)
	{
		scope(exit) arr = null;
		// Instantiated with the array's exact (possibly const-qualified)
		// element type so the slice converts implicitly.
		auto arr_copy = reallocArray!(typeof(arr[0]), THIS)(arr, n);
		return cast(T) arr_copy;
	}
	
	/// Destroys the elements of `arr` (when applicable) and frees its storage.
	void free(T)(auto ref T arr)
		if (isArray!T)
	{
		alias ElType = typeof(arr[0]);
		scope(exit) arr = null;
		freeArray!(ElType, THIS)(arr);
	}

}
233 
/// Returns D source meant for `mixin(...)` in a scope that defines `ALLOC`.
/// The generated code declares `thisAllocator`: a function resolving the
/// allocator for `ALLOC.ident` (taking an `is_freeing` flag), or — when
/// `ALLOC` is the pool stack — a function pointer to the top of the stack.
string translateAllocator() { /// requires (ALLOC) template parameter
	return `
	static if (ALLOC.stringof != "PoolStack") {
		ReturnType!(getAllocator!(ALLOC.ident)) thisAllocator(bool is_freeing = false) {
			return getAllocator!(ALLOC.ident)(is_freeing);
		}
	}
	else {
		ReturnType!(ALLOC.top) function() thisAllocator = &ALLOC.top;
	}
	`;
}