/**
	FreeList allocator proxy templates used to prevent memory fragmentation
	in the base allocator.

	Copyright: © 2012-2013 RejectedSoftware e.K.
	           © 2014-2015 Etienne Cimon
	License: Subject to the terms of the MIT license.
	Authors: Sönke Ludwig, Etienne Cimon
*/
module memutils.freelist;

import memutils.allocators;
import memutils.memory;
import std.algorithm : min;
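
// Usage sketch: the proxy is constructed over a concrete base allocator and is
// then used through the generic alloc/realloc/free interface. MallocAllocator
// is assumed as the base here because it is the instantiation this module
// special-cases below; the exact sizes in the comments follow the default
// bucket scheme (powers of two starting at 8 bytes).
unittest {
	auto alloc = new AutoFreeListAllocator!MallocAllocator();
	void[] buf = alloc.alloc(100);   // served from the 128-byte free list
	buf = alloc.realloc(buf, 200);   // moved to the 256-byte free list
	alloc.free(buf);                 // slot is recycled instead of being returned to malloc
}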

final class AutoFreeListAllocator(Base : Allocator) : Allocator {
	import std.typetuple;

	private {
		// Buckets are powers of two: 1 << (i + minExponent), i.e. 8 bytes up to
		// 16 KiB for the defaults below. Larger requests bypass the free lists.
		enum minExponent = 3;
		enum freeListCount = 12;
		static if (is(Base == MallocAllocator) && __VERSION__ >= 2072)
			FreeListMallocAllocator[freeListCount] m_freeLists;
		else
			FreeListAlloc!Base[freeListCount] m_freeLists;
		Base m_baseAlloc;
	}

	this()
	{
		version(TLSGC) { } else {
			if (!mtx) mtx = new Mutex;
		}
		m_baseAlloc = getAllocator!Base();
		foreach (i; iotaTuple!freeListCount) {
			static if (is(Base == MallocAllocator) && __VERSION__ >= 2072)
				m_freeLists[i] = new FreeListMallocAllocator(nthFreeListSize!(i));
			else
				m_freeLists[i] = new FreeListAlloc!Base(nthFreeListSize!(i));
		}
	}

	~this() {
		foreach(fl; m_freeLists)
			destroy(fl);
	}

	void[] alloc(size_t sz)
	{
		version(TLSGC) { } else {
			mtx.lock_nothrow();
			scope(exit) mtx.unlock_nothrow();
		}
		logTrace("AFL alloc ", sz);
		// serve the request from the smallest free list that fits it
		foreach (i; iotaTuple!freeListCount)
			if (sz <= nthFreeListSize!(i))
				return m_freeLists[i].alloc().ptr[0 .. sz];
		// oversized requests go straight to the base allocator
		if (sz > nthFreeListSize!(freeListCount-1)) return m_baseAlloc.alloc(sz);
		assert(false);
	}

	void[] realloc(void[] data, size_t sz)
	{
		version(TLSGC) { } else {
			mtx.lock_nothrow();
			scope(exit) mtx.unlock_nothrow();
		}
		foreach (fl; m_freeLists) {
			if (data.length <= fl.elementSize) {
				// just grow the slice if it still fits into the free list slot
				if (sz <= fl.elementSize)
					return data.ptr[0 .. sz];

				// otherwise re-allocate
				auto newd = alloc(sz);
				assert(newd.ptr+sz <= data.ptr || newd.ptr >= data.ptr+data.length, "New block overlaps old one!?");
				auto len = min(data.length, sz);
				newd[0 .. len] = data[0 .. len];
				free(data);
				return newd;
			}
		}
		// forward large blocks to the base allocator
		return m_baseAlloc.realloc(data, sz);
	}

	void free(void[] data)
	{
		version(TLSGC) { } else {
			mtx.lock_nothrow();
			scope(exit) mtx.unlock_nothrow();
		}
		logTrace("AFL free ", data.length);
		foreach(i; iotaTuple!freeListCount) {
			if (data.length <= nthFreeListSize!i) {
				m_freeLists[i].free(data.ptr[0 .. nthFreeListSize!i]);
				return;
			}
		}
		if (data.length > nthFreeListSize!(freeListCount-1)) {
			m_baseAlloc.free(data);
			return;
		}
		assert(false);
	}

	// size in bytes of the i-th free list bucket: 1 << (i + minExponent)
	private static pure size_t nthFreeListSize(size_t i)() { return 1 << (i + minExponent); }
	// compile-time tuple (0, 1, ..., i-1), used to unroll the bucket loops above
	private template iotaTuple(size_t i) {
		static if (i > 1) alias iotaTuple = TypeTuple!(iotaTuple!(i-1), i-1);
		else alias iotaTuple = TypeTuple!(0);
	}
}

final class FreeListAlloc(Base : Allocator) : Allocator
{
	import memutils.vector : Vector;
	import memutils.utils : Malloc;
	// HashMap backs the DebugLeaks bookkeeping below; memutils.hashmap is
	// assumed to be where it lives.
	version(DebugLeaks) import memutils.hashmap : HashMap;
	private static struct FreeListSlot { FreeListSlot* next; }
	private {
		immutable size_t m_elemSize;
		Base m_baseAlloc;
		FreeListSlot* m_firstFree = null;
		size_t space;
		version(DebugLeaks) HashMap!(size_t, size_t, Malloc) m_owned;
		size_t m_nalloc = 0;
		size_t m_nfree = 0;
	}

	~this() {
		import core.thread : thread_isMainThread;
		version(DebugLeaks)//if (!thread_isMainThread)
		{
			if (m_owned.length > 0)
			{
				import std.stdio : writeln;
				foreach(const ref size_t ptr, const ref size_t size; m_owned)
					writeln( cast(void*)ptr, " : ", size);
				asm { int 3; }
			}
		}
		while ( m_firstFree ){
			auto slot = m_firstFree;
			m_firstFree = slot.next;
			slot.next = null;
			m_baseAlloc.free( (cast(void*)slot)[0 .. m_elemSize] );
			m_nfree--;
		}
		//foreach(size_t slot; m_owned[])
		//	m_baseAlloc.free( (cast(void*)slot)[0 .. m_elemSize]);
	}

	this(size_t elem_size)
	{
		assert(elem_size >= size_t.sizeof);
		m_elemSize = elem_size;
		m_baseAlloc = getAllocator!Base();
		//logTrace("Create FreeListAlloc %d", m_elemSize);
	}

	@property size_t elementSize() const { return m_elemSize; }

	void[] alloc(size_t sz)
	{
		assert(sz == m_elemSize, "Invalid allocation size.");
		return alloc();
	}

	void[] alloc()
	{
		void[] mem;
		// defensive resync in case the free counter and the list head diverged
		if (m_nfree == 0) m_firstFree = null;
		if( m_firstFree ){
			// pop a recycled slot from the intrusive free list
			auto slot = m_firstFree;
			if (--m_nfree == 0)
				m_firstFree = null;
			else {
				m_firstFree = slot.next;
			}
			mem = (cast(void*)slot)[0 .. m_elemSize];
		} else {
			// no recycled slot available, fall back to the base allocator
			mem = m_baseAlloc.alloc(m_elemSize);
			//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);
		}
		version(DebugLeaks)//if (!thread_isMainThread)
		{
			//import std.stdio : writeln;
			//Exception ex = new Exception("");
			//try throw ex; catch (Exception e) { 
			//writeln(mem.ptr, " : ", e.toString());
			//}
			m_owned[cast(size_t)mem.ptr] = m_elemSize;
		}
		m_nalloc++;
		//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);
		return mem;
	}

	void[] realloc(void[] mem, size_t sz)
	{
		// blocks in a free list never change size, so only same-size reallocations are valid
		version(DebugLeaks)//if (!thread_isMainThread)
			m_owned[cast(size_t)mem.ptr] = sz;
		assert(mem.length == m_elemSize);
		assert(sz == m_elemSize);
		return mem;
	}

	void free(void[] mem)
	{
		assert(mem.length == m_elemSize, "Memory block passed to free has wrong size.");
		// push the block onto the intrusive free list instead of returning it to the base allocator
		auto s = cast(FreeListSlot*)mem.ptr;
		s.next = m_firstFree;

		version(DebugLeaks)//if (!thread_isMainThread)
			m_owned.remove(cast(size_t)mem.ptr);
		m_firstFree = s;
		m_nalloc--;
		m_nfree++;
	}
}

// Workaround for a 2.072.0 front-end regression that prevented instantiating the
// FreeListAlloc template with MallocAllocator; this is a near-verbatim copy of
// FreeListAlloc specialized for MallocAllocator.
private final class FreeListMallocAllocator
{
	import memutils.vector : Vector;
	import memutils.utils : Malloc;
	// HashMap backs the DebugLeaks bookkeeping below; memutils.hashmap is
	// assumed to be where it lives.
	version(DebugLeaks) import memutils.hashmap : HashMap;
	private static struct FreeListSlot { FreeListSlot* next; }
	private {
		immutable size_t m_elemSize;
		MallocAllocator m_baseAlloc;
		FreeListSlot* m_firstFree = null;
		size_t space;
		version(DebugLeaks) HashMap!(size_t, size_t, Malloc) m_owned;
		size_t m_nalloc = 0;
		size_t m_nfree = 0;
	}

	~this() {
		import core.thread : thread_isMainThread;
		version(DebugLeaks)//if (!thread_isMainThread)
		{
			if (m_owned.length > 0)
			{
				import std.stdio : writeln;
				foreach(const ref size_t ptr, const ref size_t size; m_owned)
					writeln( cast(void*)ptr, " : ", size);
				asm { int 3; }
			}
		}
		while ( m_firstFree ){
			auto slot = m_firstFree;
			m_firstFree = slot.next;
			slot.next = null;
			m_baseAlloc.free( (cast(void*)slot)[0 .. m_elemSize] );
			m_nfree--;
		}
		//foreach(size_t slot; m_owned[])
		//	m_baseAlloc.free( (cast(void*)slot)[0 .. m_elemSize]);
	}

	this(size_t elem_size)
	{
		assert(elem_size >= size_t.sizeof);
		m_elemSize = elem_size;
		m_baseAlloc = getAllocator!MallocAllocator();
		//logTrace("Create FreeListAlloc %d", m_elemSize);
	}

	@property size_t elementSize() const { return m_elemSize; }

	void[] alloc(size_t sz)
	{
		assert(sz == m_elemSize, "Invalid allocation size.");
		return alloc();
	}

	void[] alloc()
	{
		void[] mem;
		if (m_nfree == 0) m_firstFree = null;
		if( m_firstFree ){
			auto slot = m_firstFree;
			if (--m_nfree == 0)
				m_firstFree = null;
			else {
				m_firstFree = slot.next;
			}
			mem = (cast(void*)slot)[0 .. m_elemSize];
		} else {
			mem = m_baseAlloc.alloc(m_elemSize);
			//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);
		}
		version(DebugLeaks)//if (!thread_isMainThread)
		{
			//import std.stdio : writeln;
			//Exception ex = new Exception("");
			//try throw ex; catch (Exception e) { 
			//writeln(mem.ptr, " : ", e.toString());
			//}
			m_owned[cast(size_t)mem.ptr] = m_elemSize;
		}
		m_nalloc++;
		//logInfo("Alloc %d bytes: alloc: %d, free: %d", SZ, s_nalloc, s_nfree);
		return mem;
	}

	void[] realloc(void[] mem, size_t sz)
	{
		version(DebugLeaks)//if (!thread_isMainThread)
			m_owned[cast(size_t)mem.ptr] = sz;
		assert(mem.length == m_elemSize);
		assert(sz == m_elemSize);
		return mem;
	}

	void free(void[] mem)
	{
		assert(mem.length == m_elemSize, "Memory block passed to free has wrong size.");
		auto s = cast(FreeListSlot*)mem.ptr;
		s.next = m_firstFree;

		version(DebugLeaks)//if (!thread_isMainThread)
			m_owned.remove(cast(size_t)mem.ptr);
		m_firstFree = s;
		m_nalloc--;
		m_nfree++;
	}
}