/** Manual memory management routines.

Warning:
$(RED Never use functions like $(D malloc) directly) unless you know
what you are doing: the unaligned memory they return may lead
to random crashes, incorrect behaviour, and/or reduced performance.

Also, $(RED manual) $(I count) * $(I element size) $(RED multiplication often
leads to buffer overflow vulnerabilities) because the overflow check is easily forgotten.

Copyright: Denis Shelomovskij 2013

License: $(HTTP boost.org/LICENSE_1_0.txt, Boost License 1.0).

Authors: Denis Shelomovskij

Macros:
COREREF = $(HTTP dlang.org/phobos/core_$1.html#$2, $(D core.$1.$2))
*/
module unstd.memory.allocation;


import core.stdc.stdlib;
import core.stdc.string;
import core.exception;
import core.memory;

import std.traits;

import unstd.array;
import unstd.math;
import unstd.lifetime;
import unstd.memory.misc;

version(Windows) import WinHeap = unstd.windows.heap;

/**
Returns $(D true) if $(D A) is an unaligned allocator.

The following code should compile for any unaligned allocator.

----
A a = void;
auto p = a.tryUnalignedAllocate(cast(size_t) 1);
auto q = a.tryUnalignedReallocate(p, cast(size_t) 1, cast(size_t) 1);
a.unalignedFree(p);
static assert(is(typeof(p) == void*) && is(typeof(q) == void*));
----
*/
enum isUnalignedAllocator(A) = __traits(compiles,
{
	A a = void;
	auto p = a.tryUnalignedAllocate(cast(size_t) 1);
	auto q = a.tryUnalignedReallocate(p, cast(size_t) 1, cast(size_t) 1);
	a.unalignedFree(p);
	static assert(is(typeof(p) == void*) && is(typeof(q) == void*));
});

version(unittest) private struct _DummyUnalignedAllocator
{
pure nothrow @nogc:

	void* tryUnalignedAllocate(in size_t count)
	{ return null; }

	void* tryUnalignedReallocate(void* ptr, in size_t preserveCount, in size_t count)
	{ return null; }

	void unalignedFree(void* ptr)
	{ }
}

pure nothrow @nogc unittest
{
	static assert(!isUnalignedAllocator!int);
	static assert( isUnalignedAllocator!_DummyUnalignedAllocator);
}

/**
Requests a properly aligned block of memory of $(D count * T.sizeof)
bytes from $(D allocator).

If $(D initialize) is true the returned memory will be set to $(D T.init).

If allocation fails, $(D allocate) will also call $(COREREF exception, onOutOfMemoryError)
which is expected to throw an $(COREREF exception, OutOfMemoryError).

Preconditions:
$(D count != 0)

Returns:
The allocated array, or null if allocation failed.
*/
T[] allocate(T, A)(ref A allocator, in size_t count, in bool initialize = true, in bool gcScan = hasIndirections!T)
if(isUnalignedAllocator!A)
{
	T[] arr = allocator.tryAllocate!T(count, initialize, gcScan);
	if(!arr)
		onOutOfMemoryError();
	return arr;
}

/// ditto
T[] tryAllocate(T, A)(ref A allocator, in size_t count, in bool initialize = true, in bool gcScan = hasIndirections!T)
if(isUnalignedAllocator!A)
in { assert(count); }
body
{
	void* ptr = allocator.tryRawAllocate(T.alignof, T.sizeof, count, false, gcScan);
	if(!ptr)
		return null;
	T[] arr = (cast(T*) ptr)[0 .. count];
	if(initialize)
		setElementsToInitialState(arr);
	return arr;
}
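
// A minimal usage sketch of allocate/free ($(D cHeap), defined later in this
// module, is used here just for illustration; any unaligned allocator works):
unittest
{
	int[] arr = cHeap.allocate!int(4); // elements set to int.init, i.e. 0
	scope(exit) cHeap.free(arr);
	assert(arr.length == 4);
	assert(arr == [0, 0, 0, 0]);
}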

/**
Requests resize of a properly aligned block of memory allocated from
$(D allocator) or, if $(D array) is null, requests memory allocation like
$(MREF allocate)/$(MREF tryAllocate). Memory may be moved, but the
contents of $(D array)'s elements will be preserved.

If $(D initialize) is true and $(D array.length < newCount) the memory of
the "rest" elements will be set to $(D T.init).

If reallocation fails, $(D array) isn't changed.
$(D tryReallocate) returns whether reallocation succeeded.

If reallocation fails, $(D reallocate) will also call $(COREREF exception, onOutOfMemoryError)
which is expected to throw an $(COREREF exception, OutOfMemoryError).

Preconditions:
$(D newCount != 0)
*/
void reallocate(T, A)(ref A allocator, ref T[] array, in size_t newCount, in bool initialize = true, in bool gcScan = hasIndirections!T)
if(isUnalignedAllocator!A)
{
	if(!allocator.tryReallocate!T(array, newCount, initialize, gcScan))
		onOutOfMemoryError();
}

/// ditto
bool tryReallocate(T, A)(ref A allocator, ref T[] array, in size_t newCount, in bool initialize = true, in bool gcScan = hasIndirections!T)
if(isUnalignedAllocator!A)
in { assert(newCount); }
body
{
	import std.algorithm: min;

	void* ptr = array.ptr;
	const preserveCount = min(array.length, newCount);
	if(!allocator.tryRawReallocate(T.alignof, T.sizeof, ptr, preserveCount, newCount, false, gcScan))
		return false;
	array = (cast(T*) ptr)[0 .. newCount];
	if(initialize)
		setElementsToInitialState(array[preserveCount .. newCount]);
	return true;
}
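
// A usage sketch of reallocate ($(D cHeap) for illustration): preserved
// elements keep their values, the appended elements are set to T.init.
unittest
{
	int[] arr = cHeap.allocate!int(2, false);
	scope(exit) cHeap.free(arr);
	arr[0] = 1;
	arr[1] = 2;
	cHeap.reallocate(arr, 4);
	assert(arr == [1, 2, 0, 0]);
}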

/**
Deallocates the memory referenced by $(D array.ptr) from $(D allocator)
and sets $(D array) to null.

If $(D array.ptr) is null, no action occurs.
*/
void free(T, A)(ref A allocator, ref T[] array, in bool gcScan = hasIndirections!T)
if(isUnalignedAllocator!A)
{
	if(array.ptr)
	{
		allocator.free(array.ptr, gcScan);
		array = null;
	}
}

/**
Deallocates the memory referenced by $(D ptr) from $(D allocator).

If $(D ptr) is null, no action occurs.
*/
void free(T, A)(ref A allocator, T* ptr, in bool gcScan = hasIndirections!T)
if(isUnalignedAllocator!A)
{
	allocator.rawFree(T.alignof, ptr, gcScan);
}

unittest
{
	_DummyUnalignedAllocator a;
	int[] arr = a.tryAllocate!int(1);
	assert(!arr);
	arr = a.tryAllocate!int(1, false);
	assert(!arr);
	assert(!a.tryReallocate!int(arr, 1, false));
	a.free(arr);
}

/**
Requests an $(D alignment)-byte aligned block of memory of $(D count * elementSize)
bytes from $(D allocator).

If $(D zeroFill) is true the returned memory will be zero-filled.

If allocation fails, $(D rawAllocate) will also call $(COREREF exception, onOutOfMemoryError)
which is expected to throw an $(COREREF exception, OutOfMemoryError).

Preconditions:
$(D alignment != 0 && elementSize % alignment == 0 && count != 0)

Returns:
A pointer to the allocated memory, or null if allocation failed.
*/
void* rawAllocate(A)(ref A allocator, in size_t alignment, in size_t elementSize, in size_t count, in bool zeroFill = true, in bool gcScan = false)
if(isUnalignedAllocator!A)
{
	void* ptr = allocator.tryRawAllocate(alignment, elementSize, count, zeroFill, gcScan);
	if(!ptr)
		onOutOfMemoryError();
	return ptr;
}

/// ditto
void* tryRawAllocate(A)(ref A allocator, in size_t alignment, in size_t elementSize, in size_t count, in bool zeroFill = true, in bool gcScan = false)
if(isUnalignedAllocator!A)
in { assert(alignment && elementSize % alignment == 0 && count); }
body
{
	if(const buffBytes = memoryMult(elementSize, count))
		if(const totalBytes = memoryAdd(buffBytes, alignmentMemoryPadding(alignment)))
			if(void* p = allocator.tryUnalignedAllocate(totalBytes))
			{
				p = alignMemory(alignment, p);
				if(zeroFill)
					memset(p, 0, buffBytes);
				if(gcScan)
					GC.addRange(p, buffBytes);
				return p;
			}
	return null;
}
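
// An illustrative sketch of raw allocation: a 16-byte aligned, zero-filled
// block of 4 * 16 bytes ($(D cHeap) used for illustration):
unittest
{
	void* p = cHeap.rawAllocate(16, 16, 4);
	scope(exit) cHeap.rawFree(16, p);
	assert(p && cast(size_t) p % 16 == 0);
}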

/**
Requests resize of an $(D alignment)-byte aligned block of memory allocated
from $(D allocator) or, if $(D ptr) is null, requests memory allocation like
$(MREF rawAllocate)/$(MREF tryRawAllocate). Memory may be moved, but the contents
of the first $(D preserveCount) elements will be preserved.

If $(D zeroFill) is true and $(D preserveCount < newCount) the memory of
the "unpreserved" elements will be zero-filled.

If reallocation fails, $(D ptr) isn't changed.
$(D tryRawReallocate) returns whether reallocation succeeded.

If reallocation fails, $(D rawReallocate) will also call $(COREREF exception, onOutOfMemoryError)
which is expected to throw an $(COREREF exception, OutOfMemoryError).

Preconditions:
$(D alignment && elementSize % alignment == 0 && (ptr || !preserveCount) && preserveCount <= newCount && newCount)
*/
void rawReallocate(A)(ref A allocator, in size_t alignment, in size_t elementSize, ref void* ptr, in size_t preserveCount, in size_t newCount, in bool zeroFill = true, in bool gcScan = false)
if(isUnalignedAllocator!A)
{
	if(!allocator.tryRawReallocate(alignment, elementSize, ptr, preserveCount, newCount, zeroFill, gcScan))
		onOutOfMemoryError();
}

/// ditto
bool tryRawReallocate(A)(ref A allocator, in size_t alignment, in size_t elementSize, ref void* ptr, in size_t preserveCount, in size_t newCount, in bool zeroFill = true, in bool gcScan = false)
if(isUnalignedAllocator!A)
in { assert(alignment && elementSize % alignment == 0 && (ptr || !preserveCount) && preserveCount <= newCount && newCount); }
body
{
	if(!ptr)
	{
		ptr = allocator.tryRawAllocate(alignment, elementSize, newCount, zeroFill, gcScan);
		return !!ptr;
	}

	const preserveBuffBytes = elementSize * preserveCount;

	if(gcScan)
	{
		if(void* p = allocator.tryRawAllocate(alignment, elementSize, newCount, false, true))
		{
			rawCopy(ptr, p, preserveBuffBytes);
			allocator.rawFree(alignment, ptr, true);
			ptr = p;
			if(zeroFill)
				memset(ptr + preserveBuffBytes, 0, elementSize * (newCount - preserveCount));
			return true;
		}
		return false;
	}

	const padding = alignmentMemoryPadding(alignment);
	if(const buffBytes = memoryMult(elementSize, newCount))
		if(const totalBytes = memoryAdd(buffBytes, padding))
			if(const preserveTotalBytes = memoryAdd(preserveBuffBytes, padding))
			{
				const shift = ptr - dealignMemory(alignment, ptr);
				if(void* p = allocator.tryUnalignedReallocate(ptr - shift, preserveTotalBytes, totalBytes))
				{
					const newShift = getAlignMemoryShift(alignment, p);
					if(newShift != shift)
						rawCopy(p + shift, p + newShift, preserveBuffBytes);
					ptr = alignMemory(alignment, p);
					if(preserveCount < newCount && zeroFill)
						memset(ptr + preserveBuffBytes, 0, (newCount - preserveCount) * elementSize);
					return true;
				}
			}
	return false;
}
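
// An illustrative sketch of growing a raw 16-byte aligned block with
// rawReallocate ($(D cHeap) used for illustration; the newly added bytes
// are zero-filled because zeroFill defaults to true):
unittest
{
	void* p = cHeap.rawAllocate(16, 16, 2);   // two 16-byte elements
	scope(exit) cHeap.rawFree(16, p);
	cHeap.rawReallocate(16, 16, p, 2, 4);     // preserve 2 elements, grow to 4
	assert(p && cast(size_t) p % 16 == 0);
	foreach(b; (cast(ubyte*) p)[32 .. 64])
		assert(b == 0);
}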


/**
Deallocates the memory referenced by $(D ptr) from $(D allocator).

If $(D ptr) is null, no action occurs.
*/
void rawFree(A)(ref A allocator, in size_t alignment, void* ptr, in bool gcScan = false)
if(isUnalignedAllocator!A)
{
	if(ptr)
	{
		if(gcScan)
			GC.removeRange(ptr);
		allocator.unalignedFree(dealignMemory(alignment, ptr));
	}
}

unittest
{
	_DummyUnalignedAllocator a;
	void* p = a.tryRawAllocate(4, 4, 1);
	assert(!p);
	p = a.tryRawAllocate(4, 4, 1, false);
	assert(!p);
	assert(!a.tryRawReallocate(4, 4, p, 0, 1, false));
	a.rawFree(4, p);
}

version(unittest)
void testAllocator(A)(ref A a)
{
	auto longs = a.allocate!long(3, false);
	assert(longs.length == 3);
	a.free(longs);
	assert(!longs);

	foreach(size_t n; [0, 0xFFF, 0xFFFFFF, 0x10000000])
		assert(!a.tryAllocate!ubyte(size_t.max - n));

	auto chars = a.allocate!char(2);
	scope(exit) a.free(chars);
	assert(chars == [char.init, char.init]);
	chars[] = "ab";
	a.reallocate(chars, 3);
	assert(chars == ['a', 'b', char.init]);
	chars = chars[0 .. 1];
	a.reallocate(chars, 2);
	assert(chars == ['a', char.init]);
}

/**
An unaligned shared allocator which can be safely used from multiple threads.
*/
@property ref heap() @safe nothrow
{
	version(Windows)
	{
		static _heap = WinHeap.HeapAllocator.init;
		try
		if(!_heap.heap.associated)
			_heap = WinHeap.HeapAllocator(WinHeap.processHeap.handle, false);
		catch(Exception) assert(0);
		return _heap;
	}
	else
	{
		// FIXME: Assume C heap is thread safe.
		return cHeap;
	}
}

unittest
{
	testAllocator(heap);
}

/**
An unaligned thread-local allocator.

It can be faster than $(MREF heap) as it doesn't require synchronization.

Note:
Class destructors are called asynchronously from the $(I GC) thread on
collection, so $(D threadHeap) in a destructor may refer to a different
thread's heap than the one the class instance was created and used in.

Bugs:
On non-$(I Windows) systems it behaves just like $(MREF heap),
i.e. it may lock a shared mutex.
*/
@property ref threadHeap() @safe nothrow
{
	version(Windows)
	{
		static _threadHeap = WinHeap.HeapAllocator.init;
		try
		if(!_threadHeap.heap.associated)
			_threadHeap = WinHeap.HeapAllocator(WinHeap.Heap.CreateOptions.noSerialize);
		catch(Exception) assert(0);
		return _threadHeap;
	}
	else
	{
		return heap;
	}
}

unittest
{
	testAllocator(threadHeap);
}

struct CHeap
{
	@disable this();
	@disable this(this);

static:
	// Allocate memory with C's `malloc`.
	void* tryUnalignedAllocate(in size_t count) @trusted nothrow @nogc
	in { assert(count); }
	body
	{
		// Workaround snn @@@BUG11646@@@
		version(DigitalMars) version(Win32)
			if(count > 0xD5550000) return null;
		return malloc(count);
	}

	// Reallocate memory with C's `realloc`.
	void* tryUnalignedReallocate(void* ptr, in size_t preserveCount, in size_t count) @system nothrow @nogc
	in { assert(ptr && preserveCount <= count && count); }
	body
	{ return realloc(ptr, count); }

	// Free memory with C's `free`.
	void unalignedFree(void* ptr) @system nothrow @nogc
	in { assert(ptr); }
	body
	{ core.stdc.stdlib.free(ptr); }
}

__gshared CHeap _cHeap = void;

/// An unaligned allocator which uses C's $(D malloc)/$(D free).
@property ref CHeap cHeap() @trusted nothrow @nogc
{ return _cHeap; }

nothrow unittest
{
	testAllocator(cHeap);
}

/**
Creates a temporary buffer.

The returned object has two properties: $(D ptr) to access the buffer as $(D T*)
and $(D arr) to access it as $(D T[]).

The temporary buffer is valid until the returned object is destroyed.
Thus if the returned object is assigned to a variable the temporary is
valid until the variable goes out of scope. If the returned object isn't
assigned to a variable it will be destroyed at the end of the expression
that created it.

If $(D count <= stackCount), or $(D stackCount) isn't specified and
no more than 1 KiB is requested, $(D tempAlloc) will use a stack-allocated
buffer; for larger requests it will allocate the temporary buffer
from $(MREF threadHeap).

Preconditions:
$(D count != 0)

Note:
This function can be used in a function call expression (like
$(D needBuffFunc(tempAlloc(n).ptr))). Incorrect usage of this function may
lead to memory corruption.
See the $(RED WARNING) in $(D tempCString)'s $(B Examples) section
($(D tempCString) is an analog of $(D tempAlloc) for $(I C strings)).

See_Also:
$(DPREF2 c, string, tempCString)
*/
auto tempAlloc(T)(in size_t count, in bool initialize = true) @system
{ return tempAlloc!(T, 1024 / T.sizeof)(count, initialize); }

/// ditto
auto tempAlloc(T, size_t stackCount)(in size_t count, in bool initialize = true) @system
in { assert(count); }
body
{
	static assert(memoryMult(T.sizeof, stackCount));

	static struct Res
	{
		@disable this();
		@disable this(this);

		@property T* ptr() pure nothrow
		{ return _allocPtr ? _allocPtr : cast(T*) _buff.ptr; }

		@property T[] arr() pure nothrow
		{ return ptr[0 .. _length]; }

		~this()
		{ threadHeap.free(_allocPtr); }

	private:
		T* _allocPtr;
		size_t _length;
		// Note: can't use T[stackCount] for types with alignment requirements as there is
		// no guarantee of alignment for stack-allocated variables. See dmd @@@BUG2278@@@.
		static if(T.alignof != 1)
			RawAutoalignedBuff!(T.alignof, T.sizeof * stackCount) _buff;
		else
			T[stackCount] _buff;
	}

	// TODO: Don't stack-allocate an uninitialized array to
	// avoid confusing the imprecise GC.

	// Note: res can't contain a pointer to its _buff as structs are movable.

	Res res = void;
	const needAllocate = count > stackCount;
	static if(T.alignof != 1) if(!needAllocate)
		res._buff.initialize(T.sizeof * count, false);
	if(needAllocate || initialize)
	{
		T[] arr = needAllocate ?
			threadHeap.allocate!T(count, false) : (cast(T*) res._buff.ptr)[0 .. count];
		if(initialize)
			setElementsToInitialState(arr);
		res._allocPtr = needAllocate ? arr.ptr : null;
	}
	else
	{
		res._allocPtr = null;
	}
	res._length = count;
	return res;
}

/// ditto
auto tempAlloc(T, size_t stackCount : 0)(in size_t count, in bool initialize = true) @system
in { assert(count); }
body
{
	static struct Res
	{
		@disable this();
		@disable this(this);

		@property T* ptr() pure nothrow
		{ return _arr.ptr; }

		@property T[] arr() pure nothrow
		{ return _arr; }

		~this()
		{ threadHeap.free(_arr.ptr); }

	private:
		T[] _arr;
	}

	Res res = void;
	res._arr = threadHeap.allocate!T(count, initialize);
	return res;
}

@system unittest
{
	{
		auto tmp = tempAlloc!int(2);
		assert(tmp.ptr == tmp._buff.ptr && tmp.arr == [0, 0]);
	}
	{
		auto tmp = tempAlloc!(int, 0)(2);
		assert(tmp.arr == [0, 0]);
	}
	assert(tempAlloc!char(2).arr == [0xFF, 0xFF]);

	static struct S
	{
		@disable this();
		@disable this(this);
	}
	assert(tempAlloc!S(1).arr == [S.init]);
}
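
// A small usage sketch: fill a temporary buffer and read it back. memset is
// used merely as a stand-in for any routine expecting a pointer and a length;
// the buffer stays valid until tmp goes out of scope.
@system unittest
{
	auto tmp = tempAlloc!ubyte(16, false);
	memset(tmp.ptr, 0xAB, 16);
	assert(tmp.arr[0] == 0xAB && tmp.arr[15] == 0xAB);
}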


private:

// Helper functions for memory alignment.
// Note: maximum allowed alignment is 256.
// ----------------------------------------------------------------------------------------------------

size_t alignmentMemoryPadding(in size_t alignment) @safe pure nothrow @nogc
in { assert(alignment && alignment <= ubyte.max + 1); }
body { return alignment == 1 ? 0 : alignment; }

size_t getAlignMemoryShift(in size_t alignment, in void* unalignedPtr) @trusted pure nothrow @nogc
in { assert(unalignedPtr && alignment && alignment <= ubyte.max + 1); }
body
{
	if(alignment == 1)
		return 0;

	const shift = alignUp(alignment, cast(size_t) unalignedPtr) - cast(size_t) unalignedPtr;
	return shift ? shift : alignment;
}

void* alignMemory(in size_t alignment, void* unalignedPtr) @system pure nothrow @nogc
in { assert(unalignedPtr && alignment && alignment <= ubyte.max + 1); }
out(res) { assert(dealignMemory(alignment, res) == unalignedPtr); }
body
{
	if(alignment == 1)
		return unalignedPtr;

	size_t shift = getAlignMemoryShift(alignment, unalignedPtr);
	void* ptr = unalignedPtr + shift;
	(cast(ubyte*) ptr)[-1] = cast(ubyte)(shift - 1);
	return ptr;
}

inout(void)* dealignMemory(in size_t alignment, inout void* alignedPtr) @system pure nothrow @nogc
in { assert(alignedPtr && alignment && alignment <= ubyte.max + 1); }
out(res) { assert(res + getAlignMemoryShift(alignment, res) == alignedPtr); }
body
{
	if(alignment == 1)
		return alignedPtr;

	const d = (cast(ubyte*) alignedPtr)[-1] + 1;
	return alignedPtr - d;
}

pure nothrow @nogc unittest
{
	void[alignmentMemoryPadding(256)] buff = void;
	assert(alignMemory(1, buff.ptr) == buff.ptr);
	foreach(alignment; [4, 16, 64, 128, 256])
		assert(alignMemory(alignment, buff.ptr) != buff.ptr);
}
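
// A small illustrative sketch of the shift-byte scheme used above: alignMemory
// stores (shift - 1) in the byte just before the returned pointer, so that
// dealignMemory can recover the original unaligned pointer.
pure nothrow @nogc unittest
{
	void[16 + alignmentMemoryPadding(16)] buff = void;
	void* p = alignMemory(16, buff.ptr);
	assert(cast(size_t) p % 16 == 0);
	assert(dealignMemory(16, p) == buff.ptr);
}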


// Helper functions for memory amount manipulation.
// ----------------------------------------------------------------------------------------------------

// Returns `bytes1 + bytes2`, or 0 on overflow.
public size_t memoryAdd(in size_t bytes1, in size_t bytes2) @safe pure nothrow @nogc
in { assert(bytes1 || bytes2); }
body
{
	const size_t bytes = bytes1 + bytes2;
	if(bytes < bytes1)
		return 0;
	return bytes;
}

// Returns `elementSize * count`, or 0 on overflow.
public size_t memoryMult(in size_t elementSize, in size_t count) @safe pure nothrow @nogc
in { assert(elementSize && count); }
body
{
	const size_t bytes = elementSize * count;
	if(bytes / elementSize != count)
		return 0;
	return bytes;
}

@safe pure nothrow @nogc unittest
{
	assert( memoryAdd(3, 4) == 7);
	assert( memoryAdd(1, 0) == 1);
	assert( memoryAdd(2, -3) == size_t.max);
	assert(!memoryAdd(3, -1));
	assert( memoryMult(3, 4) == 12);
	assert( memoryMult(3, 1) == 3);
	assert( memoryMult(1, -1) == size_t.max);
	assert(!memoryMult(3, -1));
}