/** Weak reference implementation.

Copyright: Denis Shelomovskij 2013

License: $(HTTP boost.org/LICENSE_1_0.txt, Boost License 1.0).

Authors: Denis Shelomovskij
*/
module unstd.memory.weakref;


import core.exception;
import core.memory;
import core.atomic;

import unstd.array;
import unstd.casts;
import unstd.memory.allocation;


/**
Detect whether a weak reference to type $(D T) can be created.

A weak reference can be created for a $(D class), $(D interface), or $(D delegate).

Warning:
The context of a $(D delegate) must be a class instance. For example, creating
a weak reference for a $(D delegate) created from a $(D struct) member function
will result in undefined behavior.

$(RED Weak references will not work for closures) unless enhancement $(DBUGZILLA 9601)
is implemented, because closures are currently not created as regular D objects.
*/
enum isWeakReferenceable(T) = is(T == class) || is(T == interface) || is(T == delegate);

/**
Implements a weak reference.

Note: The class contains a pointer to the target object, so it behaves as
a normal reference if placed in a GC block without the $(D NO_SCAN) attribute.

Tip: This behaves like C#'s short weak reference or Java's weak reference.
*/
final @trusted class WeakReference(T)
if(isWeakReferenceable!T)
{
    /** Create a weak reference for $(D target).

    Preconditions:
    $(D target !is null)
    */
    this(T target)
    in { assert(target); }
    body
    {
        _data.target = target;
        rt_attachDisposeEvent(_targetToObj(target), &onTargetDisposed);
    }

    /// Determines whether the referenced object is still alive, i.e. not finalized.
    @property bool alive() const pure nothrow @nogc
    { return !!atomicLoad(_data.ptr); }

    /**
    Returns the referenced object if it isn't finalized,
    thus creating a strong reference to it.
    Returns null otherwise.
    */
    @property inout(T) target() inout nothrow
    {
        return _data.getTarget();
    }

    ~this()
    {
        if(T t = target)
        {
            rt_detachDisposeEvent(_targetToObj(t), &onTargetDisposed);
        }
    }

private:
    shared ubyte[T.sizeof] _dataStore;

    @property ref inout(_WeakData!T) _data() inout pure nothrow @nogc
    {
        return _dataStore.viewAs!(_WeakData!T);
    }

    void onTargetDisposed(Object) pure nothrow @nogc
    {
        atomicStore(_data.ptr, cast(shared void*) null);
    }
}

/// Convenience function that returns a $(D WeakReference!T) object for $(D target).
@safe WeakReference!T weakReference(T)(T target)
if(isWeakReferenceable!T)
{
    return new WeakReference!T(target);
}

///
unittest
{
    auto weak = weakReference(new Object());
    // ...
    if(auto obj = weak.target)
    {
        // Still alive! Let's kill it by hand.
        destroy(obj);
        assert(!weak.alive && !weak.target);
    }
}

///
unittest
{
    auto weak = weakReference(&(new Object()).toString);
    // ...
    if(auto del = weak.target)
    {
        // It's alive! Let's check and kill it by hand.
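        // The delegate's context pointer (`del.ptr`) is the Object created
        // above, so destroying it below finalizes the weakly referenced target.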
        assert(del() == "object.Object");
        destroy(cast(Object) del.ptr);
        assert(!weak.alive && !weak.target);
    }
}

@safe unittest
{
    void destroy(T)(T t) @trusted { .destroy(t); } // to make the unittest `@safe`

    {
        auto o = new Object();
        auto w = weakReference(o);
        assert(w.alive && w.target is o);
        destroy(o);
        assert(!w.alive && !w.target);
    }

    interface I { }
    class C: I { void f() {} }
    {
        I i = new C();
        auto w = weakReference(i);
        assert(w.alive && w.target is i);
        destroy(i);
        assert(!w.alive && !w.target);
    }
    {
        auto c = new C();
        auto w = weakReference(&c.f);
        assert(w.alive && w.target is &c.f);
        destroy(c);
        assert(!w.alive && !w.target);
    }
}

/**
Implements a weak reference array.

It gives better performance when working with
multiple weak references at once.
*/
final @trusted class WeakReferenceArray(T)
if(isWeakReferenceable!T)
{
    /**
    Create a weak reference array with initial capacity $(D initialCapacity).

    Preconditions:
    $(D initialCapacity != 0)
    */
    this(in size_t initialCapacity)
    in { assert(initialCapacity); }
    body
    {
        _data = cast(_WeakData!T*) heap.allocate!T(initialCapacity, false, false).ptr;
        _capacity = initialCapacity;
    }

    /// Total _count of (possibly dead) weak references.
    @property size_t count() const @safe pure nothrow @nogc
    { return _count; }

    /// Total count of alive weak references.
    @property size_t aliveCount() const pure nothrow @nogc
    { return atomicLoad(_aliveCount); }

    /// Returns the _capacity of the array.
    @property size_t capacity() const @safe pure nothrow @nogc
    { return _capacity; }

    /**
    Determines whether the array behaves as a _hard reference.
    $(D false) by default.
    */
    @property bool hard() const @safe pure nothrow @nogc
    { return _hard; }

    /**
    Returns the array's internal buffer, which can be safely used while
    the array behaves as a hard reference.

    The array contains $(D aliveCount) non-null references.
    Use $(D removeDead) before retrieving the buffer to
    get an array of only alive objects.

    Note:
    The retrieved buffer may become invalid after addition of an object
    into the array if $(D capacity == count), or after a $(D reserve) or
    $(D removeDead) call.

    Preconditions:
    $(D hard)
    */
    @property inout(T)[] buff() inout pure nothrow @nogc
    in { assert(hard); }
    body
    { return (cast(inout T*) _data)[0 .. _count]; }

    /**
    Appends a new weak reference to $(D target) to the array.
    */
    void opOpAssign(string op : "~")(T target)
    {
        if(_count == _capacity)
        {
            if(_capacity * 2 < _capacity)
                onOutOfMemoryError();
            reserve(_capacity * 2);
        }
        _data[_count++].target = target;
        if(!target)
            return;
        atomicOp!`+=`(_aliveCount, 1);
        rt_attachDisposeEvent(_targetToObj(target), &onTargetDisposed);
    }

    /**
    Returns the $(D i)-th referenced object if it isn't finalized,
    thus creating a strong reference to it.
    Returns null otherwise.
    */
    inout(T) opIndex(in size_t i) inout nothrow
    {
        version(D_NoBoundsChecks) { }
        else if(i >= _count)
            onRangeError();

        if(_hard)
            return _data[i].target;

        return _data[i].getTarget();
    }

    /// Changes the $(D i)-th referenced object.
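    /// Attaches and detaches the targets' dispose events as needed, adjusts
    /// $(D aliveCount), and temporarily makes the array behave as a hard
    /// reference while it is being updated.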
    void opIndexAssign(T target, in size_t i)
    {
        version(D_NoBoundsChecks) { }
        else if(i >= _count)
            onRangeError();

        const wasHard = hard;
        if(!wasHard) makeHard();
        scope(exit) if(!wasHard) makeWeak();

        if(_data[i].target is target)
            return;

        auto prevObject = _data[i].ptr ? _targetToObj(_data[i].target) : null;
        auto object = target ? _targetToObj(target) : null;
        _data[i].target = target;

        if(!prevObject || !object)
            (cast() _aliveCount) += object ? 1 : -1;

        if(prevObject is object)
            return; // no need to attach/detach dispose events

        if(prevObject)
        {
            bool foundPrev = false;
            foreach(j, t; buff) if(t && j != i)
            {
                foundPrev |= _targetToObj(t) is prevObject;
                if(foundPrev)
                    break;
            }
            if(!foundPrev)
                rt_detachDisposeEvent(prevObject, &onTargetDisposed);
        }
        if(object)
            rt_attachDisposeEvent(object, &onTargetDisposed);
    }

    /// Reserve at least $(D newCapacity) elements for appending.
    void reserve(in size_t newCapacity)
    {
        if(newCapacity <= _capacity)
            return;

        const wasHard = hard;
        if(!wasHard) makeHard();
        scope(exit) if(!wasHard) makeWeak();

        _capacity = newCapacity;
        T[] arr = buff;
        heap.reallocate(arr, _capacity, false, false);
        _data = cast(_WeakData!T*) arr.ptr;
    }

    /// Remove dead weak references from the array. This may decrease $(D count).
    void removeDead() nothrow
    {
        const wasHard = hard;
        if(!wasHard) makeHard();

        for(size_t i = 0; i < count; )
        {
            // NOTE: This may be optimized but we assume small arrays for now.
            if(!_data[i].ptr)
                rawCopy(_data + i + 1, _data + i, --_count - i);
            else
                ++i;
        }

        if(!wasHard) makeWeak();
    }

    /// Force the array to behave as a weak reference.
    void makeWeak() nothrow
    {
        if(!_hard)
            return;
        _hard = false;
        GC.removeRange(cast(void*) _data);
    }

    /// Force the array to behave as a hard reference.
    void makeHard() nothrow
    {
        if(_hard)
            return;
        _hard = true;
        GC.addRange(cast(void*) _data, T.sizeof * _count);
    }

    ~this()
    {
        makeHard();

        foreach(t; buff) if(t)
            rt_detachDisposeEvent(_targetToObj(t), &onTargetDisposed);

        heap.free(cast(T*) _data, false);
    }

private:
    size_t _capacity, _count = 0;
    bool _hard = false;
    shared size_t _aliveCount = 0;
    _WeakData!T* _data;

    void onTargetDisposed(Object obj) pure nothrow @nogc
    {
        version(assert) size_t repeatCount = 0;
        foreach(ref d; _data[0 .. _count]) if(d.ptr && _targetToObj(d.target) is obj)
        {
            atomicOp!`-=`(_aliveCount, 1);
            atomicStore(d.ptr, cast(shared void*) null);
            static if(is(T == delegate))
                d.funcptr = null;
            version(assert) ++repeatCount;
        }
        version(assert) assert(repeatCount);
    }
}


/**
Convenience function that returns a $(D WeakReferenceArray!T)
with initial capacity $(D initialCapacity).
*/
@safe WeakReferenceArray!T weakReferenceArray(T)(in size_t initialCapacity = 64)
if(isWeakReferenceable!T)
{
    return new WeakReferenceArray!T(initialCapacity);
}
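
// A minimal usage sketch of `weakReferenceArray`, added for illustration
// (not part of the original test suite): append targets and read them back
// via indexing while they are still alive.
unittest
{
    auto o1 = new Object(), o2 = new Object();
    auto arr = weakReferenceArray!Object(2);
    arr ~= o1;
    arr ~= o2;
    assert(arr.count == 2 && arr.aliveCount == 2);
    assert(arr[0] is o1 && arr[1] is o2);
}
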
@safe unittest
{
    void destroy(T)(T t) @trusted { .destroy(t); } // to make the unittest `@safe`

    {
        auto o = new Object();
        auto w = weakReferenceArray!Object(1);
        w ~= o;
        assert(w.aliveCount == 1 && w[0] is o);
        destroy(o);
        assert(!w.aliveCount && !w[0]);

        auto o1 = new Object(), o2 = new Object(), o3 = new Object();
        w ~= o1;
        w ~= o2;
        w ~= o3;
        assert(!w.hard && w.aliveCount == 3 && w[1] is o1 && w[2] is o2 && w[3] is o3);
        w.makeHard();
        assert(w.hard && w.aliveCount == 3 && w.buff == [null, o1, o2, o3]);
        destroy(o2);
        assert(w.aliveCount == 2 && w.buff == [null, o1, null, o3]);
        w.removeDead();
        assert(w.aliveCount == 2 && w.buff == [o1, o3]);
        w.makeWeak();
        assert(!w.hard);
        destroy(o1);
        destroy(o3);
        assert(!w.aliveCount);
        assert(w.count == 2);
        w.removeDead();
        assert(!w.count);
    }

    {
        auto o = new Object(), o1 = new Object(), o2 = new Object();
        auto w = weakReferenceArray!Object(1);
        w ~= o;
        w ~= o1;
        w[0] = o2;
        assert(w.aliveCount == 2 && w[0] is o2 && w[1] is o1);
        destroy(o);
        assert(w.aliveCount == 2 && w[0] is o2 && w[1] is o1);
        destroy(o2);
        assert(w.aliveCount == 1 && !w[0] && w[1] is o1);
        w[0] = o1;
        assert(w.aliveCount == 2 && w[0] is o1 && w[1] is o1);
        destroy(o1);
        assert(w.aliveCount == 0 && !w[0] && !w[1]);
    }

    interface I { }
    class C: I { void f() {} void f1() {} }
    {
        I i = new C(), i1 = new C();
        auto w = weakReferenceArray!I(1);
        w ~= i;
        w ~= i;
        w ~= i;
        w ~= i1;
        assert(w.aliveCount == 4 && w[0] is i && w[1] is i && w[2] is i && w[3] is i1);
        destroy(i);
        assert(w.aliveCount == 1 && !w[0] && !w[1] && !w[2] && w[3] is i1);
        destroy(i1);
        assert(!w.aliveCount && !w[0] && !w[1] && !w[2] && !w[3]);
    }
    {
        auto c = new C(), c1 = new C();
        auto w = weakReferenceArray!(void delegate())(1);
        w ~= &c.f;
        w ~= &c1.f;
        w ~= &c.f1;
        assert(w.aliveCount == 3 && w[0] is &c.f && w[1] is &c1.f && w[2] is &c.f1);
        destroy(c1);
        assert(w.aliveCount == 2 && w[0] is &c.f && !w[1] && w[2] is &c.f1);
        destroy(c);
        assert(!w.aliveCount && !w[0] && !w[1] && !w[2]);
    }
}


private:

alias DisposeEvt = void delegate(Object);

extern(C)
{
    Object _d_toObject(void* p) pure nothrow @nogc;
    void rt_attachDisposeEvent(Object obj, DisposeEvt evt);
    void rt_detachDisposeEvent(Object obj, DisposeEvt evt);
}

union _WeakData(T)
if(isWeakReferenceable!T)
{
    T target;
    struct
    {
        shared void* ptr; // a delegate also has its ptr at offset 0
        static if(is(T == delegate))
            void* funcptr;
    }

    // Returns the referenced object if it isn't finalized.
    @property inout(T) getTarget() inout nothrow
    {
        auto ptr = cast(inout shared void*) atomicLoad(/*de-inout*/(cast(const) this).ptr);
        if(!ptr)
            return null;

        // Note: this is an implementation-dependent GC fence, as there is
        // no guarantee `addrOf` will really lock the GC mutex.
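        // Presumably the intent is: if a collection was already finalizing the
        // target when `ptr` was loaded above, waiting on the GC lock lets its
        // dispose event run and null the stored pointer, which the re-check
        // below then observes. The address passed to `addrOf` is a dummy
        // value; only the locking side effect matters.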
        GC.addrOf(cast(void*) -1);

        // We now hold a strong reference (the local copy of `ptr`),
        // so just test whether the target is still alive:
        if(!atomicLoad(/*de-inout*/(cast(const) this).ptr))
            return null;

        // We have to use the obtained reference (the local `ptr`) in the result:
        static if(is(T == delegate))
            inout _WeakData res = { ptr: ptr, funcptr: funcptr };
        else
            inout _WeakData res = { ptr: ptr };
        return res.target;
    }
}

Object _targetToObj(T)(T t) pure nothrow @nogc
if(is(T == delegate))
{ return _d_toObject(t.ptr); }

inout(Object) _targetToObj(T)(inout T t) @trusted pure nothrow @nogc
if(is(T == class) || is(T == interface))
{ return t.upcast!Object; }
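
// Illustrative compile-time checks of `isWeakReferenceable`, added as a sketch
// (not part of the original test suite): exactly classes, interfaces and
// delegates are accepted.
unittest
{
    interface I { }
    struct S { }
    static assert( isWeakReferenceable!Object);
    static assert( isWeakReferenceable!I);
    static assert( isWeakReferenceable!(void delegate()));
    static assert(!isWeakReferenceable!S);
    static assert(!isWeakReferenceable!(void function()));
    static assert(!isWeakReferenceable!int);
}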