/**
Utility and ancillary artifacts of `stdx.collections`.
*/
module stdx.collections.common;
import std.range: isInputRange;

/// Advances `collection` past its first element and returns it.
/// NOTE(review): `popFront` on an empty range is undefined behavior for most
/// ranges — callers are expected to pass a non-empty collection.
auto tail(Collection)(Collection collection)
if (isInputRange!Collection)
{
    collection.popFront();
    return collection;
}

/// Returns an `RCIAllocator` wrapping `GCAllocator.instance` for the current
/// thread. The inner `wrapAllocatorObject` is only `@nogc nothrow`; the
/// `@trusted` lambda casts its function pointer to additionally claim
/// `pure @safe` so this can be called from `pure` code in this module.
package auto threadAllocatorObject()
{
    import std.experimental.allocator : RCIAllocator;

    static @nogc nothrow
    RCIAllocator wrapAllocatorObject()
    {
        import std.experimental.allocator.gc_allocator : GCAllocator;
        import std.experimental.allocator : allocatorObject;

        return allocatorObject(GCAllocator.instance);
    }
    // Cast adds `pure @safe` to the function pointer's type; the call itself
    // is unchanged.
    auto fn = (() @trusted =>
            cast(RCIAllocator function() @nogc nothrow pure @safe)(&wrapAllocatorObject))();
    return fn();
}

/// Shared-allocator counterpart of `threadAllocatorObject`: returns an
/// `RCISharedAllocator` wrapping `GCAllocator.instance`, with the same
/// attribute-laundering cast to present a `pure @safe` interface.
package auto processAllocatorObject()
{
    import std.experimental.allocator : RCISharedAllocator;

    static @nogc nothrow
    RCISharedAllocator wrapAllocatorObject()
    {
        import std.experimental.allocator.gc_allocator : GCAllocator;
        import std.experimental.allocator : sharedAllocatorObject;

        return sharedAllocatorObject(GCAllocator.instance);
    }
    auto fn = (() @trusted =>
            cast(RCISharedAllocator function() @nogc nothrow pure @safe)(&wrapAllocatorObject))();
    return fn();
}

// Returns an instance of the default allocator
// (the shared process allocator for qualified (immutable/const) collection
// types, the thread-local allocator otherwise).
package auto defaultAllocator(Q)()
{
    static if (is(Q == immutable) || is(Q == const))
        return processAllocatorObject();
    else
        return threadAllocatorObject();
}

/**
A reference-counted handle to an allocator used by the collections.

The handle owns a heap-allocated `Metadata` record that stores either a
thread-local `AffixAllocator!(RCIAllocator, size_t)` or a shared
`AffixAllocator!(RCISharedAllocator, size_t)` (the `size_t` affix prefix is
manipulated through `opPrefix`/`opCmpPrefix` below — presumably it holds the
collections' payload reference count; confirm against callers). The record
also carries its own reference count (`_rc`), updated atomically when the
wrapped allocator is shared.
*/
package struct AllocatorHandler
{
    import std.experimental.allocator : RCIAllocator, RCISharedAllocator,
           dispose, stateSize, theAllocator, processAllocator;
    import std.experimental.allocator.building_blocks.affix_allocator;
    import std.conv : emplace;
    import core.atomic : atomicOp;
    import std.algorithm.mutation : move;
    debug(AllocatorHandler) import std.stdio;

    // Pointer to the heap-allocated Metadata, stored as a `size_t`.
    // NOTE(review): keeping the pointer in integer form (the `void*` member
    // only overlays it) presumably lets qualified (immutable/const) handles
    // still rebind/mutate it through casts — confirm.
    private union
    {
        void *_;
        size_t _pMeta;
    }

    alias LocalAllocT = AffixAllocator!(RCIAllocator, size_t);
    alias SharedAllocT = shared AffixAllocator!(RCISharedAllocator, size_t);

    /// Heap record shared by all copies of a handle. The single-member unions
    /// suppress automatic construction/destruction of the allocator members;
    /// their lifetime is managed manually in the ctor and `decRef`.
    private static struct Metadata
    {
        union LAllocator
        {
            LocalAllocT alloc;
        }
        union SAllocator
        {
            SharedAllocT alloc;
        }

        LAllocator _localAlloc;
        SAllocator _sharedAlloc;
        bool _isShared;       // selects which union member is live
        size_t _rc = 1;       // handle refcount; starts at 1 for the creator
    }

    /// True when this handle is not bound to any metadata/allocator.
    pragma(inline, true)
    pure nothrow @trusted @nogc
    bool isNull() const
    {
        return (cast(void*) _pMeta) is null;
    }

    /// True when the wrapped allocator is the shared variant.
    /// Must not be called on a null handle (`isSharedMeta` asserts).
    pragma(inline, true)
    pure nothrow @safe @nogc
    bool isShared() const
    {
        return isSharedMeta(_pMeta);
    }

    /**
    Constructs a handle around `alloc` (an `RCIAllocator` or
    `RCISharedAllocator`). The template constraint forbids constructing a
    `shared` handle, and only allows an `immutable` handle when the allocator
    is the shared variant.

    Allocates the `Metadata` record from `alloc` itself via a delegate cast
    to `nothrow pure` (attribute laundering, as elsewhere in this module).
    */
    nothrow pure @trusted
    this(A, this Q)(A alloc)
    if (!is(Q == shared)
        && (is(A == RCISharedAllocator) || !is(Q == immutable))
        && (is(A == RCIAllocator) || is(A == RCISharedAllocator)))
    {
        //assert(alloc.alignment >= Metadata.alignof);

        // Allocate mem for metadata
        //auto state = alloc.allocate(stateSize!Metadata);

        auto dg = cast(void[] delegate(size_t, TypeInfo) nothrow pure)(&alloc.allocate);
        auto state = dg(stateSize!Metadata, null);
        assert(state !is null);

        auto meta = emplace!Metadata(state);
        assert(state.ptr == meta);
        assert(meta._rc == 1);

        static if (is(A == RCISharedAllocator))
        {
            // Bit-copy the shared allocator into the union member, then blast
            // `init` over the stack copy so its destructor does not release
            // the allocator when `shAlloc` goes out of scope (manual `move`
            // for a `shared` value).
            auto shAlloc = SharedAllocT(alloc);
            auto sz = stateSize!SharedAllocT;
            (cast(void*) &meta._sharedAlloc.alloc)[0 .. sz] = (cast(void*) &shAlloc)[0 .. sz];
            meta._isShared = true;
            SharedAllocT init;
            (cast(void*) &shAlloc)[0 .. sz] = (cast(void*) &init)[0 .. sz];
        }
        else
        {
            auto lcAlloc = LocalAllocT(alloc);
            move(lcAlloc, meta._localAlloc.alloc);
        }
        _pMeta = cast(size_t) state.ptr;
    }

    /// Copy construction: share the metadata and bump its refcount.
    /// The assert documents that the compiler hands us a zeroed `this`.
    pure nothrow @safe @nogc
    //this(this Q)(ref Q rhs)
    this(const ref typeof(this) rhs)
    {
        assert((() @trusted => (cast(void*) _pMeta) is null)());
        _pMeta = rhs._pMeta;
        incRef(_pMeta);
    }

    /// Rebinds this handle to `rhs`'s metadata: incRef the new record first,
    /// then decRef the old one (ordering makes self-assignment safe; the
    /// early return also short-circuits the same-metadata case).
    pure nothrow @safe /*@nogc*/
    ref typeof(this) opAssign(const ref typeof(this) rhs)
    {
        debug(AllocatorHandler)
        {
            writefln("AllocatorHandler.opAssign: begin");
            scope(exit) writefln("AllocatorHandler.opAssign: end");
        }

        auto pMeta = (() @trusted => cast(void*) _pMeta)();
        auto rhspMeta = (() @trusted => cast(void*) rhs._pMeta)();
        if (rhspMeta !is null && _pMeta == rhs._pMeta)
        {
            return this;
        }
        if (rhspMeta !is null)
        {
            rhs.incRef(rhs._pMeta);
            debug(AllocatorHandler) writefln(
                    "AllocatorHandler.opAssign: AllocatorHandler %s has refcount: %s",
                    &this, rhs.getRC);
        }
        if (pMeta) decRef(_pMeta);
        _pMeta = rhs._pMeta;
        return this;
    }

    /// Manually adds a reference to an already-bound handle. Used by callers
    /// that duplicate the handle by bit-copy rather than copy construction.
    pure nothrow @safe @nogc
    void bootstrap(this Q)()
    {
        assert((() @trusted => cast(void*) _pMeta)());
        incRef(_pMeta);
    }

    /// Releases this handle's reference; a null handle is a no-op.
    pure nothrow @safe /*@nogc*/
    ~this()
    {
        auto pMeta = (() @trusted => cast(void*) _pMeta)();
        if (pMeta is null)
        {
            debug(AllocatorHandler) writeln("META IS NULL");
            return;
        }
        decRef(_pMeta);
    }

    // Debug helper: current handle refcount. Non-atomic read even for shared
    // metadata, so the value is only advisory.
    //debug(AllocatorHandler)
    pragma(inline, true)
    private pure nothrow @trusted @nogc
    size_t getRC(this _)()
    {
        auto meta = cast(Metadata*) _pMeta;
        return meta._rc;
    }

    /// Reads the `_isShared` discriminator of a (non-null) metadata record.
    pragma(inline, true)
    static private pure nothrow @trusted @nogc
    bool isSharedMeta(const size_t pMeta)
    {
        assert(cast(void*) pMeta);
        auto meta = cast(Metadata*) pMeta;
        return meta._isShared;
    }

    /// Access to the live thread-local allocator; asserts the record is not
    /// shared.
    pragma(inline, true)
    static private pure nothrow @trusted @nogc
    ref auto localAllocator(const size_t pMeta)
    {
        assert(cast(void*) pMeta);
        auto meta = cast(Metadata*) pMeta;
        assert(!meta._isShared);
        return meta._localAlloc.alloc;
    }

    /// Access to the live shared allocator; asserts the record is shared.
    pragma(inline, true)
    static private pure nothrow @trusted @nogc
    ref auto sharedAllocator(const size_t pMeta)
    {
        assert(cast(void*) pMeta);
        auto meta = cast(Metadata*) pMeta;
        assert(meta._isShared);
        return meta._sharedAlloc.alloc;
    }

    /// Increments the metadata refcount — atomically when shared.
    static private @nogc nothrow pure @trusted
    void incRef(const size_t pMeta)
    {
        auto tmeta = cast(Metadata*) pMeta;
        if (tmeta._isShared)
        {
            auto meta = cast(shared Metadata*) pMeta;
            atomicOp!"+="(meta._rc, 1);
        }
        else
        {
            auto meta = cast(Metadata*) pMeta;
            ++meta._rc;
        }
    }

    /// Decrements the metadata refcount and, on reaching zero, tears the
    /// record down: the allocator is first moved onto the stack (so the
    /// metadata memory can be freed *through* it), the metadata block is
    /// deallocated, and the stack copy's destructor then releases the
    /// allocator itself when it goes out of scope.
    static private @nogc nothrow pure @trusted
    void decRef(const size_t pMeta)
    {
        auto tmeta = cast(Metadata*) pMeta;
        void[] origState = (cast(void*) tmeta)[0 .. stateSize!Metadata];

        if (tmeta._isShared)
        {
            auto meta = cast(shared Metadata*) pMeta;
            debug(AllocatorHandler) writeln("is shared");
            if (atomicOp!"-="(meta._rc, 1) == 0)
            {
                debug(AllocatorHandler) writeln("Here 2");
                SharedAllocT a;
                // Bitblast the allocator on the stack copy; this will ensure that the
                // dtor inside the union will be called
                // Workaround for move
                auto sz = stateSize!SharedAllocT;
                (cast(void*) &a)[0 .. sz] = (cast(void*) &meta._sharedAlloc.alloc)[0 .. sz];
                SharedAllocT init;
                (cast(void*) &meta._sharedAlloc.alloc)[0 .. sz] = (cast(void*) &init)[0 .. sz];
                //a.parent.deallocate(origState);
                // Deallocate via a delegate cast to @nogc nothrow pure, as above.
                (cast(bool delegate(void[]) @nogc nothrow pure)(&a.parent.deallocate))(origState);
            }
        }
        else
        {
            debug(AllocatorHandler) writeln("is not shared");
            auto meta = cast(Metadata*) pMeta;
            if (--meta._rc == 0)
            {
                debug(AllocatorHandler) writeln("Here 3");
                LocalAllocT a;
                move(meta._localAlloc.alloc, a);
                //assert(meta._localAlloc.alloc == LocalAllocT.init);
                //a.parent.deallocate(origState);
                (cast(bool delegate(void[]) @nogc nothrow pure)(&a.parent.deallocate))(origState);
            }
        }
    }

nothrow:

    // The public allocate/expand/deallocate entry points below each wrap a
    // private `_`-prefixed implementation in a @trusted delegate cast that
    // adds `pure` (and `nothrow`, via the label above) to its type.

    /// Allocates `n` bytes from the wrapped allocator.
    pure @trusted
    void[] allocate(size_t n) const
    {
        return (cast(void[] delegate(size_t) const nothrow pure)(&_allocate))(n);
    }

    void[] _allocate(size_t n) const
    {
        assert(cast(void*) _pMeta);
        return isSharedMeta(_pMeta) ?
            sharedAllocator(_pMeta).allocate(n) :
            localAllocator(_pMeta).allocate(n);
    }

    /// Attempts to grow `b` in place by `delta` bytes.
    pure @trusted
    bool expand(ref void[] b, size_t delta) const
    {
        return (cast(bool delegate(ref void[], size_t) const nothrow pure)(&_expand))(b, delta);
    }

    bool _expand(ref void[] b, size_t delta) const
    {
        assert(cast(void*) _pMeta);
        return isSharedMeta(_pMeta) ?
            sharedAllocator(_pMeta).expand(b, delta) :
            localAllocator(_pMeta).expand(b, delta);
    }

    /// Returns `b` to the wrapped allocator.
    pure
    bool deallocate(void[] b) const
    {
        return (cast(bool delegate(void[]) const nothrow pure)(&_deallocate))(b);
    }

    bool _deallocate(void[] b) const
    {
        assert(cast(void*) _pMeta);
        return isSharedMeta(_pMeta) ?
            sharedAllocator(_pMeta).deallocate(b) :
            localAllocator(_pMeta).deallocate(b);
    }

    /// Address of the `size_t` affix prefix that the `AffixAllocator` stores
    /// in front of allocation `b`, returned as a `size_t`.
    @nogc nothrow pure @trusted
    private size_t prefix(T)(const T[] b) const
    {
        assert(cast(void*) _pMeta);
        return isSharedMeta(_pMeta) ?
            cast(size_t)&sharedAllocator(_pMeta).prefix(b) :
            cast(size_t)&localAllocator(_pMeta).prefix(b);
    }

    /// Applies `+=` or `-=` with `val` to `support`'s affix prefix —
    /// atomically when the allocator is shared — and returns the result.
    @nogc nothrow pure @trusted
    size_t opPrefix(string op, T)(const T[] support, size_t val) const
    if ((op == "+=") || (op == "-="))
    {
        assert(cast(void*) _pMeta);
        if (isSharedMeta(_pMeta))
        {
            return cast(size_t)(atomicOp!op(*cast(shared size_t *)prefix(support), val));
        }
        else
        {
            mixin("return cast(size_t)(*cast(size_t *)prefix(support)" ~ op ~ "val);");
        }
    }

    /// Compares `support`'s affix prefix against `val` with the given
    /// relational operator (atomic read when shared); returns the comparison
    /// result widened to `size_t`.
    @nogc nothrow pure @trusted
    size_t opCmpPrefix(string op, T)(const T[] support, size_t val) const
    if ((op == "==") || (op == "<=") || (op == "<") || (op == ">=") || (op == ">"))
    {
        assert(cast(void*) _pMeta);
        if (isSharedMeta(_pMeta))
        {
            return cast(size_t)(atomicOp!op(*cast(shared size_t *)prefix(support), val));
        }
        else
        {
            mixin("return cast(size_t)(*cast(size_t *)prefix(support)" ~ op ~ "val);");
        }
    }

    /// Returns a handle guaranteed to wrap a shared allocator: a copy of this
    /// handle when it already is shared, otherwise a fresh handle around the
    /// process-wide GC-backed shared allocator.
    /*@nogc*/ nothrow pure @safe
    AllocatorHandler getSharedAlloc() const
    {
        if (isNull || !isShared)
        {
            return AllocatorHandler(processAllocatorObject());
        }
        return AllocatorHandler(this);
    }

    /// Returns the wrapped thread-local `RCIAllocator`, falling back to the
    /// thread GC allocator when this handle is null or shared.
    @nogc nothrow pure @safe
    RCIAllocator getLocalAlloc() const
    {
        if (isNull || isShared)
        {
            return threadAllocatorObject();
        }
        return localAllocator(_pMeta).parent;
    }
}

version(unittest)
{
    // Structs used to test the type system inference
    // (each one poisons exactly one attribute so collection code can verify
    // which attributes it infers for element types).

    /// Constructor is @system.
    package static struct Unsafe
    {
        int _x;
        @system this(int x) {}
    }

    /// Destructor is @system.
    package static struct UnsafeDtor
    {
        int _x;
        @nogc nothrow pure @safe this(int x) {}
        @system ~this() {}
    }

    /// Constructor is impure (reads `theAllocator`).
    package static struct Impure
    {
        import std.experimental.allocator : RCIAllocator, theAllocator;
        RCIAllocator _a;
        @safe this(int id) { _a = theAllocator; }
    }

    /// Destructor is impure (reads `theAllocator`).
    package static struct ImpureDtor
    {
        import std.experimental.allocator : RCIAllocator, theAllocator;
        RCIAllocator _a;
        @nogc nothrow pure @safe this(int x) {}
        @safe ~this() { _a = theAllocator; }
    }

    /// Constructor may throw (enforce).
    package static struct Throws
    {
        import std.exception : enforce;
        int _x;
        this(int id) { enforce(id > 0); }
    }

    /// Destructor may throw (enforce).
    package static struct ThrowsDtor
    {
        import std.exception : enforce;
        int _x;
        @nogc nothrow pure @safe this(int x) {}
        ~this() { enforce(_x > 0); }
    }
}

// Exercises AllocatorHandler with a stats-collecting allocator: allocation,
// prefix access, deallocation, and the no-leak invariant after the handle
// dies; also checks the immutable/const construction constraints.
unittest
{
    import std.experimental.allocator.mallocator;
    import std.experimental.allocator.building_blocks.stats_collector;
    import std.experimental.allocator : RCIAllocator, RCISharedAllocator,
           allocatorObject, sharedAllocatorObject, processAllocator, theAllocator;
    import std.conv : to;
    import std.stdio;
    import std.traits;

    struct MyA(A)
    {
        A a;
        alias a this;

        pure nothrow @nogc
        bool deallocate(void[] b)
        {
            return (cast(bool delegate(void[]) pure nothrow @nogc)(&a.deallocate))(b);
        }

        bool forceAttDealloc(void[] b)
        {
            return a.deallocate(b);
        }
    }

    //alias SCAlloc = MyA!(StatsCollector!(Mallocator, Options.bytesUsed));
    alias SCAlloc = StatsCollector!(Mallocator, Options.bytesUsed);
    SCAlloc statsCollectorAlloc;
    size_t bytesUsed = statsCollectorAlloc.bytesUsed;
    assert(bytesUsed == 0);
    {
        auto _allocator = allocatorObject(&statsCollectorAlloc);
        auto sca = AllocatorHandler(_allocator);
        auto buf = sca.allocate(10);
        assert(buf.length == 10);

        auto t = cast(size_t*)(sca.prefix(buf));
        assert(*t == 0);
        *t += 1;
        assert(*t == *cast(size_t*)sca.prefix(buf));
        sca.deallocate(buf);
    }
    bytesUsed = statsCollectorAlloc.bytesUsed;
    assert(bytesUsed == 0, "MutableDualAlloc ref count leaks memory; leaked "
            ~ to!string(bytesUsed) ~ " bytes");

    // Test immutable allocator
    auto ia = immutable AllocatorHandler(processAllocator);
    auto buf = ia.allocate(10);
    assert(buf.length == 10);
    ia.deallocate(buf);

    // An immutable handle may only wrap a shared allocator.
    static assert(!__traits(compiles, { auto ia2 = immutable AllocatorHandler(theAllocator); }));
    auto ca = const AllocatorHandler(theAllocator);
}

// getSharedAlloc on an already-shared handle must return a handle sharing
// the same metadata record.
unittest
{
    import std.experimental.allocator : RCIAllocator, RCISharedAllocator,
           allocatorObject, sharedAllocatorObject, processAllocator, theAllocator;
    import std.stdio;

    auto sca = immutable AllocatorHandler(processAllocator);
    auto buf = sca.allocate(10);
    assert(buf.length == 10);
    sca.deallocate(buf);

    auto al = sca.getSharedAlloc;
    AllocatorHandler al2 = al.getSharedAlloc;
    assert(al._pMeta == al2._pMeta);
}

/// Code fragment mixed into collection types: declares the `_allocator`
/// handle plus a `setAllocator` that binds it on first use. (The body is a
/// string literal, kept byte-identical.)
enum allocatorHandler = q{
    AllocatorHandler _allocator;

    /*
    Constructs the ouroboros allocator from allocator if the ouroboros
    allocator wasn't previously set
    */
    /*@nogc*/ nothrow pure @safe
    bool setAllocator(A)(ref A allocator)
    if (is(A == RCIAllocator) || is(A == RCISharedAllocator))
    {
        if (_allocator.isNull)
        {
            auto a = typeof(_allocator)(allocator);
            move(a, _allocator);
            return true;
        }
        return false;
    }
};