module stdx.collections.slist;

import stdx.collections.common;

debug(CollectionSList) import std.stdio;

version(unittest)
{
    import std.experimental.allocator.mallocator;
    import std.experimental.allocator.building_blocks.stats_collector;
    import std.experimental.allocator : RCIAllocator, RCISharedAllocator,
           allocatorObject, sharedAllocatorObject;

    // Mallocator instrumented with a byte counter so the unittests can
    // assert that every allocation made by SList is eventually freed.
    private alias SCAlloc = StatsCollector!(Mallocator, Options.bytesUsed);
}

/**
Reference-counted singly-linked list.

Nodes are allocated through an `AffixAllocator` that stores a `size_t`
reference count *before* each `Node`. The count is zero-based: a freshly
created node has count 0 and is owned by exactly one list; each additional
owner (postblit, `opAssign`, `tail`) bumps it. Immutable/const instances use
a shared allocator and atomic count updates.
*/
struct SList(T)
{
    import std.experimental.allocator : RCIAllocator, RCISharedAllocator,
           theAllocator, processAllocator, make, dispose;
    import std.experimental.allocator.building_blocks.affix_allocator;
    import std.traits : isImplicitlyConvertible;
    import std.range.primitives : isInputRange, ElementType;
    import std.variant : Algebraic;
    import std.conv : emplace;
    import core.atomic : atomicOp;

//private:
    // One list cell. The ref count is NOT stored here; it lives in the
    // allocator's affix in front of the node (see prefCount).
    struct Node
    {
        T _payload;
        Node *_next;

        this(T v, Node *n)
        {
            debug(CollectionSList) writefln("SList.Node.ctor: Constructing node" ~
                    " with payload: %s", v);
            _payload = v;
            _next = n;
        }

        ~this()
        {
            debug(CollectionSList) writefln("SList.Node.dtor: Destroying node" ~
                    " with payload: %s", _payload);
        }
    }

    Node *_head;

    // Affix allocators that prepend a size_t ref count to every Node.
    alias LocalAllocT = AffixAllocator!(RCIAllocator, size_t);
    alias SharedAllocT = AffixAllocator!(RCISharedAllocator, size_t);
    // Tagged union: a mutable list carries LocalAllocT, an immutable/const
    // one carries SharedAllocT.
    alias MutableAlloc = Algebraic!(LocalAllocT, SharedAllocT);
    //alias MutableAlloc = DualAllocatorU!(LocalAllocT, SharedAllocT);
    // "Ouroboros": the allocator is itself stored in memory it manages
    // (Mutable comes from stdx.collections.common).
    Mutable!MutableAlloc _ouroborosAllocator;

    /// Returns the actual allocator from ouroboros.
    /// Immutable/const instances yield the SharedAllocT variant; mutable
    /// ones the LocalAllocT variant. Asserts the variant is already set.
    @trusted ref auto allocator(this Q)()
    {
        static if (is(Q == immutable) || is(Q == const))
        {
            import std.traits : Unqual;
            alias UnqMutable = Unqual!(typeof(_ouroborosAllocator));
            // Cast away the qualifier to reach the stored allocator; safe
            // because the allocator's internals are designed to be shared.
            auto ouroborosAllocator = cast(UnqMutable *) &_ouroborosAllocator;
            assert(ouroborosAllocator.get.peek!(SharedAllocT) !is null);
            //assert(!ouroborosAllocator.get.get!(SharedAllocT).parent.isNull);
            return ouroborosAllocator.get.get!(SharedAllocT);
        }
        else
        {
            assert(_ouroborosAllocator.get.peek!(LocalAllocT) !is null);
            //assert(!_ouroborosAllocator.get.get!(LocalAllocT).parent.isNull);
            return _ouroborosAllocator.get.get!(LocalAllocT);
        }
    }

    /// Constructs the ouroboros allocator from allocator if the ouroboros
    /// allocator wasn't previously set.
    /// Returns: true if the allocator was set by this call, false if it
    /// had already been set (in which case `allocator` is ignored).
    @trusted bool setAllocator(RCIAllocator allocator)
    {
        if (_ouroborosAllocator.get.peek!(LocalAllocT) is null)
        //if (_ouroborosAllocator.get.get!(LocalAllocT).parent.isNull)
        {
            _ouroborosAllocator = Mutable!(MutableAlloc)(allocator,
                    MutableAlloc(LocalAllocT(allocator)));
            return true;
        }
        return false;
    }

    // Mixed into qualified constructors: installs the shared allocator into
    // _ouroborosAllocator. A mixin template is used so the assignment is
    // performed in the (immutable) constructor's own scope, where first
    // assignment to a field is allowed.
    mixin template setSharedAllocator(alias _ouroborosAllocator, RCISharedAllocator allocator)
    {
        mixin("_ouroborosAllocator = Mutable!(MutableAlloc)(allocator,
                MutableAlloc(SharedAllocT(allocator)));");
    }

    /// Returns the parent allocator, or a null allocator handle when none
    /// has been set yet (RCIAllocator(null) / RCISharedAllocator(null)).
    public @trusted auto ref getAllocator(this Q)()
    {
        static if (is(Q == immutable) || is(Q == const))
        {
            import std.traits : Unqual;
            alias UnqMutable = Unqual!(typeof(_ouroborosAllocator));
            auto ouroborosAllocator = cast(UnqMutable *) &_ouroborosAllocator;

            return ouroborosAllocator.get.peek!(SharedAllocT) is null ?
                RCISharedAllocator(null) : allocator().parent;
        }
        else
        {
            return _ouroborosAllocator.get.peek!(LocalAllocT) is null ?
                RCIAllocator(null) : allocator().parent;
        }
    }

    // Increment node's ref count; atomically for immutable/const lists,
    // since those may be shared across threads.
    @trusted void addRef(QualNode, this Q)(QualNode node)
    {
        assert(node !is null);
        debug(CollectionSList)
        {
            writefln("SList.addRef: Node %s has refcount: %s; will be: %s",
                    node._payload, *prefCount(node), *prefCount(node) + 1);
        }
        static if (is(Q == immutable) || is(Q == const))
        {
            atomicOp!"+="(*prefCount(node), 1);
        }
        else
        {
            ++*prefCount(node);
        }
    }

    // Decrement node's ref count; since counting is zero-based, a count of
    // 0 means "last owner", so the node is disposed and `node` nulled.
    @trusted void delRef(ref Node *node)
    {
        assert(node !is null);
        size_t *pref = prefCount(node);
        debug(CollectionSList) writefln("SList.delRef: Node %s has refcount: %s; will be: %s",
                node._payload, *pref, *pref - 1);
        if (*pref == 0)
        {
            debug(CollectionSList) writefln("SList.delRef: Deleting node %s", node._payload);
            allocator.dispose(node);
            node = null;
        }
        else
        {
            --*pref;
        }
    }

    // Address of the size_t ref count stored in the allocator's affix
    // directly before the node's memory. Returned as shared size_t* for
    // qualified instances so callers must use atomics.
    @trusted auto prefCount(QualNode, this Q)(QualNode node)
    {
        assert(node !is null);
        static if (is(Q == immutable) || is(Q == const))
        {
            return cast(shared size_t*)(&allocator.prefix(cast(void[Node.sizeof])(*node)));
        }
        else
        {
            return cast(size_t*)(&allocator.prefix(cast(void[Node.sizeof])(*node)));
        }
    }

    // Code generator for the qualified constructors: builds the node chain
    // in mutable temporaries, then head-casts to immutable in one @trusted
    // step. `stuff` is the name of the ctor parameter to iterate.
    // NOTE(review): `MutableAlloc(allocator)` looks like it should be
    // `MutableAlloc(SharedAllocT(allocator))` as in setSharedAllocator —
    // confirm before exercising the immutable path.
    static string immutableInsert(string stuff)
    {
        return ""
            ~"auto tmpAlloc = Mutable!(MutableAlloc)(allocator, MutableAlloc(allocator));"
            ~"_ouroborosAllocator = (() @trusted => cast(immutable)(tmpAlloc))();"
            ~"Node *tmpNode;"
            ~"Node *tmpHead;"
            ~"foreach (item; " ~ stuff ~ ")"
            ~"{"
                ~"Node *newNode;"
                ~"() @trusted { newNode ="
                    ~"tmpAlloc.get().make!(Node)(item, null);"
                ~"}();"
                ~"(tmpHead ? tmpNode._next : tmpHead) = newNode;"
                ~"tmpNode = newNode;"
            ~"}"
            ~"_head = (() @trusted => cast(immutable Node*)(tmpHead))();";
    }

public:
    /// Constructs an empty list bound to `allocator`. Qualified (immutable/
    /// const) instances require a shared allocator, mutable ones a local one.
    this(A, this Q)(A allocator)
    if (((is(Q == immutable) || is(Q == const)) && is(A == RCISharedAllocator))
        || (!(is(Q == immutable) || is(Q == const)) && is(A == RCIAllocator)))
    {
        debug(CollectionSList)
        {
            writefln("SList.ctor: begin");
            scope(exit) writefln("SList.ctor: end");
        }
        static if (is(Q == immutable) || is(Q == const))
        {
            mixin setSharedAllocator!(_ouroborosAllocator, allocator);
        }
        else
        {
            setAllocator(allocator);
        }
    }

    /// Constructs a list from `values` using the default (thread-local or
    /// process-wide) allocator.
    this(U, this Q)(U[] values...)
    if (isImplicitlyConvertible!(U, T))
    {
        static if (is(Q == immutable) || is(Q == const))
        {
            this(processAllocator, values);
        }
        else
        {
            this(theAllocator, values);
        }
    }

    /// Constructs a list from `values` using `allocator`.
    this(A, U, this Q)(A allocator, U[] values...)
    if ((((is(Q == immutable) || is(Q == const)) && is(A == RCISharedAllocator))
        || (!(is(Q == immutable) || is(Q == const)) && is(A == RCIAllocator)))
        && isImplicitlyConvertible!(U, T))
    {
        debug(CollectionSList)
        {
            writefln("SList.ctor: begin");
            scope(exit) writefln("SList.ctor: end");
        }
        static if (is(Q == immutable) || is(Q == const))
        {
            // Fixed: this branch was previously commented out, so an
            // immutable/const list constructed from values was left empty.
            // Mirror the range-based qualified constructor below.
            mixin setSharedAllocator!(_ouroborosAllocator, allocator);
            mixin(immutableInsert("values"));
        }
        else
        {
            setAllocator(allocator);
            insert(0, values);
        }
    }

    /// Constructs a list from an input range using the default allocator.
    this(Stuff, this Q)(Stuff stuff)
    if (isInputRange!Stuff
        && isImplicitlyConvertible!(ElementType!Stuff, T)
        && !is(Stuff == T[]))
    {
        static if (is(Q == immutable) || is(Q == const))
        {
            this(processAllocator, stuff);
        }
        else
        {
            this(theAllocator, stuff);
        }
    }

    /// Constructs a list from an input range using `allocator`.
    this(A, Stuff, this Q)(A allocator, Stuff stuff)
    if ((((is(Q == immutable) || is(Q == const)) && is(A == RCISharedAllocator))
        || (!(is(Q == immutable) || is(Q == const)) && is(A == RCIAllocator)))
        && isInputRange!Stuff
        && isImplicitlyConvertible!(ElementType!Stuff, T)
        && !is(Stuff == T[]))
    {
        debug(CollectionSList)
        {
            writefln("SList.ctor: begin");
            scope(exit) writefln("SList.ctor: end");
        }
        static if (is(Q == immutable) || is(Q == const))
        {
            mixin setSharedAllocator!(_ouroborosAllocator, allocator);
            mixin(immutableInsert("stuff"));
        }
        else
        {
            setAllocator(allocator);
            insert(0, stuff);
        }
    }

    // Copying a list only bumps the head's ref count; the tail is shared.
    this(this)
    {
        debug(CollectionSList)
        {
            writefln("SList.postblit: begin");
            scope(exit) writefln("SList.postblit: end");
        }
        if (_head !is null)
        {
            addRef(_head);
            debug(CollectionSList) writefln("SList.postblit: Node %s has refcount: %s",
                    _head._payload, *prefCount(_head));
        }
    }

    // Immutable ctors
    // Used by tail() on qualified lists: shares _newHead and the existing
    // allocator, adding a reference to the shared node.
    private this(NodeQual, OuroQual, this Qualified)(NodeQual _newHead,
            OuroQual ouroborosAllocator)
    if (is(typeof(_head) : typeof(_newHead))
        && (is(Qualified == immutable) || is(Qualified == const)))
    {
        _head = _newHead;
        _ouroborosAllocator = ouroborosAllocator;
        if (_head !is null)
        {
            addRef(_head);
            debug(CollectionSList) writefln("SList.ctor immutable: Node %s has "
                    ~ "refcount: %s", _head._payload, *prefCount(_head));
        }
    }

    @safe ~this()
    {
        debug(CollectionSList)
        {
            writefln("SList.dtor: Begin for instance %s of type %s",
                cast(size_t)(&this), typeof(this).stringof);
            scope(exit) writefln("SList.dtor: End for instance %s of type %s",
                    cast(size_t)(&this), typeof(this).stringof);
        }
        (() @trusted => destroyUnused())();
    }

    /// Releases this instance's ownership: frees uniquely-owned nodes from
    /// the head, and drops one reference at the first shared node.
    void destroyUnused() @trusted
    {
        debug(CollectionSList)
        {
            writefln("SList.destoryUnused: begin");
            scope(exit) writefln("SList.destoryUnused: end");
        }
        while (_head !is null && *prefCount(_head) == 0)
        {
            debug(CollectionSList) writefln("SList.destoryUnused: One ref with head at %s",
                    _head._payload);
            Node *tmpNode = _head;
            _head = _head._next;
            delRef(tmpNode);
        }

        if (_head !is null && *prefCount(_head) > 0)
        {
            // We reached a copy, so just remove the head ref, thus deleting
            // the copy in constant time (we are undoing the postblit)
            debug(CollectionSList) writefln("SList.destoryUnused: Multiple refs with head at %s",
                    _head._payload);
            delRef(_head);
        }
    }

    /// Returns true when no node in the chain is shared with another list.
    bool isUnique(this _)()
    {
        debug(CollectionSList)
        {
            writefln("SList.isUnique: begin");
            scope(exit) writefln("SList.isUnique: end");
        }

        Node *tmpNode = (() @trusted => cast(Node*)_head)();
        while (tmpNode !is null)
        {
            if (*prefCount(tmpNode) > 0)
            {
                return false;
            }
            tmpNode = tmpNode._next;
        }
        return true;
    }

    /// Returns true when the list holds no elements.
    bool empty(this _)()
    {
        return _head is null;
    }

    /// Returns a reference to the first element. The list must be non-empty.
    ref auto front(this _)()
    {
        assert(!empty, "SList.front: List is empty");
        return _head._payload;
    }

    /// Advances past the first element, releasing this list's reference to it.
    void popFront()
    {
        debug(CollectionSList)
        {
            writefln("SList.popFront: begin");
            scope(exit) writefln("SList.popFront: end");
        }
        assert(!empty, "SList.popFront: List is empty");

        Node *tmpNode = _head;
        _head = _head._next;
        if (*prefCount(tmpNode) > 0 && _head !is null)
        {
            // If we have another copy of the list then the refcount
            // must increase, otherwise it will remain the same
            // This condition is needed because the recounting is zero based
            addRef(_head);
        }
        delRef(tmpNode);
    }

    /// Returns a list starting at the second node; shares nodes with `this`.
    Qualified tail(this Qualified)() @trusted
    {
        debug(CollectionSList)
        {
            writefln("SList.tail: begin");
            scope(exit) writefln("SList.tail: end");
        }
        assert(!empty, "SList.tail: List is empty");

        static if (is(Qualified == immutable) || is(Qualified == const))
        {
            return Qualified(_head._next, _ouroborosAllocator);
        }
        else
        {
            // Delegates to the free function in stdx.collections.common.
            return .tail(this);
        }
    }

    /// Forward-range save: the struct itself is the range, so return this.
    ref Qualified save(this Qualified)()
    {
        debug(CollectionSList)
        {
            writefln("SList.save: begin");
            scope(exit) writefln("SList.save: end");
        }
        return this;
    }

    /// Returns a deep copy (element-wise) of this list.
    typeof(this) dup()
    {
        debug(CollectionSList)
        {
            writefln("SList.dup: begin");
            scope(exit) writefln("SList.dup: end");
        }
        RCIAllocator alloc = getAllocator();
        if (alloc.isNull)
        {
            alloc = theAllocator;
        }
        return typeof(this)(alloc, this);
    }

    /// Inserts the elements of `stuff` before position `pos` (0 = head).
    /// Returns the number of elements inserted. `pos` must be within bounds.
    size_t insert(Stuff)(size_t pos, Stuff stuff)
    if (isInputRange!Stuff && isImplicitlyConvertible!(ElementType!Stuff, T))
    {
        debug(CollectionSList)
        {
            writefln("SList.insert: begin");
            scope(exit) writefln("SList.insert: end");
        }
        // Ensure we have an allocator. If it was already set, this will do nothing
        setAllocator(theAllocator);

        size_t result;
        Node *tmpNode;
        Node *tmpHead;
        foreach (item; stuff)
        {
            Node *newNode;
            () @trusted { newNode = allocator.make!(Node)(item, null); }();
            (tmpHead ? tmpNode._next : tmpHead) = newNode;
            tmpNode = newNode;
            ++result;
        }

        if (!tmpNode)
        {
            return 0;
        }

        // Walk to the insertion point, remembering the predecessor.
        Node *needle = _head;
        Node *needlePrev = null;
        while (pos)
        {
            needlePrev = needle;
            needle = needle._next;
            --pos;
        }

        tmpNode._next = needle;
        if (needlePrev is null)
        {
            _head = tmpHead;
        }
        else
        {
            needlePrev._next = tmpHead;
        }
        return result;
    }

    /// ditto
    size_t insert(Stuff)(size_t pos, Stuff[] stuff...)
    if (isImplicitlyConvertible!(Stuff, T))
    {
        return insert(pos, stuff);
    }

    /// Appends the elements of `stuff`; returns the number inserted.
    /// O(n) in the current list length (walks to the last node).
    size_t insertBack(Stuff)(Stuff stuff)
    if (isInputRange!Stuff && isImplicitlyConvertible!(ElementType!Stuff, T))
    {
        debug(CollectionSList)
        {
            writefln("SList.insertBack: begin");
            scope(exit) writefln("SList.insertBack: end");
        }
        // Ensure we have an allocator. If it was already set, this will do nothing
        setAllocator(theAllocator);

        size_t result;
        Node *tmpNode;
        Node *tmpHead;
        foreach (item; stuff)
        {
            Node *newNode;
            () @trusted { newNode = allocator.make!(Node)(item, null); }();
            (tmpHead ? tmpNode._next : tmpHead) = newNode;
            tmpNode = newNode;
            ++result;
        }

        if (!tmpNode)
        {
            return 0;
        }

        if (_head is null)
        {
            _head = tmpHead;
        }
        else
        {
            Node *endNode;
            for (endNode = _head; endNode._next !is null; endNode = endNode._next) { }
            endNode._next = tmpHead;
        }

        return result;
    }

    /// ditto
    size_t insertBack(Stuff)(Stuff[] stuff...)
    if (isImplicitlyConvertible!(Stuff, T))
    {
        return insertBack(stuff);
    }

    /// Concatenation: returns a new list holding this list's elements
    /// followed by `rhs` (a list, an element, or a compatible range).
    auto ref opBinary(string op, U)(auto ref U rhs) @trusted
    if (op == "~" &&
        (is (U == typeof(this))
         || is (U : T)
         || (isInputRange!U && isImplicitlyConvertible!(ElementType!U, T))
        ))
    {
        debug(CollectionSList)
        {
            writefln("SList.opBinary!~: begin");
            scope(exit) writefln("SList.opBinary!~: end");
        }

        RCIAllocator alloc = getAllocator();
        if (alloc.isNull)
        {
            static if (is(U == typeof(this)))
            {
                alloc = rhs.getAllocator();
            }
            else
            {
                alloc = RCIAllocator(null);
            }
            if (alloc.isNull)
            {
                alloc = theAllocator;
            }
        }
        typeof(this) newList = typeof(this)(alloc, rhs);
        newList.insert(0, this);
        return newList;
    }

    /// Assignment shares rhs's nodes (reference semantics) after releasing
    /// this instance's current ownership. Self-assignment is a no-op.
    auto ref opAssign()(auto ref typeof(this) rhs) @trusted
    {
        debug(CollectionSList)
        {
            writefln("SList.opAssign: begin");
            scope(exit) writefln("SList.opAssign: end");
        }

        if (rhs._head !is null && _head is rhs._head)
        {
            return this;
        }

        // Add the new reference before dropping the old one, so a list
        // reachable from both sides is never transiently freed.
        if (rhs._head !is null)
        {
            rhs.addRef(rhs._head);
            debug(CollectionSList) writefln("SList.opAssign: Node %s has refcount: %s",
                    rhs._head._payload, *prefCount(rhs._head));
        }
        destroyUnused();
        _head = rhs._head;
        _ouroborosAllocator = rhs._ouroborosAllocator;

        return this;
    }

    /// Append-assign: `list ~= x` appends in place via insertBack.
    auto ref opOpAssign(string op, U)(auto ref U rhs) @trusted
    if (op == "~" &&
        (is (U == typeof(this))
         || is (U : T)
         || (isInputRange!U && isImplicitlyConvertible!(ElementType!U, T))
        ))
    {
        debug(CollectionSList)
        {
            writefln("SList.opOpAssign!~: %s begin", typeof(this).stringof);
            scope(exit) writefln("SList.opOpAssign!~: %s end", typeof(this).stringof);
        }

        insertBack(rhs);
        return this;
    }

    /// Removes the element at index `idx` (default: the head).
    /// Asserts when the list is empty or `idx` is out of bounds.
    void remove(size_t idx = 0)
    {
        assert(!empty, "SList.remove: List is empty");
        if (idx == 0)
        {
            popFront();
            return;
        }

        Node *tmpNode = _head;
        while(--idx != 0)
        {
            tmpNode = tmpNode._next;
        }
        Node *toDel = tmpNode._next;
        assert(toDel !is null, "SList.remove: Index out of bounds");
        tmpNode._next = tmpNode._next._next;
        delRef(toDel);
    }

    // Debug aid: dumps each node's payload and ref count.
    debug(CollectionSList) void printRefCount(this _)()
    {
        writefln("SList.printRefCount: begin");
        scope(exit) writefln("SList.printRefCount: end");

        Node *tmpNode = (() @trusted => cast(Node*)_head)();
        while (tmpNode !is null)
        {
            writefln("SList.printRefCount: Node %s has ref count %s",
                    tmpNode._payload, *prefCount(tmpNode));
            tmpNode = tmpNode._next;
        }
    }
}

version(unittest) private @safe void testImmutability(RCISharedAllocator allocator)
{
    auto s = immutable SList!(int)(allocator, 1, 2, 3);
    auto s2 = s;
    auto s3 = s2.save();

    assert(s2.front == 1);
    static assert(!__traits(compiles, s2.front = 4));
    static assert(!__traits(compiles, s2.popFront()));

    auto s4 = s2.tail;
    assert(s4.front == 2);
    static assert(!__traits(compiles, s4 = s4.tail));
}

version(unittest) private @safe void testConstness(RCISharedAllocator allocator)
{
    auto s = const SList!(int)(allocator, 1, 2, 3);
    auto s2 = s;
    auto s3 = s2.save();

    assert(s2.front == 1);
    static assert(!__traits(compiles, s2.front = 4));
    static assert(!__traits(compiles, s2.popFront()));

    auto s4 = s2.tail;
    assert(s4.front == 2);
    static assert(!__traits(compiles, s4 = s4.tail));
}

// TODO: StatsCollector need to be made shareable
version(none)
@trusted unittest
{
    import std.conv;
    SCAlloc statsCollectorAlloc;
    auto _allocator = sharedAllocatorObject(&statsCollectorAlloc);

    () @safe {
        testImmutability(_allocator);
        testConstness(_allocator);
    }();
    auto bytesUsed = statsCollectorAlloc.bytesUsed;
    assert(bytesUsed == 0, "SList ref count leaks memory; leaked "
            ~ to!string(bytesUsed) ~ " bytes");

    //() @nogc {
        //_allocator.allocate(1);
    //}();

    //() @nogc {
        //import std.experimental.allocator.building_blocks.affix_allocator;
        //import std.stdio;
        //auto a = AffixAllocator!(Mallocator, size_t).instance;
        //auto ia = allocatorObject(a);
        //pragma(msg, typeof(a));
        //auto b = ia.allocate(1);
        //pragma(msg, typeof(ia.impl.prefix(b)));
    //}();
}

version(unittest) private @safe void testConcatAndAppend(RCIAllocator allocator)
{
    import std.algorithm.comparison : equal;

    auto sl = SList!(int)(allocator, 1, 2, 3);
    SList!(int) sl2 = SList!int(allocator);

    auto sl3 = sl ~ sl2;
    assert(equal(sl3, [1, 2, 3]));

    auto sl4 = sl3;
    sl3 = sl3 ~ 4;
    assert(equal(sl3, [1, 2, 3, 4]));
    sl3 = sl3 ~ [5];
    assert(equal(sl3, [1, 2, 3, 4, 5]));
    assert(equal(sl4, [1, 2, 3]));

    sl4 = sl3;
    sl3 ~= 6;
    assert(equal(sl3, [1, 2, 3, 4, 5, 6]));
    sl3 ~= [7];
    assert(equal(sl3, [1, 2, 3, 4, 5, 6, 7]));
    assert(equal(sl4, [1, 2, 3, 4, 5, 6, 7]));

    sl3 ~= sl3;
    assert(equal(sl3, [1, 2, 3, 4, 5, 6, 7, 1, 2, 3, 4, 5, 6, 7]));
    assert(equal(sl4, [1, 2, 3, 4, 5, 6, 7, 1, 2, 3, 4, 5, 6, 7]));

    SList!int sl5 = SList!int(allocator);
    sl5 ~= [1, 2, 3];
    assert(equal(sl5, [1, 2, 3]));
}

@trusted unittest
{
    import std.conv;
    SCAlloc statsCollectorAlloc;
    auto _allocator = allocatorObject(&statsCollectorAlloc);

    () @safe {
        testConcatAndAppend(_allocator);
    }();
    auto bytesUsed = statsCollectorAlloc.bytesUsed;
    assert(bytesUsed == 0, "SList ref count leaks memory; leaked "
            ~ to!string(bytesUsed) ~ " bytes");
}

version(unittest) private @safe void testSimple(RCIAllocator allocator)
{
    import std.algorithm.comparison : equal;
    import std.algorithm.searching : canFind;
    import std.range.primitives : walkLength;

    auto sl = SList!int(allocator);
    assert(sl.empty);
    assert(sl.isUnique);

    size_t pos = 0;
    sl.insert(pos, 1, 2, 3);
    assert(sl.front == 1);
    assert(equal(sl, sl));
    assert(equal(sl, [1, 2, 3]));

    sl.popFront();
    assert(sl.front == 2);
    assert(equal(sl, [2, 3]));

    sl.insert(pos, [4, 5, 6]);
    sl.insert(pos, 7);
    sl.insert(pos, [8]);
    assert(equal(sl, [8, 7, 4, 5, 6, 2, 3]));

    sl.insertBack(0, 1);
    sl.insertBack([-1, -2]);
    assert(equal(sl, [8, 7, 4, 5, 6, 2, 3, 0, 1, -1, -2]));

    sl.front = 9;
    assert(equal(sl, [9, 7, 4, 5, 6, 2, 3, 0, 1, -1, -2]));

    auto slTail = sl.tail;
    assert(slTail.front == 7);
    slTail.front = 8;
    assert(slTail.front == 8);
    assert(sl.tail.front == 8);
    assert(!sl.isUnique);

    assert(canFind(sl, 2));
    assert(!canFind(sl, -10));

    sl.remove();
    assert(equal(sl, [8, 4, 5, 6, 2, 3, 0, 1, -1, -2]));
    sl.remove(2);
    assert(equal(sl, [8, 4, 6, 2, 3, 0, 1, -1, -2]));
    sl.remove(walkLength(sl) - 1);
    assert(equal(sl, [8, 4, 6, 2, 3, 0, 1, -1]));
    pos = 1;
    sl.insert(pos, 10);
    assert(equal(sl, [8, 10, 4, 6, 2, 3, 0, 1, -1]));
}

@trusted unittest
{
    import std.conv;
    SCAlloc statsCollectorAlloc;
    auto _allocator = allocatorObject(&statsCollectorAlloc);

    () @safe {
        testSimple(_allocator);
    }();
    auto bytesUsed = statsCollectorAlloc.bytesUsed;
    assert(bytesUsed == 0, "SList ref count leaks memory; leaked "
            ~ to!string(bytesUsed) ~ " bytes");
}

version(unittest) private @safe void testSimpleImmutable(RCIAllocator allocator)
{
    import std.algorithm.comparison : equal;
    import std.algorithm.searching : canFind;
    import std.range.primitives : walkLength;

    auto sl = SList!(immutable int)(allocator);
    assert(sl.empty);

    size_t pos = 0;
    sl.insert(pos, 1, 2, 3);
    assert(sl.front == 1);
    assert(equal(sl, sl));
    assert(equal(sl, [1, 2, 3]));

    sl.popFront();
    assert(sl.front == 2);
    assert(equal(sl, [2, 3]));
    assert(sl.tail.front == 3);

    sl.insert(pos, [4, 5, 6]);
    sl.insert(pos, 7);
    sl.insert(pos, [8]);
    assert(equal(sl, [8, 7, 4, 5, 6, 2, 3]));

    sl.insertBack(0, 1);
    sl.insertBack([-1, -2]);
    assert(equal(sl, [8, 7, 4, 5, 6, 2, 3, 0, 1, -1, -2]));

    // Cannot modify immutable values
    static assert(!__traits(compiles, sl.front = 9));

    assert(canFind(sl, 2));
    assert(!canFind(sl, -10));

    sl.remove();
    assert(equal(sl, [7, 4, 5, 6, 2, 3, 0, 1, -1, -2]));
    sl.remove(2);
    assert(equal(sl, [7, 4, 6, 2, 3, 0, 1, -1, -2]));
    sl.remove(walkLength(sl) - 1);
    assert(equal(sl, [7, 4, 6, 2, 3, 0, 1, -1]));
}

@trusted unittest
{
    import std.conv;
    SCAlloc statsCollectorAlloc;
    auto _allocator = allocatorObject(&statsCollectorAlloc);

    () @safe {
        testSimpleImmutable(_allocator);
    }();
    auto bytesUsed = statsCollectorAlloc.bytesUsed;
    assert(bytesUsed == 0, "SList ref count leaks memory; leaked "
            ~ to!string(bytesUsed) ~ " bytes");
}

version(unittest) private @safe void testCopyAndRef(RCIAllocator allocator)
{
    import std.algorithm.comparison : equal;

    auto slFromList = SList!int(allocator, 1, 2, 3);
    auto slFromRange = SList!int(allocator, slFromList);
    assert(equal(slFromList, slFromRange));

    slFromList.popFront();
    assert(equal(slFromList, [2, 3]));
    assert(equal(slFromRange, [1, 2, 3]));

    SList!int slInsFromRange = SList!int(allocator);
    size_t pos = 0;
    slInsFromRange.insert(pos, slFromList);
    slFromList.popFront();
    assert(equal(slFromList, [3]));
    assert(equal(slInsFromRange, [2, 3]));

    SList!int slInsBackFromRange = SList!int(allocator);
    slInsBackFromRange.insert(pos, slFromList);
    slFromList.popFront();
    assert(slFromList.empty);
    assert(equal(slInsBackFromRange, [3]));

    auto slFromRef = slInsFromRange;
    auto slFromDup = slInsFromRange.dup;
    assert(slInsFromRange.front == 2);
    slFromRef.front = 5;
    assert(slInsFromRange.front == 5);
    assert(slFromDup.front == 2);
}

@trusted unittest
{
    import std.conv;
    SCAlloc statsCollectorAlloc;
    auto _allocator = allocatorObject(&statsCollectorAlloc);

    () @safe {
        testCopyAndRef(_allocator);
    }();
    auto bytesUsed = statsCollectorAlloc.bytesUsed;
    assert(bytesUsed == 0, "SList ref count leaks memory; leaked "
            ~ to!string(bytesUsed) ~ " bytes");
}

version(unittest) private @safe void testWithStruct(RCIAllocator allocator)
{
    import std.algorithm.comparison : equal;

    auto list = SList!int(allocator, 1, 2, 3);
    {
        auto listOfLists = SList!(SList!int)(allocator, list);
        assert(equal(listOfLists.front, [1, 2, 3]));
        listOfLists.front.front = 2;
        assert(equal(listOfLists.front, [2, 2, 3]));
        size_t pos = 0;
        static assert(!__traits(compiles, listOfLists.insert(pos, 1)));

        //auto immListOfLists = immutable SList!(SList!int)(allocator, list);
        //assert(immListOfLists.front.front == 2);
        //static assert(!__traits(compiles, immListOfLists.front.front = 2));
    }
    assert(equal(list, [2, 2, 3]));
}

@trusted unittest
{
    import std.conv;
    SCAlloc statsCollectorAlloc;
    auto _allocator = allocatorObject(&statsCollectorAlloc);

    () @safe {
        testWithStruct(_allocator);
    }();
    auto bytesUsed = statsCollectorAlloc.bytesUsed;
    assert(bytesUsed == 0, "SList ref count leaks memory; leaked "
            ~ to!string(bytesUsed) ~ " bytes");
}

version(unittest) private @safe void testWithClass(RCIAllocator allocator)
{
    class MyClass
    {
        int x;
        this(int x) { this.x = x; }
    }

    MyClass c = new MyClass(10);
    {
        auto sl = SList!MyClass(allocator, c);
        assert(sl.front.x == 10);
        assert(sl.front is c);
        sl.front.x = 20;
    }
    assert(c.x == 20);
}

@trusted unittest
{
    import std.conv;
    SCAlloc statsCollectorAlloc;
    // Fixed: previously passed by value (`allocatorObject(statsCollectorAlloc)`),
    // which wraps a *copy* of the stats collector, so the local bytesUsed
    // below would trivially stay 0 and the leak check was vacuous. Every
    // other unittest in this module passes by pointer.
    auto _allocator = allocatorObject(&statsCollectorAlloc);

    () @safe {
        testWithClass(_allocator);
    }();
    auto bytesUsed = statsCollectorAlloc.bytesUsed;
    assert(bytesUsed == 0, "SList ref count leaks memory; leaked "
            ~ to!string(bytesUsed) ~ " bytes");
}